@eagleoutice/flowr 2.2.1 → 2.2.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/cli/flowr.js +2 -1
- package/cli/repl/commands/repl-cfg.js +30 -7
- package/cli/repl/commands/repl-dataflow.js +22 -2
- package/cli/repl/commands/repl-normalize.js +22 -2
- package/cli/repl/commands/repl-parse.js +8 -2
- package/cli/repl/core.js +4 -0
- package/cli/repl/server/connection.js +1 -1
- package/cli/script-core/statistics-helper-core.js +1 -1
- package/config.js +8 -1
- package/dataflow/extractor.js +1 -1
- package/dataflow/graph/dataflowgraph-builder.d.ts +11 -10
- package/dataflow/graph/dataflowgraph-builder.js +11 -10
- package/dataflow/graph/edge.d.ts +1 -1
- package/dataflow/graph/edge.js +2 -2
- package/dataflow/graph/vertex.d.ts +6 -6
- package/dataflow/graph/vertex.js +5 -5
- package/dataflow/internal/process/functions/call/built-in/built-in-access.js +9 -5
- package/dataflow/internal/process/functions/call/built-in/built-in-source.js +2 -2
- package/documentation/doc-util/doc-cfg.js +2 -2
- package/documentation/doc-util/doc-types.js +3 -3
- package/documentation/print-interface-wiki.js +0 -2
- package/documentation/print-query-wiki.js +30 -0
- package/package.json +8 -6
- package/queries/catalog/call-context-query/call-context-query-executor.js +1 -1
- package/queries/catalog/happens-before-query/happens-before-query-executor.d.ts +1 -1
- package/queries/catalog/happens-before-query/happens-before-query-executor.js +2 -2
- package/queries/catalog/resolve-value-query/resolve-value-query-executor.d.ts +4 -0
- package/queries/catalog/resolve-value-query/resolve-value-query-executor.js +34 -0
- package/queries/catalog/resolve-value-query/resolve-value-query-format.d.ts +72 -0
- package/queries/catalog/resolve-value-query/resolve-value-query-format.js +49 -0
- package/queries/query.d.ts +60 -1
- package/queries/query.js +3 -1
- package/r-bridge/data/data.d.ts +2 -2
- package/r-bridge/data/data.js +2 -2
- package/r-bridge/lang-4.x/tree-sitter/tree-sitter-normalize.js +46 -29
- package/r-bridge/lang-4.x/tree-sitter/tree-sitter-types.d.ts +4 -1
- package/r-bridge/lang-4.x/tree-sitter/tree-sitter-types.js +3 -0
- package/util/cfg/cfg.d.ts +10 -1
- package/util/cfg/cfg.js +56 -2
- package/util/range.d.ts +21 -0
- package/util/range.js +3 -0
- package/util/version.js +1 -1
package/cli/flowr.js
CHANGED
@@ -23,6 +23,7 @@ const print_version_1 = require("./repl/print-version");
 const flowr_main_options_1 = require("./flowr-main-options");
 const tree_sitter_executor_1 = require("../r-bridge/lang-4.x/tree-sitter/tree-sitter-executor");
 const fs_1 = __importDefault(require("fs"));
+const path_1 = __importDefault(require("path"));
 exports.toolName = 'flowr';
 exports.optionHelp = [
     {
@@ -63,7 +64,7 @@ if (options['config-json']) {
 if (!usedConfig) {
     if (options['config-file']) {
         // validate it exists
-        if (!fs_1.default.existsSync(options['config-file'])) {
+        if (!fs_1.default.existsSync(path_1.default.resolve(options['config-file']))) {
            log_1.log.error(`Config file '${options['config-file']}' does not exist`);
            process.exit(1);
        }
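Note on the flowr.js change: the `--config-file` argument is now resolved against the current working directory before the existence check, so relative paths behave the same as absolute ones. A minimal sketch of the pattern (plain Node.js; the helper name is hypothetical):

    import fs from 'fs';
    import path from 'path';

    // hypothetical helper mirroring the check above: resolve first, then test existence
    function configFileExists(configFile: string): boolean {
        return fs.existsSync(path.resolve(configFile));
    }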
package/cli/repl/commands/repl-cfg.js
CHANGED
@@ -1,24 +1,41 @@
 "use strict";
+var __importDefault = (this && this.__importDefault) || function (mod) {
+    return (mod && mod.__esModule) ? mod : { "default": mod };
+};
 Object.defineProperty(exports, "__esModule", { value: true });
 exports.controlflowStarCommand = exports.controlflowCommand = void 0;
 const cfg_1 = require("../../../util/cfg/cfg");
 const default_pipelines_1 = require("../../../core/steps/pipeline/default-pipelines");
 const retriever_1 = require("../../../r-bridge/retriever");
 const cfg_2 = require("../../../util/mermaid/cfg");
+const ansi_1 = require("../../../util/ansi");
+const clipboardy_1 = __importDefault(require("clipboardy"));
 async function controlflow(parser, remainingLine) {
-    return await (0, default_pipelines_1.
+    return await (0, default_pipelines_1.createDataflowPipeline)(parser, {
         request: (0, retriever_1.requestFromInput)(remainingLine.trim())
     }).allRemainingSteps();
 }
+function handleString(code) {
+    return code.startsWith('"') ? JSON.parse(code) : code;
+}
+function formatInfo(out, type) {
+    return out.formatter.format(`Copied ${type} to clipboard.`, { color: 7 /* Colors.White */, effect: ansi_1.ColorEffect.Foreground, style: 3 /* FontStyles.Italic */ });
+}
 exports.controlflowCommand = {
     description: `Get mermaid code for the control-flow graph of R code, start with '${retriever_1.fileProtocol}' to indicate a file`,
     usageExample: ':controlflow',
     aliases: ['cfg', 'cf'],
     script: false,
     fn: async (output, shell, remainingLine) => {
-        const result = await controlflow(shell, remainingLine);
-        const cfg = (0, cfg_1.extractCFG)(result.normalize);
-
+        const result = await controlflow(shell, handleString(remainingLine));
+        const cfg = (0, cfg_1.extractCFG)(result.normalize, result.dataflow.graph);
+        const mermaid = (0, cfg_2.cfgToMermaid)(cfg, result.normalize);
+        output.stdout(mermaid);
+        try {
+            clipboardy_1.default.writeSync(mermaid);
+            output.stdout(formatInfo(output, 'mermaid code'));
+        }
+        catch (e) { /* do nothing this is a service thing */ }
     }
 };
 exports.controlflowStarCommand = {
@@ -27,9 +44,15 @@ exports.controlflowStarCommand = {
     aliases: ['cfg*', 'cf*'],
     script: false,
     fn: async (output, shell, remainingLine) => {
-        const result = await controlflow(shell, remainingLine);
-        const cfg = (0, cfg_1.extractCFG)(result.normalize);
-
+        const result = await controlflow(shell, handleString(remainingLine));
+        const cfg = (0, cfg_1.extractCFG)(result.normalize, result.dataflow.graph);
+        const mermaid = (0, cfg_2.cfgToMermaidUrl)(cfg, result.normalize);
+        output.stdout(mermaid);
+        try {
+            clipboardy_1.default.writeSync(mermaid);
+            output.stdout(formatInfo(output, 'mermaid url'));
+        }
+        catch (e) { /* do nothing this is a service thing */ }
     }
 };
 //# sourceMappingURL=repl-cfg.js.map
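The reworked `:controlflow`/`:cfg` commands print the mermaid output first and only then try to copy it to the clipboard, silently ignoring clipboard failures (e.g. on headless machines). A sketch of that copy-and-report pattern, assuming clipboardy's synchronous `writeSync` as used above; the `stdout` callback is a stand-in for the REPL output interface:

    import clipboard from 'clipboardy';

    function printAndCopy(stdout: (msg: string) => void, mermaid: string): void {
        stdout(mermaid);                  // always print the result first
        try {
            clipboard.writeSync(mermaid); // may throw without a display/clipboard
            stdout('Copied mermaid code to clipboard.');
        } catch {
            // clipboard support is best-effort; the graph was already printed
        }
    }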
package/cli/repl/commands/repl-dataflow.js
CHANGED
@@ -1,9 +1,14 @@
 "use strict";
+var __importDefault = (this && this.__importDefault) || function (mod) {
+    return (mod && mod.__esModule) ? mod : { "default": mod };
+};
 Object.defineProperty(exports, "__esModule", { value: true });
 exports.dataflowStarCommand = exports.dataflowCommand = void 0;
 const default_pipelines_1 = require("../../../core/steps/pipeline/default-pipelines");
 const retriever_1 = require("../../../r-bridge/retriever");
 const dfg_1 = require("../../../util/mermaid/dfg");
+const clipboardy_1 = __importDefault(require("clipboardy"));
+const ansi_1 = require("../../../util/ansi");
 async function dataflow(parser, remainingLine) {
     return await (0, default_pipelines_1.createDataflowPipeline)(parser, {
         request: (0, retriever_1.requestFromInput)(remainingLine.trim())
@@ -12,6 +17,9 @@ async function dataflow(parser, remainingLine) {
 function handleString(code) {
     return code.startsWith('"') ? JSON.parse(code) : code;
 }
+function formatInfo(out, type, timing) {
+    return out.formatter.format(`Copied ${type} to clipboard (dataflow: ${timing}ms).`, { color: 7 /* Colors.White */, effect: ansi_1.ColorEffect.Foreground, style: 3 /* FontStyles.Italic */ });
+}
 exports.dataflowCommand = {
     description: `Get mermaid code for the dataflow graph of R code, start with '${retriever_1.fileProtocol}' to indicate a file`,
     usageExample: ':dataflow',
@@ -19,7 +27,13 @@ exports.dataflowCommand = {
     script: false,
     fn: async (output, shell, remainingLine) => {
         const result = await dataflow(shell, handleString(remainingLine));
-
+        const mermaid = (0, dfg_1.graphToMermaid)({ graph: result.dataflow.graph, includeEnvironments: false }).string;
+        output.stdout(mermaid);
+        try {
+            clipboardy_1.default.writeSync(mermaid);
+            output.stdout(formatInfo(output, 'mermaid code', result.dataflow['.meta'].timing));
+        }
+        catch (e) { /* do nothing this is a service thing */ }
     }
 };
 exports.dataflowStarCommand = {
@@ -29,7 +43,13 @@ exports.dataflowStarCommand = {
     script: false,
     fn: async (output, shell, remainingLine) => {
         const result = await dataflow(shell, handleString(remainingLine));
-
+        const mermaid = (0, dfg_1.graphToMermaidUrl)(result.dataflow.graph, false);
+        output.stdout(mermaid);
+        try {
+            clipboardy_1.default.writeSync(mermaid);
+            output.stdout(formatInfo(output, 'mermaid url', result.dataflow['.meta'].timing));
+        }
+        catch (e) { /* do nothing this is a service thing */ }
     }
 };
 //# sourceMappingURL=repl-dataflow.js.map
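As with `:cfg`, the `:dataflow` commands now pass their input through `handleString`, so code may be given either raw or as a JSON-quoted string (handy when it begins with a quote or spans multiple lines). A small sketch of what the unquoting does; the example inputs are illustrative only:

    // mirrors the handleString helper in the diff above
    function handleString(code: string): string {
        return code.startsWith('"') ? JSON.parse(code) as string : code;
    }

    handleString('x <- 1');        // -> 'x <- 1' (left untouched)
    handleString('"x <- 1\\ny"');  // -> 'x <- 1' + newline + 'y' (JSON-unquoted)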
package/cli/repl/commands/repl-normalize.js
CHANGED
@@ -1,9 +1,14 @@
 "use strict";
+var __importDefault = (this && this.__importDefault) || function (mod) {
+    return (mod && mod.__esModule) ? mod : { "default": mod };
+};
 Object.defineProperty(exports, "__esModule", { value: true });
 exports.normalizeStarCommand = exports.normalizeCommand = void 0;
 const default_pipelines_1 = require("../../../core/steps/pipeline/default-pipelines");
 const retriever_1 = require("../../../r-bridge/retriever");
 const ast_1 = require("../../../util/mermaid/ast");
+const clipboardy_1 = __importDefault(require("clipboardy"));
+const ansi_1 = require("../../../util/ansi");
 async function normalize(parser, remainingLine) {
     return await (0, default_pipelines_1.createNormalizePipeline)(parser, {
         request: (0, retriever_1.requestFromInput)(remainingLine.trim())
@@ -12,6 +17,9 @@ async function normalize(parser, remainingLine) {
 function handleString(code) {
     return code.startsWith('"') ? JSON.parse(code) : code;
 }
+function formatInfo(out, type, timing) {
+    return out.formatter.format(`Copied ${type} to clipboard (normalize: ${timing}ms).`, { color: 7 /* Colors.White */, effect: ansi_1.ColorEffect.Foreground, style: 3 /* FontStyles.Italic */ });
+}
 exports.normalizeCommand = {
     description: `Get mermaid code for the normalized AST of R code, start with '${retriever_1.fileProtocol}' to indicate a file`,
     usageExample: ':normalize',
@@ -19,7 +27,13 @@ exports.normalizeCommand = {
     script: false,
     fn: async (output, shell, remainingLine) => {
         const result = await normalize(shell, handleString(remainingLine));
-
+        const mermaid = (0, ast_1.normalizedAstToMermaid)(result.normalize.ast);
+        output.stdout(mermaid);
+        try {
+            clipboardy_1.default.writeSync(mermaid);
+            output.stdout(formatInfo(output, 'mermaid url', result.normalize['.meta'].timing));
+        }
+        catch (e) { /* do nothing this is a service thing */ }
     }
 };
 exports.normalizeStarCommand = {
@@ -29,7 +43,13 @@ exports.normalizeStarCommand = {
     script: false,
     fn: async (output, shell, remainingLine) => {
         const result = await normalize(shell, handleString(remainingLine));
-
+        const mermaid = (0, ast_1.normalizedAstToMermaidUrl)(result.normalize.ast);
+        output.stdout(mermaid);
+        try {
+            clipboardy_1.default.writeSync(mermaid);
+            output.stdout(formatInfo(output, 'mermaid url', result.normalize['.meta'].timing));
+        }
+        catch (e) { /* do nothing this is a service thing */ }
     }
 };
 //# sourceMappingURL=repl-normalize.js.map
package/cli/repl/commands/repl-parse.js
CHANGED
@@ -93,8 +93,14 @@ exports.parseCommand = {
         const result = await (0, default_pipelines_1.createParsePipeline)(parser, {
             request: (0, retriever_1.requestFromInput)((0, retriever_1.removeRQuotes)(remainingLine.trim()))
         }).allRemainingSteps();
-
-
+        if (parser.name === 'r-shell') {
+            const object = (0, format_1.convertPreparedParsedData)((0, format_1.prepareParsedData)(result.parse.parsed));
+            output.stdout(depthListToTextTree(toDepthMap(object), output.formatter));
+        }
+        else {
+            // print the tree-sitter ast
+            output.stdout(JSON.stringify(result.parse.parsed, null, 2));
+        }
     }
 };
 //# sourceMappingURL=repl-parse.js.map
package/cli/repl/core.js
CHANGED
@@ -48,6 +48,7 @@ const scripts_info_1 = require("../common/scripts-info");
 const retriever_1 = require("../../r-bridge/retriever");
 const repl_main_1 = require("./commands/repl-main");
 const shell_1 = require("../../r-bridge/shell");
+const log_1 = require("../../util/log");
 let _replCompleterKeywords = undefined;
 function replCompleterKeywords() {
     if (_replCompleterKeywords === undefined) {
@@ -107,6 +108,9 @@ async function replProcessStatement(output, statement, parser, allowRSessionAcce
         }
         catch (e) {
             output.stdout(`${bold(`Failed to execute command ${command}`)}: ${e?.message}. Using the ${bold('--verbose')} flag on startup may provide additional information.\n`);
+            if (log_1.log.settings.minLevel < 6 /* LogLevel.Fatal */) {
+                console.error(e);
+            }
         }
     }
     else {
package/cli/repl/server/connection.js
CHANGED
@@ -152,7 +152,7 @@ class FlowRServerConnection {
     async sendFileAnalysisResponse(results, message) {
         let cfg = undefined;
         if (message.cfg) {
-            cfg = (0, cfg_1.extractCFG)(results.normalize);
+            cfg = (0, cfg_1.extractCFG)(results.normalize, results.dataflow?.graph);
         }
         const config = () => ({ context: message.filename ?? 'unknown', getId: (0, quads_1.defaultQuadIdGenerator)() });
         const sanitizedResults = sanitizeAnalysisResults(results);
package/cli/script-core/statistics-helper-core.js
CHANGED
@@ -59,7 +59,7 @@ async function getStatsForSingleFile(options) {
     if (stats.outputs.size === 1) {
         if (options['dump-json']) {
             const [, output] = [...stats.outputs.entries()][0];
-            const cfg = (0, cfg_1.extractCFG)(output.normalize);
+            const cfg = (0, cfg_1.extractCFG)(output.normalize, output.dataflow.graph);
             statistics_file_1.statisticsFileProvider.append('output-json', 'parse', await (0, print_1.printStepResult)(_00_parse_1.PARSE_WITH_R_SHELL_STEP, output.parse, 2 /* StepOutputFormat.Json */));
             statistics_file_1.statisticsFileProvider.append('output-json', 'normalize', await (0, print_1.printStepResult)(_10_normalize_1.NORMALIZE, output.normalize, 2 /* StepOutputFormat.Json */));
             statistics_file_1.statisticsFileProvider.append('output-json', 'dataflow', await (0, print_1.printStepResult)(_20_dataflow_1.STATIC_DATAFLOW, output.dataflow, 2 /* StepOutputFormat.Json */));
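The change shared by connection.js, statistics-helper-core.js, extractor.js, and doc-cfg.js is that `extractCFG` now takes the dataflow graph as an additional argument, letting the control-flow graph be built with dataflow information. A hedged usage sketch; the import specifier and the result shape are assumptions based on the file paths in this package:

    import { extractCFG } from '@eagleoutice/flowr/util/cfg/cfg';

    // assumed: the result object of a completed dataflow pipeline run
    declare const results: {
        normalize: Parameters<typeof extractCFG>[0];
        dataflow?: { graph: Parameters<typeof extractCFG>[1] };
    };

    // new in 2.2.2: hand the dataflow graph to extractCFG; callers above pass it
    // as `dataflow?.graph`, so the argument appears to be optional
    const cfg = extractCFG(results.normalize, results.dataflow?.graph);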
package/config.js
CHANGED
@@ -31,7 +31,6 @@ const defaultEngineConfigs = {
 };
 exports.defaultConfigOptions = {
     ignoreSourceCalls: false,
-    rPath: undefined,
     semantics: {
         environment: {
             overwriteBuiltIns: {
@@ -126,6 +125,14 @@ function getEngineConfig(engine) {
 }
 function loadConfigFromFile(configFile, workingDirectory) {
     if (configFile !== undefined) {
+        if (path_1.default.isAbsolute(configFile) && fs_1.default.existsSync(configFile)) {
+            log_1.log.trace(`Found config at ${configFile} (absolute)`);
+            const ret = parseConfig(fs_1.default.readFileSync(configFile, { encoding: 'utf-8' }));
+            if (ret) {
+                log_1.log.info(`Using config ${JSON.stringify(ret)}`);
+                return ret;
+            }
+        }
         let searchPath = path_1.default.resolve(workingDirectory);
         do {
             const configPath = path_1.default.join(searchPath, configFile);
package/dataflow/extractor.js
CHANGED
@@ -57,7 +57,7 @@ function resolveLinkToSideEffects(ast, graph) {
         if (typeof s !== 'object') {
             continue;
         }
-        cfg ??= (0, cfg_1.extractCFG)(ast).graph;
+        cfg ??= (0, cfg_1.extractCFG)(ast, graph).graph;
         /* this has to change whenever we add a new link to relations because we currently offer no abstraction for the type */
         const potentials = (0, identify_link_to_last_call_relation_1.identifyLinkToLastCallRelation)(s.id, cfg, graph, s.linkTo);
         for (const pot of potentials) {
package/dataflow/graph/dataflowgraph-builder.d.ts
CHANGED
@@ -85,57 +85,58 @@ export declare class DataflowGraphBuilder extends DataflowGraph {
     /**
      * Adds a **read edge** (E1).
      *
-     * @param from -
-     * @param to -
+     * @param from - NodeId of the source vertex
+     * @param to - Either a single or multiple target ids.
+     * If you pass multiple this will construct a single edge for each of them.
      */
     reads(from: NodeId, to: DataflowGraphEdgeTarget): this;
     /**
      * Adds a **defined-by edge** (E2), with from as defined variable, and to
      * as a variable/function contributing to its definition.
      *
-     * @see reads for parameters.
+     * @see {@link DataflowGraphBuilder#reads|reads} for parameters.
      */
     definedBy(from: NodeId, to: DataflowGraphEdgeTarget): this;
     /**
      * Adds a **call edge** (E5) with from as caller, and to as callee.
      *
-     * @see reads for parameters.
+     * @see {@link DataflowGraphBuilder#reads|reads} for parameters.
      */
     calls(from: NodeId, to: DataflowGraphEdgeTarget): this;
     /**
      * Adds a **return edge** (E6) with from as function, and to as exit point.
      *
-     * @see reads for parameters.
+     * @see {@link DataflowGraphBuilder#reads|reads} for parameters.
      */
     returns(from: NodeId, to: DataflowGraphEdgeTarget): this;
     /**
      * Adds a **defines-on-call edge** (E7) with from as variable, and to as its definition
      *
-     * @see reads for parameters.
+     * @see {@link DataflowGraphBuilder#reads|reads} for parameters.
      */
     definesOnCall(from: NodeId, to: DataflowGraphEdgeTarget): this;
     /**
      * Adds a **defined-by-on-call edge** with from as definition, and to as variable.
      *
-     * @see reads for parameters.
+     * @see {@link DataflowGraphBuilder#reads|reads} for parameters.
      */
     definedByOnCall(from: NodeId, to: DataflowGraphEdgeTarget): this;
     /**
      * Adds an **argument edge** (E9) with from as function call, and to as argument.
      *
-     * @see reads for parameters.
+     * @see {@link DataflowGraphBuilder#reads|reads} for parameters.
      */
     argument(from: NodeId, to: DataflowGraphEdgeTarget): this;
     /**
      * Adds a **non-standard evaluation edge** with from as vertex, and to as vertex.
      *
-     * @see reads for parameters.
+     * @see {@link DataflowGraphBuilder#reads|reads} for parameters.
      */
     nse(from: NodeId, to: DataflowGraphEdgeTarget): this;
     /**
      * Adds a **side-effect-on-call edge** with from as vertex, and to as vertex.
      *
-     * @see reads for parameters.
+     * @see {@link DataflowGraphBuilder#reads|reads} for parameters.
      */
     sideEffectOnCall(from: NodeId, to: DataflowGraphEdgeTarget): this;
     /**
package/dataflow/graph/dataflowgraph-builder.js
CHANGED
@@ -173,8 +173,9 @@ class DataflowGraphBuilder extends graph_1.DataflowGraph {
     /**
      * Adds a **read edge** (E1).
      *
-     * @param from -
-     * @param to -
+     * @param from - NodeId of the source vertex
+     * @param to - Either a single or multiple target ids.
+     * If you pass multiple this will construct a single edge for each of them.
      */
     reads(from, to) {
         return this.edgeHelper(from, to, edge_1.EdgeType.Reads);
@@ -183,7 +184,7 @@ class DataflowGraphBuilder extends graph_1.DataflowGraph {
      * Adds a **defined-by edge** (E2), with from as defined variable, and to
      * as a variable/function contributing to its definition.
      *
-     * @see reads for parameters.
+     * @see {@link DataflowGraphBuilder#reads|reads} for parameters.
      */
     definedBy(from, to) {
         return this.edgeHelper(from, to, edge_1.EdgeType.DefinedBy);
@@ -191,7 +192,7 @@ class DataflowGraphBuilder extends graph_1.DataflowGraph {
     /**
      * Adds a **call edge** (E5) with from as caller, and to as callee.
      *
-     * @see reads for parameters.
+     * @see {@link DataflowGraphBuilder#reads|reads} for parameters.
      */
     calls(from, to) {
         return this.edgeHelper(from, to, edge_1.EdgeType.Calls);
@@ -199,7 +200,7 @@ class DataflowGraphBuilder extends graph_1.DataflowGraph {
     /**
      * Adds a **return edge** (E6) with from as function, and to as exit point.
      *
-     * @see reads for parameters.
+     * @see {@link DataflowGraphBuilder#reads|reads} for parameters.
      */
     returns(from, to) {
         return this.edgeHelper(from, to, edge_1.EdgeType.Returns);
@@ -207,7 +208,7 @@ class DataflowGraphBuilder extends graph_1.DataflowGraph {
     /**
      * Adds a **defines-on-call edge** (E7) with from as variable, and to as its definition
      *
-     * @see reads for parameters.
+     * @see {@link DataflowGraphBuilder#reads|reads} for parameters.
      */
     definesOnCall(from, to) {
         return this.edgeHelper(from, to, edge_1.EdgeType.DefinesOnCall);
@@ -215,7 +216,7 @@ class DataflowGraphBuilder extends graph_1.DataflowGraph {
     /**
      * Adds a **defined-by-on-call edge** with from as definition, and to as variable.
      *
-     * @see reads for parameters.
+     * @see {@link DataflowGraphBuilder#reads|reads} for parameters.
      */
     definedByOnCall(from, to) {
         return this.edgeHelper(from, to, edge_1.EdgeType.DefinedByOnCall);
@@ -223,7 +224,7 @@ class DataflowGraphBuilder extends graph_1.DataflowGraph {
     /**
      * Adds an **argument edge** (E9) with from as function call, and to as argument.
      *
-     * @see reads for parameters.
+     * @see {@link DataflowGraphBuilder#reads|reads} for parameters.
      */
     argument(from, to) {
         return this.edgeHelper(from, to, edge_1.EdgeType.Argument);
@@ -231,7 +232,7 @@ class DataflowGraphBuilder extends graph_1.DataflowGraph {
     /**
      * Adds a **non-standard evaluation edge** with from as vertex, and to as vertex.
      *
-     * @see reads for parameters.
+     * @see {@link DataflowGraphBuilder#reads|reads} for parameters.
      */
     nse(from, to) {
         return this.edgeHelper(from, to, edge_1.EdgeType.NonStandardEvaluation);
@@ -239,7 +240,7 @@ class DataflowGraphBuilder extends graph_1.DataflowGraph {
     /**
      * Adds a **side-effect-on-call edge** with from as vertex, and to as vertex.
      *
-     * @see reads for parameters.
+     * @see {@link DataflowGraphBuilder#reads|reads} for parameters.
      */
     sideEffectOnCall(from, to) {
         return this.edgeHelper(from, to, edge_1.EdgeType.SideEffectOnCall);
package/dataflow/graph/edge.d.ts
CHANGED
@@ -78,7 +78,7 @@ export declare const enum TraverseEdge {
  *
  * Counterpart of {@link edgeDoesNotIncludeType}.
  */
-export declare function edgeIncludesType(type: EdgeTypeBits,
+export declare function edgeIncludesType(type: EdgeTypeBits, typesToInclude: EdgeTypeBits): boolean;
 /**
  * Check if the given-edge type does not include the given type.
  * Counterpart of {@link edgeIncludesType}.
package/dataflow/graph/edge.js
CHANGED
@@ -85,8 +85,8 @@ function edgeTypesToNames(bits) {
  *
  * Counterpart of {@link edgeDoesNotIncludeType}.
  */
-function edgeIncludesType(type,
-    return (
+function edgeIncludesType(type, typesToInclude) {
+    return (typesToInclude & type) !== 0;
 }
 /**
  * Check if the given-edge type does not include the given type.
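The previously truncated body of `edgeIncludesType` is now visible: it is a plain bit test over the edge-type bitmask. A small usage sketch; the flag values below are placeholders, the real bits come from flowR's `EdgeType` enum in dataflow/graph/edge:

    // placeholder bit flags standing in for flowR's EdgeType bits
    const Reads    = 0b001;
    const Calls    = 0b010;
    const Argument = 0b100;

    function edgeIncludesType(type: number, typesToInclude: number): boolean {
        return (typesToInclude & type) !== 0; // same body as in the diff above
    }

    const edgeTypes = Reads | Argument;  // an edge may carry several types at once
    edgeIncludesType(Reads, edgeTypes);  // true
    edgeIncludesType(Calls, edgeTypes);  // false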
package/dataflow/graph/vertex.d.ts
CHANGED
@@ -21,7 +21,7 @@ export declare const ValidVertexTypeReverse: {
  */
 export interface ContainerLeafIndex {
     /**
-     *
+     * Distinctive lexeme of index e.g. 'name' for `list(name = 'John')`
      */
     readonly lexeme: string;
     /**
@@ -201,21 +201,21 @@ export type DataflowGraphVertices<Vertex extends DataflowGraphVertexInfo = Dataf
 /**
  * Check if the given vertex is a {@link DataflowGraphVertexValue|value vertex}.
  */
-export declare function isValueVertex(vertex
+export declare function isValueVertex(vertex?: DataflowGraphVertexBase): vertex is DataflowGraphVertexValue;
 /**
  * Check if the given vertex is a {@link DataflowGraphVertexUse|use vertex}.
  */
-export declare function isUseVertex(vertex
+export declare function isUseVertex(vertex?: DataflowGraphVertexBase): vertex is DataflowGraphVertexUse;
 /**
  * Check if the given vertex is a {@link DataflowGraphVertexFunctionCall|function call vertex}.
  */
-export declare function isFunctionCallVertex(vertex
+export declare function isFunctionCallVertex(vertex?: DataflowGraphVertexBase): vertex is DataflowGraphVertexFunctionCall;
 /**
  * Check if the given vertex is a {@link DataflowGraphVertexVariableDefinition|variable definition vertex}.
  */
-export declare function isVariableDefinitionVertex(vertex
+export declare function isVariableDefinitionVertex(vertex?: DataflowGraphVertexBase): vertex is DataflowGraphVertexVariableDefinition;
 /**
  * Check if the given vertex is a {@link DataflowGraphVertexFunctionDefinition|function definition vertex}.
  */
-export declare function isFunctionDefinitionVertex(vertex
+export declare function isFunctionDefinitionVertex(vertex?: DataflowGraphVertexBase): vertex is DataflowGraphVertexFunctionDefinition;
 export {};
package/dataflow/graph/vertex.js
CHANGED
@@ -24,30 +24,30 @@ function isParentContainerIndex(index) {
  * Check if the given vertex is a {@link DataflowGraphVertexValue|value vertex}.
  */
 function isValueVertex(vertex) {
-    return vertex
+    return vertex?.tag === VertexType.Value;
 }
 /**
  * Check if the given vertex is a {@link DataflowGraphVertexUse|use vertex}.
  */
 function isUseVertex(vertex) {
-    return vertex
+    return vertex?.tag === VertexType.Use;
 }
 /**
  * Check if the given vertex is a {@link DataflowGraphVertexFunctionCall|function call vertex}.
  */
 function isFunctionCallVertex(vertex) {
-    return vertex
+    return vertex?.tag === VertexType.FunctionCall;
 }
 /**
  * Check if the given vertex is a {@link DataflowGraphVertexVariableDefinition|variable definition vertex}.
  */
 function isVariableDefinitionVertex(vertex) {
-    return vertex
+    return vertex?.tag === VertexType.VariableDefinition;
 }
 /**
  * Check if the given vertex is a {@link DataflowGraphVertexFunctionDefinition|function definition vertex}.
  */
 function isFunctionDefinitionVertex(vertex) {
-    return vertex
+    return vertex?.tag === VertexType.FunctionDefinition;
 }
 //# sourceMappingURL=vertex.js.map
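The vertex type guards now take an optional vertex and test `vertex?.tag`, so they can be applied directly to the possibly-undefined result of a graph lookup. A hedged usage sketch; the import specifier and the `getVertex` lookup are assumptions for illustration:

    import { isFunctionCallVertex } from '@eagleoutice/flowr/dataflow/graph/vertex';

    // hypothetical lookup that may or may not find a vertex for the given id
    declare function getVertex(id: string): Parameters<typeof isFunctionCallVertex>[0];

    const vertex = getVertex('42');
    // no separate undefined check needed: the guard handles undefined itself
    if (isFunctionCallVertex(vertex)) {
        console.log('vertex 42 is a function call');
    }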
package/dataflow/internal/process/functions/call/built-in/built-in-access.js
CHANGED
@@ -1,7 +1,6 @@
 "use strict";
 Object.defineProperty(exports, "__esModule", { value: true });
 exports.processAccess = processAccess;
-const assert_1 = require("../../../../../../util/assert");
 const known_call_handling_1 = require("../known-call-handling");
 const r_function_call_1 = require("../../../../../../r-bridge/lang-4.x/ast/model/nodes/r-function-call");
 const logger_1 = require("../../../../../logger");
@@ -35,9 +34,12 @@ function processAccess(name, args, rootId, data, config) {
         return (0, known_call_handling_1.processKnownFunctionCall)({ name, args, rootId, data, forceArgs: config.forceArgs }).information;
     }
     const head = args[0];
-    (0, assert_1.guard)(head !== r_function_call_1.EmptyArgument, () => `Access ${name.content} has no source, impossible!`);
     let fnCall;
-    if (
+    if (head === r_function_call_1.EmptyArgument) {
+        // in this case we may be within a pipe
+        fnCall = (0, known_call_handling_1.processKnownFunctionCall)({ name, args, rootId, data, forceArgs: config.forceArgs });
+    }
+    else if (!config.treatIndicesAsString) {
         /* within an access operation which treats its fields, we redefine the table assignment ':=' as a trigger if this is to be treated as a definition */
         // do we have a local definition that needs to be recovered?
         fnCall = processNumberBasedAccess(data, name, args, rootId, config, head);
@@ -46,7 +48,9 @@ function processAccess(name, args, rootId, data, config) {
         fnCall = processStringBasedAccess(args, data, name, rootId, config);
     }
     const info = fnCall.information;
-
+    if (head !== r_function_call_1.EmptyArgument) {
+        info.graph.addEdge(name.info.id, fnCall.processedArguments[0]?.entryPoint ?? head.info.id, edge_1.EdgeType.Returns);
+    }
     /* access always reads all of its indices */
     for (const arg of fnCall.processedArguments) {
         if (arg !== undefined) {
@@ -70,7 +74,7 @@ function processAccess(name, args, rootId, data, config) {
         unknownReferences: (0, environment_1.makeAllMaybe)(info.unknownReferences, info.graph, info.environment, false),
         entryPoint: rootId,
         /** it is, to be precise, the accessed element we want to map to maybe */
-        in: info.in.map(ref => {
+        in: head === r_function_call_1.EmptyArgument ? info.in : info.in.map(ref => {
            if (ref.nodeId === head.value?.info.id) {
                return (0, environment_1.makeReferenceMaybe)(ref, info.graph, info.environment, false);
            }
package/dataflow/internal/process/functions/call/built-in/built-in-source.js
CHANGED
@@ -83,10 +83,10 @@ function sourceRequest(rootId, request, data, information, getId) {
     }
     // take the entry point as well as all the written references, and give them a control dependency to the source call to show that they are conditional
     if (dataflow.graph.hasVertex(dataflow.entryPoint)) {
-        dataflow.graph.addControlDependency(dataflow.entryPoint, rootId);
+        dataflow.graph.addControlDependency(dataflow.entryPoint, rootId, true);
     }
     for (const out of dataflow.out) {
-        dataflow.graph.addControlDependency(out.nodeId, rootId);
+        dataflow.graph.addControlDependency(out.nodeId, rootId, true);
     }
     // update our graph with the sourced file's information
     const newInformation = { ...information };
package/documentation/doc-util/doc-cfg.js
CHANGED
@@ -6,12 +6,12 @@ const pipeline_executor_1 = require("../../core/pipeline-executor");
 const default_pipelines_1 = require("../../core/steps/pipeline/default-pipelines");
 const retriever_1 = require("../../r-bridge/retriever");
 async function getCfg(shell, code) {
-    const steps = await new pipeline_executor_1.PipelineExecutor(default_pipelines_1.
+    const steps = await new pipeline_executor_1.PipelineExecutor(default_pipelines_1.DEFAULT_DATAFLOW_PIPELINE, {
         parser: shell,
         request: (0, retriever_1.requestFromInput)(code)
     }).allRemainingSteps();
     return {
-        info: (0, cfg_1.extractCFG)(steps.normalize),
+        info: (0, cfg_1.extractCFG)(steps.normalize, steps.dataflow?.graph),
         ast: steps.normalize
     };
 }
package/documentation/doc-util/doc-types.js
CHANGED
@@ -312,9 +312,9 @@ function implSnippet(node, program, showName = true, nesting = 0) {
     const sep = node.comments ? ' \n' : '\n';
     let text = node.comments?.join('\n') ?? '';
     const code = node.node.getFullText(program.getSourceFile(node.node.getSourceFile().fileName));
-    text += `\n<details><summary style="color:gray">Defined at <a href="${getTypePathLink(node)}">${getTypePathLink(node, '.')}</a></summary>\n\n${(0, doc_code_1.codeBlock)('ts', code)}\n\n</details>\n`;
-    const init = showName ?
-    return ` ${indent}
+    text += `\n${indent}<details><summary style="color:gray">Defined at <a href="${getTypePathLink(node)}">${getTypePathLink(node, '.')}</a></summary>\n\n${(0, doc_code_1.codeBlock)('ts', code)}\n\n</details>\n`;
+    const init = showName ? `* ${bold}[${node.name}](${getTypePathLink(node)})${bold} ${sep}${indent}` : '';
+    return ` ${indent}${showName ? init : ''} ${text.replaceAll('\t', ' ').split(/\n/g).join(`\n${indent} `)}`;
 }
 exports.mermaidHide = ['Leaf', 'Location', 'Namespace', 'Base', 'WithChildren', 'Partial', 'RAccessBase'];
 function printHierarchy({ program, hierarchy, root, collapseFromNesting = 1, initialNesting = 0, maxDepth = 20 }) {
package/documentation/print-interface-wiki.js
CHANGED
@@ -175,7 +175,6 @@ ${(0, doc_code_1.codeBlock)('shell', ':query @config')}
 The following summarizes the configuration options:
 
 - \`ignoreSourceCalls\`: If set to \`true\`, _flowR_ will ignore source calls when analyzing the code, i.e., ignoring the inclusion of other files.
-- \`rPath\`: The path to the R executable. If not set, _flowR_ will try to find the R executable in the system's PATH.
 - \`semantics\`: allows to configure the way _flowR_ handles R, although we currently only support \`semantics/environment/overwriteBuiltIns\`.
   You may use this to overwrite _flowR_'s handling of built-in function and even completely clear the preset definitions shipped with flowR.
   See [Configure BuiltIn Semantics](#configure-builtin-semantics) for more information.
@@ -191,7 +190,6 @@ So you can configure _flowR_ by adding a file like the following:
 
 ${(0, doc_code_1.codeBlock)('json', JSON.stringify({
     ignoreSourceCalls: true,
-    rPath: '/usr/bin/R',
     semantics: {
         environment: {
             overwriteBuiltIns: {