@eagleoutice/flowr 2.2.2 → 2.2.3
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/cli/repl/commands/repl-dataflow.js +7 -4
- package/cli/repl/commands/repl-parse.js +43 -2
- package/cli/repl/print-version.d.ts +1 -0
- package/cli/repl/print-version.js +7 -2
- package/cli/repl/server/connection.js +10 -8
- package/core/pipeline-executor.d.ts +6 -0
- package/core/pipeline-executor.js +8 -0
- package/core/print/dataflow-printer.js +3 -0
- package/core/steps/all/core/01-parse-tree-sitter.d.ts +7 -0
- package/core/steps/pipeline/default-pipelines.d.ts +57 -47
- package/core/steps/pipeline/default-pipelines.js +23 -2
- package/core/steps/pipeline/pipeline.d.ts +1 -1
- package/core/steps/pipeline/pipeline.js +1 -1
- package/core/steps/pipeline-step.d.ts +1 -3
- package/dataflow/environments/resolve-by-name.d.ts +3 -2
- package/dataflow/environments/resolve-by-name.js +4 -4
- package/dataflow/extractor.d.ts +10 -0
- package/dataflow/extractor.js +10 -0
- package/dataflow/internal/process/functions/call/built-in/built-in-if-then-else.js +1 -1
- package/dataflow/internal/process/functions/call/built-in/built-in-source.js +20 -4
- package/documentation/doc-util/doc-dfg.d.ts +5 -3
- package/documentation/doc-util/doc-dfg.js +10 -8
- package/documentation/doc-util/doc-files.d.ts +1 -1
- package/documentation/doc-util/doc-files.js +1 -1
- package/documentation/doc-util/doc-normalized-ast.d.ts +2 -1
- package/documentation/doc-util/doc-normalized-ast.js +4 -5
- package/documentation/doc-util/doc-repl.d.ts +6 -2
- package/documentation/doc-util/doc-repl.js +10 -6
- package/documentation/doc-util/doc-structure.d.ts +1 -1
- package/documentation/doc-util/doc-types.d.ts +7 -5
- package/documentation/doc-util/doc-types.js +15 -10
- package/documentation/index.d.ts +9 -0
- package/documentation/index.js +26 -0
- package/documentation/print-capabilities-markdown.js +105 -19
- package/documentation/print-core-wiki.d.ts +1 -0
- package/documentation/print-core-wiki.js +406 -0
- package/documentation/print-dataflow-graph-wiki.js +27 -27
- package/documentation/print-interface-wiki.js +1 -1
- package/documentation/print-linting-and-testing-wiki.js +26 -8
- package/documentation/print-normalized-ast-wiki.js +22 -17
- package/documentation/print-query-wiki.js +7 -7
- package/documentation/print-search-wiki.js +2 -1
- package/package.json +3 -2
- package/queries/catalog/happens-before-query/happens-before-query-format.js +1 -1
- package/queries/catalog/resolve-value-query/resolve-value-query-executor.js +1 -1
- package/queries/catalog/resolve-value-query/resolve-value-query-format.js +1 -1
- package/queries/catalog/search-query/search-query-format.js +1 -1
- package/r-bridge/data/data.d.ts +48 -7
- package/r-bridge/data/data.js +62 -8
- package/r-bridge/data/types.d.ts +7 -1
- package/r-bridge/lang-4.x/ast/model/processing/decorate.d.ts +2 -0
- package/r-bridge/lang-4.x/ast/model/processing/node-id.js +2 -5
- package/r-bridge/lang-4.x/ast/parser/json/format.d.ts +6 -0
- package/r-bridge/lang-4.x/ast/parser/json/format.js +6 -0
- package/r-bridge/lang-4.x/ast/parser/json/parser.d.ts +13 -2
- package/r-bridge/lang-4.x/ast/parser/json/parser.js +19 -3
- package/r-bridge/lang-4.x/ast/parser/main/internal/structure/normalize-root.d.ts +3 -0
- package/r-bridge/lang-4.x/ast/parser/main/internal/structure/normalize-root.js +3 -0
- package/r-bridge/lang-4.x/tree-sitter/tree-sitter-normalize.js +6 -1
- package/r-bridge/parser.d.ts +10 -0
- package/r-bridge/parser.js +26 -2
- package/search/flowr-search-builder.d.ts +1 -2
- package/search/flowr-search-builder.js +1 -3
- package/util/mermaid/dfg.d.ts +3 -0
- package/util/mermaid/dfg.js +24 -8
- package/util/strings.d.ts +9 -0
- package/util/strings.js +14 -0
- package/util/version.js +1 -1
package/cli/repl/commands/repl-dataflow.js
CHANGED
@@ -9,9 +9,12 @@ const retriever_1 = require("../../../r-bridge/retriever");
 const dfg_1 = require("../../../util/mermaid/dfg");
 const clipboardy_1 = __importDefault(require("clipboardy"));
 const ansi_1 = require("../../../util/ansi");
-
+/**
+ * Obtain the dataflow graph using a known parser (such as the {@link RShell} or {@link TreeSitterExecutor}).
+ */
+async function replGetDataflow(parser, code) {
 return await (0, default_pipelines_1.createDataflowPipeline)(parser, {
-request: (0, retriever_1.requestFromInput)(
+request: (0, retriever_1.requestFromInput)(code.trim())
 }).allRemainingSteps();
 }
 function handleString(code) {
@@ -26,7 +29,7 @@ exports.dataflowCommand = {
 aliases: ['d', 'df'],
 script: false,
 fn: async (output, shell, remainingLine) => {
-const result = await
+const result = await replGetDataflow(shell, handleString(remainingLine));
 const mermaid = (0, dfg_1.graphToMermaid)({ graph: result.dataflow.graph, includeEnvironments: false }).string;
 output.stdout(mermaid);
 try {
@@ -42,7 +45,7 @@ exports.dataflowStarCommand = {
 aliases: ['d*', 'df*'],
 script: false,
 fn: async (output, shell, remainingLine) => {
-const result = await
+const result = await replGetDataflow(shell, handleString(remainingLine));
 const mermaid = (0, dfg_1.graphToMermaidUrl)(result.dataflow.graph, false);
 output.stdout(mermaid);
 try {
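The reworked REPL commands above now funnel both `:dataflow` and `:dataflow*` through the new `replGetDataflow` helper. A minimal sketch of the same call sequence outside the REPL, assuming the internal module paths visible in the diff are importable by consumers and that `TreeSitterExecutor.initTreeSitter()` is the engine's one-time init hook:

```ts
import { TreeSitterExecutor } from '@eagleoutice/flowr/r-bridge/lang-4.x/tree-sitter/tree-sitter-executor';
import { createDataflowPipeline } from '@eagleoutice/flowr/core/steps/pipeline/default-pipelines';
import { requestFromInput } from '@eagleoutice/flowr/r-bridge/retriever';
import { graphToMermaid } from '@eagleoutice/flowr/util/mermaid/dfg';

// Build the dataflow graph for a snippet of R code and render it as mermaid,
// mirroring the body of replGetDataflow plus the :dataflow command handler above.
async function dataflowMermaid(code: string): Promise<string> {
    await TreeSitterExecutor.initTreeSitter(); // assumed one-time engine initialization
    const parser = new TreeSitterExecutor();
    const result = await createDataflowPipeline(parser, {
        request: requestFromInput(code.trim())
    }).allRemainingSteps();
    return graphToMermaid({ graph: result.dataflow.graph, includeEnvironments: false }).string;
}
```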
package/cli/repl/commands/repl-parse.js
CHANGED
@@ -17,7 +17,41 @@ function toDepthMap(entry) {
 result.push({ ...current, leaf: children.length === 0 });
 children.reverse();
 const nextDepth = current.depth + 1;
-
+for (const c of children) {
+visit.push({ depth: nextDepth, node: c });
+}
+}
+return result;
+}
+function treeSitterToJsonEntry(node) {
+return {
+token: node.type,
+children: [],
+text: node.text,
+id: node.id,
+parent: node.parent?.id ?? -1,
+terminal: node.isNamed,
+line1: node.startPosition.row + 1,
+col1: node.startPosition.column + 1,
+line2: node.endPosition.row + 1,
+col2: node.endPosition.column + 1
+};
+}
+function treeSitterToDepthList(node) {
+const visit = [{ depth: 0, node }];
+const result = [];
+while (visit.length > 0) {
+const current = visit.pop();
+if (current === undefined) {
+continue;
+}
+const children = current.node.children;
+result.push({ depth: current.depth, node: treeSitterToJsonEntry(current.node), leaf: children.length === 0 });
+children.reverse();
+const nextDepth = current.depth + 1;
+for (const c of children) {
+visit.push({ depth: nextDepth, node: c });
+}
 }
 return result;
 }
@@ -57,6 +91,9 @@ function retrieveLocationString(locationRaw) {
 if (extracted[0] === extracted[2] && extracted[1] === extracted[3]) {
 return ` (${extracted[0]}:${extracted[1]})`;
 }
+else if (extracted[0] === extracted[2]) {
+return ` (${extracted[0]}:${extracted[1]}─${extracted[3]})`;
+}
 else {
 return ` (${extracted[0]}:${extracted[1]}─${extracted[2]}:${extracted[3]})`;
 }
@@ -66,6 +103,10 @@ function depthListToTextTree(list, f) {
 const deadDepths = new Set();
 let i = 0;
 for (const { depth, node, leaf } of list) {
+if (depth > 10) {
+result += '...';
+break;
+}
 const nextDepth = i + 1 < list.length ? list[i + 1].depth : 0;
 deadDepths.delete(depth);
 result += initialIndentation(i, depth, deadDepths, nextDepth, list, f);
@@ -99,7 +140,7 @@ exports.parseCommand = {
 }
 else {
 // print the tree-sitter ast
-output.stdout(
+output.stdout(depthListToTextTree(treeSitterToDepthList(result.parse.parsed.rootNode), output.formatter));
 }
 }
 };
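The new `treeSitterToDepthList` above flattens a tree-sitter tree into a pre-order list of `{ depth, node, leaf }` entries, which `depthListToTextTree` then renders (now capped at depth 10). A self-contained sketch of the same stack-based traversal pattern, using a stand-in node type rather than the real `web-tree-sitter` `SyntaxNode`:

```ts
interface SimpleNode { readonly type: string; readonly children: readonly SimpleNode[] }
interface DepthEntry { readonly depth: number; readonly node: SimpleNode; readonly leaf: boolean }

// Explicit-stack pre-order traversal producing a flat depth list, like the helper above.
function toDepthList(root: SimpleNode): DepthEntry[] {
    const visit: { depth: number, node: SimpleNode }[] = [{ depth: 0, node: root }];
    const result: DepthEntry[] = [];
    while (visit.length > 0) {
        const current = visit.pop();
        if (current === undefined) {
            continue;
        }
        const children = current.node.children;
        result.push({ depth: current.depth, node: current.node, leaf: children.length === 0 });
        // push children in reverse so the first child is popped (and therefore listed) first
        for (let i = children.length - 1; i >= 0; i--) {
            visit.push({ depth: current.depth + 1, node: children[i] });
        }
    }
    return result;
}
```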
package/cli/repl/print-version.js
CHANGED
@@ -1,9 +1,14 @@
 "use strict";
 Object.defineProperty(exports, "__esModule", { value: true });
+exports.versionReplString = versionReplString;
 exports.printVersionRepl = printVersionRepl;
 const repl_version_1 = require("./commands/repl-version");
-async function
+async function versionReplString(parser) {
 const version = await (0, repl_version_1.retrieveVersionInformation)(parser);
-
+const rVersion = version.r === 'none' ? '' : version.r === 'unknown' ? ', R version unknown' : `, R v${version.r}`;
+return `flowR repl using flowR v${version.flowr}${rVersion} (${version.engine} engine)`;
+}
+async function printVersionRepl(parser) {
+console.log(await versionReplString(parser));
 }
 //# sourceMappingURL=print-version.js.map
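`versionReplString` is now exported separately from `printVersionRepl`, so the banner can be reused without logging it. A sketch, assuming the compiled file's path is importable as-is:

```ts
import { versionReplString } from '@eagleoutice/flowr/cli/repl/print-version';

// Reuse the banner, e.g. for a server greeting; the parser type is inferred from the exported
// signature since this diff does not show it explicitly.
async function greeting(parser: Parameters<typeof versionReplString>[0]): Promise<string> {
    // Yields something shaped like "flowR repl using flowR v<version>, R v<r-version> (<engine> engine)",
    // following the template string added above; the exact R and engine labels depend on the setup.
    return versionReplString(parser);
}
```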
package/cli/repl/server/connection.js
CHANGED
@@ -38,9 +38,6 @@ const core_1 = require("../core");
 const cfg_1 = require("../../../util/cfg/cfg");
 const quads_1 = require("../../../util/quads");
 const print_1 = require("../../../core/print/print");
-const _00_parse_1 = require("../../../core/steps/all/core/00-parse");
-const _10_normalize_1 = require("../../../core/steps/all/core/10-normalize");
-const _20_dataflow_1 = require("../../../core/steps/all/core/20-dataflow");
 const ansi_1 = require("../../../util/ansi");
 const default_pipelines_1 = require("../../../core/steps/pipeline/default-pipelines");
 const graph_1 = require("../../../dataflow/graph/graph");
@@ -136,7 +133,7 @@ class FlowRServerConnection {
 }
 const tempFile = tmp.fileSync({ postfix: '.R' });
 const slicer = this.createPipelineExecutorForRequest(message, tempFile.name);
-await slicer.allRemainingSteps(false).then(async (results) => await this.sendFileAnalysisResponse(results, message))
+await slicer.allRemainingSteps(false).then(async (results) => await this.sendFileAnalysisResponse(slicer, results, message))
 .catch(e => {
 this.logger.error(`[${this.name}] Error while analyzing file ${message.filename ?? 'unknown file'}: ${String(e)}`);
 (0, send_1.sendMessage)(this.socket, {
@@ -149,13 +146,18 @@ class FlowRServerConnection {
 // this is an interestingly named function that means "I am a callback that removes a file" - so this deletes the file
 tempFile.removeCallback();
 }
-async sendFileAnalysisResponse(results, message) {
+async sendFileAnalysisResponse(slicer, results, message) {
 let cfg = undefined;
 if (message.cfg) {
 cfg = (0, cfg_1.extractCFG)(results.normalize, results.dataflow?.graph);
 }
 const config = () => ({ context: message.filename ?? 'unknown', getId: (0, quads_1.defaultQuadIdGenerator)() });
 const sanitizedResults = sanitizeAnalysisResults(results);
+const pipeline = slicer.getPipeline();
+const parseStep = pipeline.steps.get('parse');
+const normalizedStep = pipeline.steps.get('normalize');
+const dataflowStep = pipeline.steps.get('dataflow');
+(0, assert_1.guard)(parseStep !== undefined && normalizedStep !== undefined && dataflowStep !== undefined, 'All steps must be present');
 if (message.format === 'n-quads') {
 (0, send_1.sendMessage)(this.socket, {
 type: 'response-file-analysis',
@@ -163,9 +165,9 @@ class FlowRServerConnection {
 id: message.id,
 cfg: cfg ? (0, cfg_1.cfg2quads)(cfg, config()) : undefined,
 results: {
-parse: await (0, print_1.printStepResult)(
-normalize: await (0, print_1.printStepResult)(
-dataflow: await (0, print_1.printStepResult)(
+parse: await (0, print_1.printStepResult)(parseStep, sanitizedResults.parse, 5 /* StepOutputFormat.RdfQuads */, config()),
+normalize: await (0, print_1.printStepResult)(normalizedStep, sanitizedResults.normalize, 5 /* StepOutputFormat.RdfQuads */, config()),
+dataflow: await (0, print_1.printStepResult)(dataflowStep, sanitizedResults.dataflow, 5 /* StepOutputFormat.RdfQuads */, config())
 }
 });
 }
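Instead of importing the concrete `00-parse`/`10-normalize`/`20-dataflow` step objects, the server now asks the executor for its pipeline and looks the steps up by name. A type-loosened sketch of that lookup (the structural type stands in for `PipelineExecutor`/`Pipeline`; per the diff, `pipeline.steps` behaves like a map keyed by step name):

```ts
interface HasPipeline { getPipeline(): { steps: Map<string, unknown> } }

function getCoreSteps(executor: HasPipeline) {
    const pipeline = executor.getPipeline();
    const parseStep = pipeline.steps.get('parse');
    const normalizeStep = pipeline.steps.get('normalize');
    const dataflowStep = pipeline.steps.get('dataflow');
    if (parseStep === undefined || normalizeStep === undefined || dataflowStep === undefined) {
        throw new Error('All steps must be present'); // mirrors the guard added above
    }
    return { parseStep, normalizeStep, dataflowStep };
}
```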
package/core/pipeline-executor.d.ts
CHANGED
@@ -95,10 +95,16 @@ export declare class PipelineExecutor<P extends Pipeline> {
  * Construct a new pipeline executor.
  * The required additional input is specified by the {@link IPipelineStep#requiredInput|required input configuration} of each step in the `pipeline`.
  *
+ * Please see {@link createDataflowPipeline} and friends for engine agnostic shortcuts to create a pipeline executor.
+ *
  * @param pipeline - The {@link Pipeline} to execute, probably created with {@link createPipeline}.
  * @param input - External {@link PipelineInput|configuration and input} required to execute the given pipeline.
  */
 constructor(pipeline: P, input: PipelineInput<P>);
+/**
+ * Retrieve the {@link Pipeline|pipeline} that is currently being.
+ */
+getPipeline(): P;
 /**
  * Retrieve the current {@link PipelineStepStage|stage} the pipeline executor is in.
  *
package/core/pipeline-executor.js
CHANGED
@@ -99,6 +99,8 @@ class PipelineExecutor {
  * Construct a new pipeline executor.
  * The required additional input is specified by the {@link IPipelineStep#requiredInput|required input configuration} of each step in the `pipeline`.
  *
+ * Please see {@link createDataflowPipeline} and friends for engine agnostic shortcuts to create a pipeline executor.
+ *
  * @param pipeline - The {@link Pipeline} to execute, probably created with {@link createPipeline}.
  * @param input - External {@link PipelineInput|configuration and input} required to execute the given pipeline.
  */
@@ -114,6 +116,12 @@ class PipelineExecutor {
 }
 (0, built_in_config_1.registerBuiltInDefinitions)(builtIns.definitions);
 }
+/**
+ * Retrieve the {@link Pipeline|pipeline} that is currently being.
+ */
+getPipeline() {
+return this.pipeline;
+}
 /**
  * Retrieve the current {@link PipelineStepStage|stage} the pipeline executor is in.
  *
package/core/steps/all/core/01-parse-tree-sitter.d.ts
CHANGED
@@ -5,6 +5,13 @@ import type { ParseRequiredInput } from '../../../../r-bridge/parser';
 export interface ParseStepOutputTS {
 readonly parsed: Tree;
 }
+export interface TreeSitterParseJson {
+readonly '.meta': {
+readonly tokenCount: number;
+readonly tokenCountNoComments: number;
+};
+readonly str: string;
+}
 export declare const PARSE_WITH_TREE_SITTER_STEP: {
 readonly name: "parse";
 readonly humanReadableName: "parse with tree-sitter";
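`TreeSitterParseJson` appears to describe a JSON shape produced around the tree-sitter parse step. A hypothetical literal conforming to the declaration above; the field semantics are inferred from the names only, and the format of `str` is not shown in this diff:

```ts
interface TreeSitterParseJson {
    readonly '.meta': {
        readonly tokenCount: number;
        readonly tokenCountNoComments: number;
    };
    readonly str: string;
}

// Hypothetical value for illustration only.
const sample: TreeSitterParseJson = {
    '.meta': { tokenCount: 4, tokenCountNoComments: 3 },
    str: '<serialized parse output>'
};
```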
package/core/steps/pipeline/default-pipelines.d.ts
CHANGED
@@ -1,9 +1,11 @@
 /**
- * Contains the default pipeline for working with
+ * Contains the default pipeline for working with flowR
  */
 import type { PipelineInput } from './pipeline';
 import type { KnownParser, Parser } from '../../../r-bridge/parser';
 import { PipelineExecutor } from '../../pipeline-executor';
+import type { RShell } from '../../../r-bridge/shell';
+import type { TreeSitterExecutor } from '../../../r-bridge/lang-4.x/tree-sitter/tree-sitter-executor';
 export declare const DEFAULT_SLICING_PIPELINE: import("./pipeline").Pipeline<{
 readonly name: "parse";
 readonly humanReadableName: "parse with R shell";
@@ -256,21 +258,6 @@ export declare const TREE_SITTER_SLICING_PIPELINE: import("./pipeline").Pipeline
 readonly 4: typeof import("../../print/dataflow-printer").dataflowGraphToMermaidUrl;
 };
 readonly dependencies: readonly ["normalize"];
-} | {
-readonly name: "parse";
-readonly humanReadableName: "parse with tree-sitter";
-readonly description: "Parse the given R code into an AST using tree-sitter";
-readonly processor: (_results: unknown, input: Partial<import("../../../r-bridge/parser").ParseRequiredInput<import("web-tree-sitter").Tree>>) => Promise<import("../../../r-bridge/parser").ParseStepOutput<import("web-tree-sitter").Tree>>;
-readonly executed: import("../pipeline-step").PipelineStepStage.OncePerFile;
-readonly printer: {
-readonly 0: typeof import("../../print/print").internalPrinter;
-readonly 2: {
-(value: any, replacer?: (this: any, key: string, value: any) => any, space?: string | number): string;
-(value: any, replacer?: (number | string)[] | null, space?: string | number): string;
-};
-};
-readonly dependencies: readonly [];
-readonly requiredInput: import("../../../r-bridge/parser").ParseRequiredInput<import("web-tree-sitter").Tree>;
 } | {
 readonly name: "slice";
 readonly humanReadableName: "static slice";
@@ -299,6 +286,21 @@
 };
 readonly dependencies: readonly ["slice"];
 readonly requiredInput: import("../all/static-slicing/10-reconstruct").ReconstructRequiredInput;
+} | {
+readonly name: "parse";
+readonly humanReadableName: "parse with tree-sitter";
+readonly description: "Parse the given R code into an AST using tree-sitter";
+readonly processor: (_results: unknown, input: Partial<import("../../../r-bridge/parser").ParseRequiredInput<import("web-tree-sitter").Tree>>) => Promise<import("../../../r-bridge/parser").ParseStepOutput<import("web-tree-sitter").Tree>>;
+readonly executed: import("../pipeline-step").PipelineStepStage.OncePerFile;
+readonly printer: {
+readonly 0: typeof import("../../print/print").internalPrinter;
+readonly 2: {
+(value: any, replacer?: (this: any, key: string, value: any) => any, space?: string | number): string;
+(value: any, replacer?: (number | string)[] | null, space?: string | number): string;
+};
+};
+readonly dependencies: readonly [];
+readonly requiredInput: import("../../../r-bridge/parser").ParseRequiredInput<import("web-tree-sitter").Tree>;
 } | {
 readonly name: "normalize";
 readonly humanReadableName: "normalize tree-sitter tree";
@@ -337,21 +339,6 @@ export declare const TREE_SITTER_SLICE_AND_RECONSTRUCT_PIPELINE: import("./pipel
 readonly 4: typeof import("../../print/dataflow-printer").dataflowGraphToMermaidUrl;
 };
 readonly dependencies: readonly ["normalize"];
-} | {
-readonly name: "parse";
-readonly humanReadableName: "parse with tree-sitter";
-readonly description: "Parse the given R code into an AST using tree-sitter";
-readonly processor: (_results: unknown, input: Partial<import("../../../r-bridge/parser").ParseRequiredInput<import("web-tree-sitter").Tree>>) => Promise<import("../../../r-bridge/parser").ParseStepOutput<import("web-tree-sitter").Tree>>;
-readonly executed: import("../pipeline-step").PipelineStepStage.OncePerFile;
-readonly printer: {
-readonly 0: typeof import("../../print/print").internalPrinter;
-readonly 2: {
-(value: any, replacer?: (this: any, key: string, value: any) => any, space?: string | number): string;
-(value: any, replacer?: (number | string)[] | null, space?: string | number): string;
-};
-};
-readonly dependencies: readonly [];
-readonly requiredInput: import("../../../r-bridge/parser").ParseRequiredInput<import("web-tree-sitter").Tree>;
 } | {
 readonly name: "slice";
 readonly humanReadableName: "static slice";
@@ -380,6 +367,21 @@
 };
 readonly dependencies: readonly ["slice"];
 readonly requiredInput: import("../all/static-slicing/10-reconstruct").ReconstructRequiredInput;
+} | {
+readonly name: "parse";
+readonly humanReadableName: "parse with tree-sitter";
+readonly description: "Parse the given R code into an AST using tree-sitter";
+readonly processor: (_results: unknown, input: Partial<import("../../../r-bridge/parser").ParseRequiredInput<import("web-tree-sitter").Tree>>) => Promise<import("../../../r-bridge/parser").ParseStepOutput<import("web-tree-sitter").Tree>>;
+readonly executed: import("../pipeline-step").PipelineStepStage.OncePerFile;
+readonly printer: {
+readonly 0: typeof import("../../print/print").internalPrinter;
+readonly 2: {
+(value: any, replacer?: (this: any, key: string, value: any) => any, space?: string | number): string;
+(value: any, replacer?: (number | string)[] | null, space?: string | number): string;
+};
+};
+readonly dependencies: readonly [];
+readonly requiredInput: import("../../../r-bridge/parser").ParseRequiredInput<import("web-tree-sitter").Tree>;
 } | {
 readonly name: "normalize";
 readonly humanReadableName: "normalize tree-sitter tree";
@@ -418,6 +420,20 @@ export declare const TREE_SITTER_SLICE_WITHOUT_RECONSTRUCT_PIPELINE: import("./p
 readonly 4: typeof import("../../print/dataflow-printer").dataflowGraphToMermaidUrl;
 };
 readonly dependencies: readonly ["normalize"];
+} | {
+readonly name: "slice";
+readonly humanReadableName: "static slice";
+readonly description: "Calculate the actual static slice from the dataflow graph and the given slicing criteria";
+readonly processor: (results: {
+dataflow?: import("../../../dataflow/info").DataflowInformation;
+normalize?: import("../../../r-bridge/lang-4.x/ast/model/processing/decorate").NormalizedAst;
+}, input: Partial<import("../all/static-slicing/00-slice").SliceRequiredInput>) => Readonly<import("../../../slicing/static/slicer-types").SliceResult>;
+readonly executed: import("../pipeline-step").PipelineStepStage.OncePerRequest;
+readonly printer: {
+readonly 0: typeof import("../../print/print").internalPrinter;
+};
+readonly dependencies: readonly ["dataflow"];
+readonly requiredInput: import("../all/static-slicing/00-slice").SliceRequiredInput;
 } | {
 readonly name: "parse";
 readonly humanReadableName: "parse with tree-sitter";
@@ -433,20 +449,6 @@
 };
 readonly dependencies: readonly [];
 readonly requiredInput: import("../../../r-bridge/parser").ParseRequiredInput<import("web-tree-sitter").Tree>;
-} | {
-readonly name: "slice";
-readonly humanReadableName: "static slice";
-readonly description: "Calculate the actual static slice from the dataflow graph and the given slicing criteria";
-readonly processor: (results: {
-dataflow?: import("../../../dataflow/info").DataflowInformation;
-normalize?: import("../../../r-bridge/lang-4.x/ast/model/processing/decorate").NormalizedAst;
-}, input: Partial<import("../all/static-slicing/00-slice").SliceRequiredInput>) => Readonly<import("../../../slicing/static/slicer-types").SliceResult>;
-readonly executed: import("../pipeline-step").PipelineStepStage.OncePerRequest;
-readonly printer: {
-readonly 0: typeof import("../../print/print").internalPrinter;
-};
-readonly dependencies: readonly ["dataflow"];
-readonly requiredInput: import("../all/static-slicing/00-slice").SliceRequiredInput;
 } | {
 readonly name: "normalize";
 readonly humanReadableName: "normalize tree-sitter tree";
@@ -466,8 +468,8 @@
 readonly requiredInput: import("../all/core/10-normalize").NormalizeRequiredInput;
 }>;
 /**
- * The default pipeline for working with
- *
+ * The default pipeline for working with flowR, including the dataflow step.
+ * See the {@link DEFAULT_NORMALIZE_PIPELINE} for the pipeline without the dataflow step
  * and the {@link DEFAULT_SLICE_AND_RECONSTRUCT_PIPELINE} for the pipeline with slicing and reconstructing steps
  */
 export declare const DEFAULT_DATAFLOW_PIPELINE: import("./pipeline").Pipeline<{
@@ -678,7 +680,15 @@ export declare const TREE_SITTER_PARSE_PIPELINE: import("./pipeline").Pipeline<{
 readonly dependencies: readonly [];
 readonly requiredInput: import("../../../r-bridge/parser").ParseRequiredInput<import("web-tree-sitter").Tree>;
 }>;
+export declare function createParsePipeline(parser: TreeSitterExecutor, inputs: Omit<PipelineInput<typeof DEFAULT_PARSE_PIPELINE>, 'parser'>): PipelineExecutor<typeof TREE_SITTER_PARSE_PIPELINE>;
+export declare function createParsePipeline(parser: RShell, inputs: Omit<PipelineInput<typeof DEFAULT_PARSE_PIPELINE>, 'parser'>): PipelineExecutor<typeof DEFAULT_PARSE_PIPELINE>;
 export declare function createParsePipeline(parser: KnownParser, inputs: Omit<PipelineInput<typeof DEFAULT_PARSE_PIPELINE>, 'parser'>): PipelineExecutor<typeof DEFAULT_PARSE_PIPELINE> | PipelineExecutor<typeof TREE_SITTER_PARSE_PIPELINE>;
+export declare function createSlicePipeline(parser: TreeSitterExecutor, inputs: Omit<PipelineInput<typeof DEFAULT_SLICING_PIPELINE>, 'parser'>): PipelineExecutor<typeof TREE_SITTER_SLICING_PIPELINE>;
+export declare function createSlicePipeline(parser: RShell, inputs: Omit<PipelineInput<typeof DEFAULT_SLICING_PIPELINE>, 'parser'>): PipelineExecutor<typeof DEFAULT_SLICING_PIPELINE>;
 export declare function createSlicePipeline(parser: KnownParser, inputs: Omit<PipelineInput<typeof DEFAULT_SLICING_PIPELINE>, 'parser'>): PipelineExecutor<typeof DEFAULT_SLICING_PIPELINE> | PipelineExecutor<typeof TREE_SITTER_SLICING_PIPELINE>;
+export declare function createNormalizePipeline(parser: TreeSitterExecutor, inputs: Omit<PipelineInput<typeof DEFAULT_NORMALIZE_PIPELINE>, 'parser'>): PipelineExecutor<typeof TREE_SITTER_NORMALIZE_PIPELINE>;
+export declare function createNormalizePipeline(parser: RShell, inputs: Omit<PipelineInput<typeof DEFAULT_NORMALIZE_PIPELINE>, 'parser'>): PipelineExecutor<typeof DEFAULT_NORMALIZE_PIPELINE>;
 export declare function createNormalizePipeline(parser: KnownParser, inputs: Omit<PipelineInput<typeof DEFAULT_NORMALIZE_PIPELINE>, 'parser'>): PipelineExecutor<typeof DEFAULT_NORMALIZE_PIPELINE> | PipelineExecutor<typeof TREE_SITTER_NORMALIZE_PIPELINE>;
+export declare function createDataflowPipeline(parser: TreeSitterExecutor, inputs: Omit<PipelineInput<typeof DEFAULT_DATAFLOW_PIPELINE>, 'parser'>): PipelineExecutor<typeof TREE_SITTER_DATAFLOW_PIPELINE>;
+export declare function createDataflowPipeline(parser: RShell, inputs: Omit<PipelineInput<typeof DEFAULT_DATAFLOW_PIPELINE>, 'parser'>): PipelineExecutor<typeof DEFAULT_DATAFLOW_PIPELINE>;
 export declare function createDataflowPipeline(parser: KnownParser, inputs: Omit<PipelineInput<typeof DEFAULT_DATAFLOW_PIPELINE>, 'parser'>): PipelineExecutor<typeof DEFAULT_DATAFLOW_PIPELINE> | PipelineExecutor<typeof TREE_SITTER_DATAFLOW_PIPELINE>;
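The new overloads above let TypeScript narrow the executor type when a concrete engine is passed, instead of always returning the union. A sketch of the effect; the import paths follow the relative paths in the diff and may differ for consumers:

```ts
import type { RShell } from '@eagleoutice/flowr/r-bridge/shell';
import type { TreeSitterExecutor } from '@eagleoutice/flowr/r-bridge/lang-4.x/tree-sitter/tree-sitter-executor';
import { createDataflowPipeline } from '@eagleoutice/flowr/core/steps/pipeline/default-pipelines';
import { requestFromInput } from '@eagleoutice/flowr/r-bridge/retriever';

declare const shell: RShell;
declare const treeSitter: TreeSitterExecutor;

// narrowed to PipelineExecutor<typeof TREE_SITTER_DATAFLOW_PIPELINE>
const a = createDataflowPipeline(treeSitter, { request: requestFromInput('x <- 1') });
// narrowed to PipelineExecutor<typeof DEFAULT_DATAFLOW_PIPELINE>
const b = createDataflowPipeline(shell, { request: requestFromInput('x <- 1') });
```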
package/core/steps/pipeline/default-pipelines.js
CHANGED
@@ -21,8 +21,8 @@ exports.TREE_SITTER_SLICING_PIPELINE = (0, pipeline_1.createPipeline)(_01_parse_
 exports.TREE_SITTER_SLICE_AND_RECONSTRUCT_PIPELINE = exports.TREE_SITTER_SLICING_PIPELINE;
 exports.TREE_SITTER_SLICE_WITHOUT_RECONSTRUCT_PIPELINE = (0, pipeline_1.createPipeline)(_01_parse_tree_sitter_1.PARSE_WITH_TREE_SITTER_STEP, _11_normalize_tree_sitter_1.NORMALIZE_TREE_SITTER, _20_dataflow_1.STATIC_DATAFLOW, _00_slice_1.STATIC_SLICE);
 /**
- * The default pipeline for working with
- *
+ * The default pipeline for working with flowR, including the dataflow step.
+ * See the {@link DEFAULT_NORMALIZE_PIPELINE} for the pipeline without the dataflow step
  * and the {@link DEFAULT_SLICE_AND_RECONSTRUCT_PIPELINE} for the pipeline with slicing and reconstructing steps
  */
 exports.DEFAULT_DATAFLOW_PIPELINE = (0, pipeline_1.createPipeline)(_00_parse_1.PARSE_WITH_R_SHELL_STEP, _10_normalize_1.NORMALIZE, _20_dataflow_1.STATIC_DATAFLOW);
@@ -32,6 +32,11 @@ exports.DEFAULT_NORMALIZE_PIPELINE = (0, pipeline_1.createPipeline)(_00_parse_1.
 exports.TREE_SITTER_NORMALIZE_PIPELINE = (0, pipeline_1.createPipeline)(_01_parse_tree_sitter_1.PARSE_WITH_TREE_SITTER_STEP, _11_normalize_tree_sitter_1.NORMALIZE_TREE_SITTER);
 exports.DEFAULT_PARSE_PIPELINE = (0, pipeline_1.createPipeline)(_00_parse_1.PARSE_WITH_R_SHELL_STEP);
 exports.TREE_SITTER_PARSE_PIPELINE = (0, pipeline_1.createPipeline)(_01_parse_tree_sitter_1.PARSE_WITH_TREE_SITTER_STEP);
+/**
+ * Returns either a {@link DEFAULT_PARSE_PIPELINE} or a {@link TREE_SITTER_PARSE_PIPELINE} depending on the parser used.
+ *
+ * @see {@link createNormalizePipeline}, {@link createDataflowPipeline}, {@link createSlicePipeline}
+ */
 function createParsePipeline(parser, inputs) {
 const base = parser.name === 'tree-sitter' ? exports.TREE_SITTER_PARSE_PIPELINE : exports.DEFAULT_PARSE_PIPELINE;
 return new pipeline_executor_1.PipelineExecutor(base, {
@@ -39,6 +44,11 @@ function createParsePipeline(parser, inputs) {
 ...inputs
 });
 }
+/**
+ * Returns either a {@link DEFAULT_SLICING_PIPELINE} or a {@link TREE_SITTER_SLICING_PIPELINE} depending on the parser used.
+ *
+ * @see {@link createParsePipeline}, {@link createNormalizePipeline}, {@link createDataflowPipeline}
+ */
 function createSlicePipeline(parser, inputs) {
 const base = parser.name === 'tree-sitter' ? exports.TREE_SITTER_SLICING_PIPELINE : exports.DEFAULT_SLICING_PIPELINE;
 return new pipeline_executor_1.PipelineExecutor(base, {
@@ -46,6 +56,11 @@ function createSlicePipeline(parser, inputs) {
 ...inputs
 });
 }
+/**
+ * Returns either a {@link DEFAULT_NORMALIZE_PIPELINE} or a {@link TREE_SITTER_NORMALIZE_PIPELINE} depending on the parser used.
+ *
+ * @see {@link createParsePipeline}, {@link createDataflowPipeline}, {@link createSlicePipeline}
+ */
 function createNormalizePipeline(parser, inputs) {
 const base = parser.name === 'tree-sitter' ? exports.TREE_SITTER_NORMALIZE_PIPELINE : exports.DEFAULT_NORMALIZE_PIPELINE;
 return new pipeline_executor_1.PipelineExecutor(base, {
@@ -53,6 +68,12 @@ function createNormalizePipeline(parser, inputs) {
 ...inputs
 });
 }
+/**
+ * Returns either a {@link DEFAULT_DATAFLOW_PIPELINE} or a {@link TREE_SITTER_DATAFLOW_PIPELINE} depending on the parser used.
+ *
+ * @see {@link createParsePipeline}, {@link createNormalizePipeline}, {@link createSlicePipeline}
+ *
+ */
 function createDataflowPipeline(parser, inputs) {
 const base = parser.name === 'tree-sitter' ? exports.TREE_SITTER_DATAFLOW_PIPELINE : exports.DEFAULT_DATAFLOW_PIPELINE;
 return new pipeline_executor_1.PipelineExecutor(base, {
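All four helpers dispatch on `parser.name === 'tree-sitter'` and otherwise fall back to the RShell-based pipelines. A usage sketch for `createSlicePipeline`; the `criterion` input name and the `'line@variable'` criterion format are assumptions about flowR's slicing input, not something this diff shows:

```ts
import { RShell } from '@eagleoutice/flowr/r-bridge/shell';
import { createSlicePipeline } from '@eagleoutice/flowr/core/steps/pipeline/default-pipelines';
import { requestFromInput } from '@eagleoutice/flowr/r-bridge/retriever';

async function sliceForX(): Promise<unknown> {
    const shell = new RShell();
    try {
        const result = await createSlicePipeline(shell, {
            request:   requestFromInput('x <- 1\ny <- 2\nprint(x)'),
            criterion: ['3@x'] // assumed: slice for the use of `x` in line 3
        }).allRemainingSteps();
        return result.reconstruct; // result of the reconstruct step in the slicing pipeline
    } finally {
        shell.close();
    }
}
```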
package/core/steps/pipeline/pipeline.d.ts
CHANGED
@@ -120,7 +120,7 @@ export type PipelineOutput<P extends Pipeline> = {
  *
  * 0) the collection of {@link IPipelineStep|steps} is not empty
  * 1) all {@link IPipelineStepOrder#name|names} of {@link IPipelineStep|steps} are unique for the given pipeline
- * 2) all {@link IPipelineStepOrder#dependencies|dependencies} of all {@link IPipelineStep|steps}
+ * 2) all {@link IPipelineStepOrder#dependencies|dependencies} of all {@link IPipelineStep|steps} exist
  * 3) there are no cycles in the dependency graph
  * 4) the target of a {@link IPipelineStepOrder#decorates|step's decoration} exists
  * 5) if a {@link IPipelineStepOrder#decorates|decoration} applies, all of its {@link IPipelineStepOrder#dependencies|dependencies} are already in the pipeline
package/core/steps/pipeline/pipeline.js
CHANGED
@@ -9,7 +9,7 @@ const create_pipeline_1 = require("./create-pipeline");
  *
  * 0) the collection of {@link IPipelineStep|steps} is not empty
  * 1) all {@link IPipelineStepOrder#name|names} of {@link IPipelineStep|steps} are unique for the given pipeline
- * 2) all {@link IPipelineStepOrder#dependencies|dependencies} of all {@link IPipelineStep|steps}
+ * 2) all {@link IPipelineStepOrder#dependencies|dependencies} of all {@link IPipelineStep|steps} exist
  * 3) there are no cycles in the dependency graph
  * 4) the target of a {@link IPipelineStepOrder#decorates|step's decoration} exists
  * 5) if a {@link IPipelineStepOrder#decorates|decoration} applies, all of its {@link IPipelineStepOrder#dependencies|dependencies} are already in the pipeline
package/core/steps/pipeline-step.d.ts
CHANGED
@@ -67,9 +67,7 @@ export interface IPipelineStep<Name extends PipelineStepName = PipelineStepName,
 readonly description: string;
 /** The main processor that essentially performs the logic of this step */
 readonly processor: (...input: Parameters<Fn>) => ReturnType<Fn>;
-/**
- * How to visualize the results of the respective step to the user?
- */
+/** How to visualize the results of the respective step to the user? */
 readonly printer: {
 [K in StepOutputFormat]?: IPipelineStepPrinter<Fn, K, never[]>;
 } & {
package/dataflow/environments/resolve-by-name.d.ts
CHANGED
@@ -4,6 +4,7 @@ import type { Identifier, IdentifierDefinition } from './identifier';
 import { ReferenceType } from './identifier';
 import type { NodeId } from '../../r-bridge/lang-4.x/ast/model/processing/node-id';
 import type { DataflowGraph } from '../graph/graph';
+import type { AstIdMap } from '../../r-bridge/lang-4.x/ast/model/processing/decorate';
 /**
  * Resolves a given identifier name to a list of its possible definition location using R scoping and resolving rules.
  *
@@ -18,9 +19,9 @@ export declare function resolveByName(name: Identifier, environment: REnvironmen
 export declare function resolvesToBuiltInConstant(name: Identifier | undefined, environment: REnvironmentInformation, wantedValue: unknown): Ternary;
 export declare function resolveToConstants(name: Identifier | undefined, environment: REnvironmentInformation): unknown[] | undefined;
 export declare function getAliases(sourceIds: readonly NodeId[], dataflow: DataflowGraph, environment: REnvironmentInformation): NodeId[] | undefined;
-export declare function resolveToValues(identifier: Identifier | undefined, environment: REnvironmentInformation,
+export declare function resolveToValues(identifier: Identifier | undefined, environment: REnvironmentInformation, idMap?: AstIdMap): unknown[] | undefined;
 /**
  * Convenience function using the variable resolver as specified within the configuration file
  * In the future we may want to have this set once at the start of the analysis
  */
-export declare function resolveValueOfVariable(identifier: Identifier | undefined, environment: REnvironmentInformation,
+export declare function resolveValueOfVariable(identifier: Identifier | undefined, environment: REnvironmentInformation, idMap?: AstIdMap): unknown[] | undefined;
package/dataflow/environments/resolve-by-name.js
CHANGED
@@ -147,7 +147,7 @@ function getAliases(sourceIds, dataflow, environment) {
 }
 return [...definitions];
 }
-function resolveToValues(identifier, environment,
+function resolveToValues(identifier, environment, idMap) {
 if (identifier === undefined) {
 return undefined;
 }
@@ -169,7 +169,7 @@ function resolveToValues(identifier, environment, graph) {
 return undefined;
 }
 for (const id of def.value) {
-const value =
+const value = idMap?.get(id)?.content;
 if (value !== undefined) {
 values.push(value);
 }
@@ -185,10 +185,10 @@
  * Convenience function using the variable resolver as specified within the configuration file
  * In the future we may want to have this set once at the start of the analysis
  */
-function resolveValueOfVariable(identifier, environment,
+function resolveValueOfVariable(identifier, environment, idMap) {
 const resolve = (0, config_1.getConfig)().solver.variables;
 switch (resolve) {
-case config_1.VariableResolve.Alias: return resolveToValues(identifier, environment,
+case config_1.VariableResolve.Alias: return resolveToValues(identifier, environment, idMap);
 case config_1.VariableResolve.Builtin: return resolveToConstants(identifier, environment);
 case config_1.VariableResolve.Disabled: return [];
 default: (0, assert_1.assertUnreachable)(resolve);
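Both `resolveToValues` and `resolveValueOfVariable` now accept the normalized AST's id map, so the alias resolver can map definition node ids back to their `content` values. A type-driven sketch; the concrete identifier, environment, and id-map values would come from an existing analysis and are only declared here, and the import path mirrors the compiled file's location:

```ts
import { resolveValueOfVariable } from '@eagleoutice/flowr/dataflow/environments/resolve-by-name';

declare const name: Parameters<typeof resolveValueOfVariable>[0];        // Identifier | undefined
declare const environment: Parameters<typeof resolveValueOfVariable>[1]; // REnvironmentInformation
declare const idMap: Parameters<typeof resolveValueOfVariable>[2];       // AstIdMap | undefined (new parameter)

// With VariableResolve.Alias configured, this now routes through resolveToValues(name, environment, idMap)
// and reads values via idMap?.get(id)?.content, per the changed lines above.
const values: unknown[] | undefined = resolveValueOfVariable(name, environment, idMap);
```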
package/dataflow/extractor.d.ts
CHANGED
@@ -3,5 +3,15 @@ import type { DataflowProcessors } from './processor';
 import type { NormalizedAst, ParentInformation } from '../r-bridge/lang-4.x/ast/model/processing/decorate';
 import type { RParseRequests } from '../r-bridge/retriever';
 import type { KnownParserType, Parser } from '../r-bridge/parser';
+/**
+ * The best friend of {@link produceDataFlowGraph} and {@link processDataflowFor}.
+ * Maps every {@link RType} in the normalized AST to a processor.
+ */
 export declare const processors: DataflowProcessors<ParentInformation>;
+/**
+ * This is the main function to produce the dataflow graph from a given request and normalized AST.
+ * Note, that this requires knowledge of the active parser in case the dataflow analysis uncovers other files that have to be parsed and integrated into the analysis
+ * (e.g., in the event of a `source` call).
+ * For the actual, canonical fold entry point, see {@link processDataflowFor}.
+ */
 export declare function produceDataFlowGraph<OtherInfo>(parser: Parser<KnownParserType>, request: RParseRequests, ast: NormalizedAst<OtherInfo & ParentInformation>): DataflowInformation;
package/dataflow/extractor.js
CHANGED
@@ -21,6 +21,10 @@ const cfg_1 = require("../util/cfg/cfg");
 const edge_1 = require("./graph/edge");
 const identify_link_to_last_call_relation_1 = require("../queries/catalog/call-context-query/identify-link-to-last-call-relation");
 const built_in_function_definition_1 = require("./internal/process/functions/call/built-in/built-in-function-definition");
+/**
+ * The best friend of {@link produceDataFlowGraph} and {@link processDataflowFor}.
+ * Maps every {@link RType} in the normalized AST to a processor.
+ */
 exports.processors = {
 [type_1.RType.Number]: process_value_1.processValue,
 [type_1.RType.String]: process_value_1.processValue,
@@ -68,6 +72,12 @@ function resolveLinkToSideEffects(ast, graph) {
 }
 }
 }
+/**
+ * This is the main function to produce the dataflow graph from a given request and normalized AST.
+ * Note, that this requires knowledge of the active parser in case the dataflow analysis uncovers other files that have to be parsed and integrated into the analysis
+ * (e.g., in the event of a `source` call).
+ * For the actual, canonical fold entry point, see {@link processDataflowFor}.
+ */
 function produceDataFlowGraph(parser, request, ast) {
 const multifile = Array.isArray(request);
 let firstRequest;
package/dataflow/internal/process/functions/call/built-in/built-in-if-then-else.js
CHANGED
@@ -33,7 +33,7 @@ function processIfThenElse(name, args, rootId, data) {
 let then;
 let makeThenMaybe = false;
 // we should defer this to the abstract interpretation
-const values = (0, resolve_by_name_1.resolveValueOfVariable)(condArg?.lexeme, data.environment,
+const values = (0, resolve_by_name_1.resolveValueOfVariable)(condArg?.lexeme, data.environment, data.completeAst.idMap);
 const conditionIsAlwaysFalse = values?.every(d => d === false) ?? false;
 const conditionIsAlwaysTrue = values?.every(d => d === true) ?? false;
 if (!conditionIsAlwaysFalse) {
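With the id map now forwarded into `resolveValueOfVariable`, the `if` processor can, depending on the configured variable resolver, recognize conditions that always resolve to a constant. A sketch that runs the dataflow pipeline over such a snippet, reusing the path and initialization assumptions from the earlier sketches:

```ts
import { TreeSitterExecutor } from '@eagleoutice/flowr/r-bridge/lang-4.x/tree-sitter/tree-sitter-executor';
import { createDataflowPipeline } from '@eagleoutice/flowr/core/steps/pipeline/default-pipelines';
import { requestFromInput } from '@eagleoutice/flowr/r-bridge/retriever';

async function constantConditionExample(): Promise<void> {
    await TreeSitterExecutor.initTreeSitter(); // assumed one-time engine initialization
    const result = await createDataflowPipeline(new TreeSitterExecutor(), {
        request: requestFromInput('x <- FALSE\nif(x) { print("then branch") }')
    }).allRemainingSteps();
    // Whether the then-branch is treated as always-false depends on the configured resolver
    // (VariableResolve.Alias / Builtin / Disabled, per the switch in resolve-by-name.js above).
    console.log(result.dataflow.graph);
}
```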