@eagleoutice/flowr 2.1.7 → 2.1.9
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +2 -1
- package/abstract-interpretation/normalized-ast-fold.d.ts +124 -0
- package/abstract-interpretation/normalized-ast-fold.js +178 -0
- package/benchmark/summarizer/first-phase/process.js +6 -5
- package/cli/repl/commands/repl-dataflow.js +5 -2
- package/cli/repl/commands/repl-normalize.js +5 -2
- package/cli/repl/commands/repl-query.js +2 -2
- package/cli/repl/server/messages/message-query.js +1 -1
- package/cli/slicer-app.js +1 -1
- package/core/steps/pipeline/pipeline.d.ts +63 -0
- package/dataflow/environments/default-builtin-config.js +45 -6
- package/dataflow/environments/environment.d.ts +46 -8
- package/dataflow/environments/environment.js +24 -1
- package/dataflow/environments/identifier.d.ts +49 -7
- package/dataflow/environments/identifier.js +11 -2
- package/dataflow/environments/resolve-by-name.d.ts +5 -0
- package/dataflow/environments/resolve-by-name.js +14 -0
- package/dataflow/extractor.js +5 -4
- package/dataflow/graph/dataflowgraph-builder.d.ts +6 -0
- package/dataflow/graph/dataflowgraph-builder.js +8 -0
- package/dataflow/graph/edge.d.ts +10 -4
- package/dataflow/graph/edge.js +12 -5
- package/dataflow/graph/graph.d.ts +41 -3
- package/dataflow/graph/graph.js +39 -34
- package/dataflow/graph/vertex.d.ts +66 -7
- package/dataflow/graph/vertex.js +15 -0
- package/dataflow/info.d.ts +79 -11
- package/dataflow/info.js +20 -0
- package/dataflow/internal/linker.d.ts +4 -2
- package/dataflow/internal/linker.js +12 -5
- package/dataflow/internal/process/functions/call/built-in/built-in-assignment.d.ts +2 -0
- package/dataflow/internal/process/functions/call/built-in/built-in-assignment.js +5 -3
- package/dataflow/internal/process/functions/call/built-in/built-in-expression-list.js +1 -1
- package/dataflow/internal/process/functions/call/built-in/built-in-function-definition.d.ts +16 -0
- package/dataflow/internal/process/functions/call/built-in/built-in-function-definition.js +83 -6
- package/dataflow/internal/process/functions/call/built-in/built-in-if-then-else.js +17 -7
- package/dataflow/internal/process/functions/call/common.js +1 -1
- package/documentation/doc-util/doc-dfg.d.ts +2 -2
- package/documentation/doc-util/doc-dfg.js +11 -16
- package/documentation/doc-util/doc-normalized-ast.js +1 -1
- package/documentation/doc-util/doc-types.d.ts +1 -1
- package/documentation/doc-util/doc-types.js +21 -0
- package/documentation/print-capabilities-markdown.js +1 -1
- package/documentation/print-dataflow-graph-wiki.js +44 -7
- package/documentation/print-linting-and-testing-wiki.js +60 -26
- package/documentation/print-normalized-ast-wiki.js +107 -5
- package/documentation/print-query-wiki.js +8 -1
- package/package.json +17 -3
- package/queries/catalog/call-context-query/call-context-query-executor.js +23 -2
- package/queries/catalog/call-context-query/call-context-query-format.d.ts +29 -2
- package/queries/catalog/call-context-query/call-context-query-format.js +7 -1
- package/queries/catalog/call-context-query/cascade-action.d.ts +8 -0
- package/queries/catalog/call-context-query/cascade-action.js +13 -0
- package/queries/catalog/call-context-query/identify-link-to-last-call-relation.d.ts +11 -1
- package/queries/catalog/call-context-query/identify-link-to-last-call-relation.js +41 -4
- package/queries/catalog/dependencies-query/dependencies-query-format.js +4 -0
- package/queries/query.d.ts +4 -4
- package/queries/query.js +17 -5
- package/r-bridge/lang-4.x/ast/model/model.d.ts +3 -0
- package/r-bridge/lang-4.x/ast/model/nodes/r-number.d.ts +5 -1
- package/r-bridge/lang-4.x/ast/model/processing/node-id.d.ts +6 -1
- package/r-bridge/lang-4.x/ast/model/processing/node-id.js +6 -1
- package/r-bridge/lang-4.x/ast/model/processing/visitor.d.ts +1 -1
- package/r-bridge/lang-4.x/ast/model/processing/visitor.js +1 -1
- package/r-bridge/lang-4.x/ast/parser/json/format.js +2 -2
- package/reconstruct/reconstruct.js +1 -1
- package/slicing/static/slice-call.d.ts +7 -2
- package/slicing/static/slice-call.js +33 -44
- package/slicing/static/static-slicer.d.ts +5 -1
- package/slicing/static/static-slicer.js +22 -8
- package/slicing/static/visiting-queue.d.ts +4 -4
- package/slicing/static/visiting-queue.js +5 -3
- package/statistics/output/print-stats.js +2 -1
- package/statistics/summarizer/post-process/histogram.js +2 -1
- package/statistics/summarizer/post-process/post-process-output.js +2 -1
- package/statistics/summarizer/second-phase/process.js +3 -3
- package/util/arrays.d.ts +1 -1
- package/util/arrays.js +3 -3
- package/util/assert.d.ts +1 -1
- package/util/assert.js +3 -2
- package/util/cfg/cfg.js +4 -2
- package/util/mermaid/cfg.js +1 -1
- package/util/summarizer.js +2 -2
- package/util/version.js +1 -1
package/r-bridge/lang-4.x/ast/parser/json/format.js
CHANGED
```diff
@@ -10,10 +10,10 @@ exports.RootId = 0;
 function prepareParsedData(data) {
     let json;
     try {
-        json = JSON.parse(`[${data}]`);
+        json = JSON.parse(`[${data.trim()}]`);
     }
     catch (e) {
-        throw new Error(`Failed to parse data ${data}: ${e?.message}`);
+        throw new Error(`Failed to parse data [${data}]: ${e?.message}`);
     }
     (0, assert_1.guard)(Array.isArray(json), () => `Expected ${data} to be an array but was not`);
     const ret = new Map(json.map(([line1, col1, line2, col2, id, parent, token, terminal, text]) => {
```
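The only change here is that the comma-joined parser output is trimmed before being wrapped in brackets, and the error message now echoes the bracketed form that was actually handed to `JSON.parse`. A minimal sketch of the same wrap-and-parse pattern (a hypothetical helper, not flowR's actual `prepareParsedData`):

```ts
// Sketch only: parse a comma-separated sequence of JSON arrays by wrapping it in `[...]`.
function parseRows(data: string): unknown[] {
    let json: unknown;
    try {
        // .trim() strips surrounding whitespace before the data is wrapped in brackets
        json = JSON.parse(`[${data.trim()}]`);
    }
    catch (e) {
        // echo the bracketed form so the message matches what was actually parsed
        throw new Error(`Failed to parse data [${data}]: ${(e as Error)?.message}`);
    }
    if (!Array.isArray(json)) {
        throw new Error(`Expected ${data} to be an array but was not`);
    }
    return json;
}

// e.g. parseRows('[1,1,1,5,7,0,"expr",false,"x <- 1"],[1,1,1,1,1,7,"SYMBOL",true,"x"]')
```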
package/reconstruct/reconstruct.js
CHANGED
```diff
@@ -277,7 +277,7 @@ function reconstructParameter(parameter, name, defaultValue, configuration) {
         return plain(`${getLexeme(parameter.name)}=${getLexeme(parameter.defaultValue)}`);
     }
     else if (parameter.defaultValue !== undefined && name.length === 0) {
-        return
+        return defaultValue ?? [];
     }
     else {
         return name;
```
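The fix replaces the previous return in this branch with `return defaultValue ?? [];`, so a parameter that has a default value but no reconstructed name always yields a value. A tiny sketch of the fallback with hypothetical types (not flowR's real reconstruction structures):

```ts
// Hypothetical model for illustration: a reconstructed fragment is a list of code lines.
type Code = string[];

function pickParameterCode(name: Code, defaultValue: Code | undefined): Code {
    if (name.length === 0) {
        // `?? []` guarantees callers always receive an array, even when no default value was reconstructed
        return defaultValue ?? [];
    }
    return name;
}

console.log(pickParameterCode([], undefined));  // []
console.log(pickParameterCode([], ['x = 1']));  // ['x = 1']
```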
package/slicing/static/slice-call.d.ts
CHANGED
```diff
@@ -1,10 +1,15 @@
 import type { NodeToSlice } from './slicer-types';
 import type { VisitingQueue } from './visiting-queue';
 import type { Fingerprint } from './fingerprint';
-import type { DataflowGraphVertexFunctionCall } from '../../dataflow/graph/vertex';
+import type { DataflowGraphVertexFunctionCall, DataflowGraphVertexInfo } from '../../dataflow/graph/vertex';
 import type { REnvironmentInformation } from '../../dataflow/environments/environment';
 import type { DataflowGraph, OutgoingEdges } from '../../dataflow/graph/graph';
+import type { NodeId } from '../../r-bridge/lang-4.x/ast/model/processing/node-id';
+/**
+ * Returns the function call targets (definitions) by the given caller
+ */
+export declare function getAllFunctionCallTargets(dataflowGraph: DataflowGraph, callerInfo: DataflowGraphVertexFunctionCall, baseEnvironment: REnvironmentInformation): [Set<DataflowGraphVertexInfo>, REnvironmentInformation];
 /** returns the new threshold hit count */
 export declare function sliceForCall(current: NodeToSlice, callerInfo: DataflowGraphVertexFunctionCall, dataflowGraph: DataflowGraph, queue: VisitingQueue): void;
 /** Returns true if we found at least one return edge */
-export declare function handleReturns(queue: VisitingQueue, currentEdges: OutgoingEdges, baseEnvFingerprint: Fingerprint, baseEnvironment: REnvironmentInformation): boolean;
+export declare function handleReturns(from: NodeId, queue: VisitingQueue, currentEdges: OutgoingEdges, baseEnvFingerprint: Fingerprint, baseEnvironment: REnvironmentInformation): boolean;
```
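The declaration file now exports `getAllFunctionCallTargets` and threads the calling node's id (`from`) into `handleReturns`. A usage sketch for the new export, based only on the declared signature; the deep-import specifiers are illustrative and `reportCallTargets` is a hypothetical consumer:

```ts
// Sketch only: assumes a dataflow graph and a function-call vertex obtained from a prior analysis.
import { getAllFunctionCallTargets } from '@eagleoutice/flowr/slicing/static/slice-call';
import type { DataflowGraph } from '@eagleoutice/flowr/dataflow/graph/graph';
import type { DataflowGraphVertexFunctionCall } from '@eagleoutice/flowr/dataflow/graph/vertex';
import type { REnvironmentInformation } from '@eagleoutice/flowr/dataflow/environments/environment';

function reportCallTargets(graph: DataflowGraph, call: DataflowGraphVertexFunctionCall, env: REnvironmentInformation): void {
    // returns the resolved definition vertices plus the environment that was active for the call
    const [targets, activeEnvironment] = getAllFunctionCallTargets(graph, call, env);
    console.log(`call ${call.id} resolves to ${targets.size} definition(s)`);
    void activeEnvironment; // reusable, e.g. for fingerprinting as sliceForCall does
}
```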
package/slicing/static/slice-call.js
CHANGED
```diff
@@ -1,31 +1,37 @@
 "use strict";
 Object.defineProperty(exports, "__esModule", { value: true });
+exports.getAllFunctionCallTargets = getAllFunctionCallTargets;
 exports.sliceForCall = sliceForCall;
 exports.handleReturns = handleReturns;
 const assert_1 = require("../../util/assert");
 const fingerprint_1 = require("./fingerprint");
 const linker_1 = require("../../dataflow/internal/linker");
-const environment_1 = require("../../dataflow/environments/environment");
-const scoping_1 = require("../../dataflow/environments/scoping");
-const overwrite_1 = require("../../dataflow/environments/overwrite");
 const graph_1 = require("../../dataflow/graph/graph");
 const built_in_1 = require("../../dataflow/environments/built-in");
 const resolve_by_name_1 = require("../../dataflow/environments/resolve-by-name");
 const edge_1 = require("../../dataflow/graph/edge");
 const identifier_1 = require("../../dataflow/environments/identifier");
-
-
-
-
-
-
-
-
-
-
+const built_in_function_definition_1 = require("../../dataflow/internal/process/functions/call/built-in/built-in-function-definition");
+const static_slicer_1 = require("./static-slicer");
+/**
+ * Returns the function call targets (definitions) by the given caller
+ */
+function getAllFunctionCallTargets(dataflowGraph, callerInfo, baseEnvironment) {
+    // bind with call-local environments during slicing
+    const outgoingEdges = dataflowGraph.get(callerInfo.id, true);
+    (0, assert_1.guard)(outgoingEdges !== undefined, () => `outgoing edges of id: ${callerInfo.id} must be in graph but can not be found, keep in slice to be sure`);
+    // lift baseEnv on the same level
+    const activeEnvironment = (0, built_in_function_definition_1.retrieveActiveEnvironment)(callerInfo.environment, baseEnvironment);
+    const name = callerInfo.name;
+    (0, assert_1.guard)(name !== undefined, () => `name of id: ${callerInfo.id} can not be found in id map`);
+    const functionCallDefs = (0, resolve_by_name_1.resolveByName)(name, activeEnvironment, identifier_1.ReferenceType.Unknown)?.filter(d => d.definedAt !== built_in_1.BuiltIn)?.map(d => d.nodeId) ?? [];
+    for (const [target, outgoingEdge] of outgoingEdges[1].entries()) {
+        if ((0, edge_1.edgeIncludesType)(outgoingEdge.types, edge_1.EdgeType.Calls)) {
+            functionCallDefs.push(target);
         }
     }
-
+    const functionCallTargets = (0, linker_1.getAllLinkedFunctionDefinitions)(new Set(functionCallDefs), dataflowGraph);
+    return [functionCallTargets, activeEnvironment];
 }
 function includeArgumentFunctionCallClosure(arg, baseEnvironment, activeEnvironment, queue, dataflowGraph) {
     const valueRoot = (0, graph_1.getReferenceOfArgument)(arg);
@@ -33,20 +39,21 @@ function includeArgumentFunctionCallClosure(arg, baseEnvironment, activeEnvironm
         return;
     }
     const callTargets = (0, linker_1.getAllLinkedFunctionDefinitions)(new Set([valueRoot]), dataflowGraph);
-    linkCallTargets(false, callTargets,
+    linkCallTargets(false, callTargets, activeEnvironment, (0, fingerprint_1.envFingerprint)(activeEnvironment), queue);
 }
-function linkCallTargets(onlyForSideEffects, functionCallTargets,
+function linkCallTargets(onlyForSideEffects, functionCallTargets, activeEnvironment, activeEnvironmentFingerprint, queue) {
     for (const functionCallTarget of functionCallTargets) {
         // all those linked within the scopes of other functions are already linked when exiting a function definition
-        for
-
-
+        /* for(const openIn of (functionCallTarget as DataflowGraphVertexFunctionDefinition).subflow.in) {
+            // only if the outgoing path does not already have a defined by linkage
+            const defs = openIn.name ? resolveByName(openIn.name, activeEnvironment, openIn.type) : undefined;
+            if(defs === undefined) {
                 continue;
             }
-            for
+            for(const def of defs.filter(d => d.nodeId !== BuiltIn)) {
                 queue.add(def.nodeId, baseEnvironment, baseEnvPrint, onlyForSideEffects);
             }
-        }
+        }*/
         for (const exitPoint of functionCallTarget.exitPoints) {
             queue.add(exitPoint, activeEnvironment, activeEnvironmentFingerprint, onlyForSideEffects);
         }
@@ -54,23 +61,9 @@ function linkCallTargets(onlyForSideEffects, functionCallTargets, baseEnvironmen
 }
 /** returns the new threshold hit count */
 function sliceForCall(current, callerInfo, dataflowGraph, queue) {
-    // bind with call-local environments during slicing
-    const outgoingEdges = dataflowGraph.get(callerInfo.id, true);
-    (0, assert_1.guard)(outgoingEdges !== undefined, () => `outgoing edges of id: ${callerInfo.id} must be in graph but can not be found, keep in slice to be sure`);
-    // lift baseEnv on the same level
     const baseEnvironment = current.baseEnvironment;
-    const
-    const activeEnvironment = retrieveActiveEnvironment(callerInfo, baseEnvironment);
+    const [functionCallTargets, activeEnvironment] = getAllFunctionCallTargets(dataflowGraph, callerInfo, current.baseEnvironment);
     const activeEnvironmentFingerprint = (0, fingerprint_1.envFingerprint)(activeEnvironment);
-    const name = callerInfo.name;
-    (0, assert_1.guard)(name !== undefined, () => `name of id: ${callerInfo.id} can not be found in id map`);
-    const functionCallDefs = (0, resolve_by_name_1.resolveByName)(name, activeEnvironment, identifier_1.ReferenceType.Unknown)?.filter(d => d.definedAt !== built_in_1.BuiltIn)?.map(d => d.nodeId) ?? [];
-    for (const [target, outgoingEdge] of outgoingEdges[1].entries()) {
-        if ((0, edge_1.edgeIncludesType)(outgoingEdge.types, edge_1.EdgeType.Calls)) {
-            functionCallDefs.push(target);
-        }
-    }
-    const functionCallTargets = (0, linker_1.getAllLinkedFunctionDefinitions)(new Set(functionCallDefs), dataflowGraph);
     if (functionCallTargets.size === 0) {
         /*
          * if we do not have any call to resolve this function, we have to assume that every function passed is actually called!
@@ -81,10 +74,10 @@ function sliceForCall(current, callerInfo, dataflowGraph, queue) {
         }
         return;
     }
-    linkCallTargets(current.onlyForSideEffects, functionCallTargets,
+    linkCallTargets(current.onlyForSideEffects, functionCallTargets, activeEnvironment, activeEnvironmentFingerprint, queue);
 }
 /** Returns true if we found at least one return edge */
-function handleReturns(queue, currentEdges, baseEnvFingerprint, baseEnvironment) {
+function handleReturns(from, queue, currentEdges, baseEnvFingerprint, baseEnvironment) {
     const e = [...currentEdges.entries()];
     const found = e.filter(([_, edge]) => (0, edge_1.edgeIncludesType)(edge.types, edge_1.EdgeType.Returns));
     if (found.length === 0) {
@@ -97,12 +90,8 @@ function handleReturns(queue, currentEdges, baseEnvFingerprint, baseEnvironment)
         if ((0, edge_1.edgeIncludesType)(edge.types, edge_1.EdgeType.Reads)) {
             queue.add(target, baseEnvironment, baseEnvFingerprint, false);
         }
-        else if ((0, edge_1.edgeIncludesType)(edge.types, edge_1.EdgeType.Argument)) {
-
-            id: target,
-            baseEnvironment,
-            onlyForSideEffects: false
-            });
+        else if ((0, edge_1.edgeIncludesType)(edge.types, edge_1.EdgeType.DefinesOnCall | edge_1.EdgeType.DefinedByOnCall | edge_1.EdgeType.Argument)) {
+            (0, static_slicer_1.updatePotentialAddition)(queue, from, target, baseEnvironment);
         }
     }
     return true;
```
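In `handleReturns`, the old `Argument`-only branch now also fires for `DefinesOnCall` and `DefinedByOnCall` edges by OR-ing the flags into a single `edgeIncludesType` test. A self-contained sketch of that bit-flag idiom (the enum values below are placeholders, not flowR's actual `EdgeType` constants):

```ts
// Illustrative bit-flag edge kinds; the numeric values are placeholders.
enum EdgeKind {
    Reads           = 1 << 0,
    Calls           = 1 << 1,
    Returns         = 1 << 2,
    DefinesOnCall   = 1 << 3,
    DefinedByOnCall = 1 << 4,
    Argument        = 1 << 5
}

// same shape of check as edgeIncludesType: does the edge carry at least one of the queried flags?
function includesAny(types: number, query: number): boolean {
    return (types & query) !== 0;
}

const edgeTypes = EdgeKind.Argument | EdgeKind.Reads;
// one test against the OR of several flags matches any of them, which is what the widened
// `DefinesOnCall | DefinedByOnCall | Argument` branch relies on
console.log(includesAny(edgeTypes, EdgeKind.DefinesOnCall | EdgeKind.DefinedByOnCall | EdgeKind.Argument)); // true
console.log(includesAny(edgeTypes, EdgeKind.Returns)); // false
```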
package/slicing/static/static-slicer.d.ts
CHANGED
```diff
@@ -1,7 +1,10 @@
 import type { SliceResult } from './slicer-types';
+import { VisitingQueue } from './visiting-queue';
 import type { DataflowGraph } from '../../dataflow/graph/graph';
 import type { NormalizedAst } from '../../r-bridge/lang-4.x/ast/model/processing/decorate';
 import type { SlicingCriteria } from '../criterion/parse';
+import type { REnvironmentInformation } from '../../dataflow/environments/environment';
+import type { NodeId } from '../../r-bridge/lang-4.x/ast/model/processing/node-id';
 export declare const slicerLogger: import("tslog").Logger<import("tslog").ILogObj>;
 /**
  * This returns the ids to include in the static backward slice, when slicing with the given seed id's (must be at least one).
@@ -10,7 +13,8 @@ export declare const slicerLogger: import("tslog").Logger<import("tslog").ILogOb
  *
  * @param graph - The dataflow graph to conduct the slicing on.
  * @param ast - The normalized AST of the code (used to get static nesting information of the lexemes in case of control flow dependencies that may have no effect on the slicing scope).
- * @param criteria - The
+ * @param criteria - The criterias to slice on.
  * @param threshold - The maximum number of nodes to visit in the graph. If the threshold is reached, the slice will side with inclusion and drop its minimal guarantee. The limit ensures that the algorithm halts.
  */
 export declare function staticSlicing(graph: DataflowGraph, { idMap }: NormalizedAst, criteria: SlicingCriteria, threshold?: number): Readonly<SliceResult>;
+export declare function updatePotentialAddition(queue: VisitingQueue, id: NodeId, target: NodeId, baseEnvironment: REnvironmentInformation): void;
```
package/slicing/static/static-slicer.js
CHANGED
```diff
@@ -2,6 +2,7 @@
 Object.defineProperty(exports, "__esModule", { value: true });
 exports.slicerLogger = void 0;
 exports.staticSlicing = staticSlicing;
+exports.updatePotentialAddition = updatePotentialAddition;
 const assert_1 = require("../../util/assert");
 const log_1 = require("../../util/log");
 const fingerprint_1 = require("./fingerprint");
@@ -19,7 +20,7 @@ exports.slicerLogger = log_1.log.getSubLogger({ name: 'slicer' });
  *
  * @param graph - The dataflow graph to conduct the slicing on.
  * @param ast - The normalized AST of the code (used to get static nesting information of the lexemes in case of control flow dependencies that may have no effect on the slicing scope).
- * @param criteria - The
+ * @param criteria - The criterias to slice on.
  * @param threshold - The maximum number of nodes to visit in the graph. If the threshold is reached, the slice will side with inclusion and drop its minimal guarantee. The limit ensures that the algorithm halts.
  */
 function staticSlicing(graph, { idMap }, criteria, threshold = 75) {
@@ -71,7 +72,7 @@ function staticSlicing(graph, { idMap }, criteria, threshold = 75) {
         if (currentVertex.tag === vertex_1.VertexType.FunctionCall && !currentVertex.onlyBuiltin) {
             (0, slice_call_1.sliceForCall)(current, currentVertex, graph, queue);
         }
-        const ret = (0, slice_call_1.handleReturns)(queue, currentEdges, baseEnvFingerprint, baseEnvironment);
+        const ret = (0, slice_call_1.handleReturns)(id, queue, currentEdges, baseEnvFingerprint, baseEnvironment);
         if (ret) {
             continue;
         }
@@ -84,12 +85,8 @@ function staticSlicing(graph, { idMap }, criteria, threshold = 75) {
             if (t === 3 /* TraverseEdge.Always */) {
                 queue.add(target, baseEnvironment, baseEnvFingerprint, false);
             }
-            else if (t === 2 /* TraverseEdge.
-
-                if (n) {
-                    queue.add(target, n.baseEnvironment, (0, fingerprint_1.envFingerprint)(n.baseEnvironment), n.onlyForSideEffects);
-                    queue.potentialArguments.delete(target);
-                }
+            else if (t === 2 /* TraverseEdge.OnlyIfBoth */) {
+                updatePotentialAddition(queue, id, target, baseEnvironment);
             }
             else if (t === 1 /* TraverseEdge.SideEffect */) {
                 queue.add(target, baseEnvironment, baseEnvFingerprint, true);
@@ -98,4 +95,21 @@ function staticSlicing(graph, { idMap }, criteria, threshold = 75) {
     }
     return { ...queue.status(), decodedCriteria };
 }
+function updatePotentialAddition(queue, id, target, baseEnvironment) {
+    const n = queue.potentialAdditions.get(target);
+    if (n) {
+        const [addedBy, { baseEnvironment, onlyForSideEffects }] = n;
+        if (addedBy !== id) {
+            queue.add(target, baseEnvironment, (0, fingerprint_1.envFingerprint)(baseEnvironment), onlyForSideEffects);
+            queue.potentialAdditions.delete(target);
+        }
+    }
+    else {
+        queue.potentialAdditions.set(target, [id, {
+                id: target,
+                baseEnvironment,
+                onlyForSideEffects: false
+            }]);
+    }
+}
 //# sourceMappingURL=static-slicer.js.map
```
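`updatePotentialAddition` implements a two-witness rule: a node reached over one of these edges is only promoted into the slice once a second, different node also proposes it. A standalone sketch of the same bookkeeping (simplified types; `NodeId` and `PendingInfo` are stand-ins for flowR's structures):

```ts
type NodeId = string | number;

interface PendingInfo { onlyForSideEffects: boolean }

// Map from candidate node -> [who proposed it first, how it should be enqueued once confirmed]
const potentialAdditions = new Map<NodeId, [NodeId, PendingInfo]>();

/** include `target` only once two distinct nodes have proposed it */
function updatePotentialAddition(proposer: NodeId, target: NodeId, include: (id: NodeId, info: PendingInfo) => void): void {
    const pending = potentialAdditions.get(target);
    if (pending) {
        const [addedBy, info] = pending;
        if (addedBy !== proposer) {
            // a second, different node vouches for the target -> promote it to the real queue
            include(target, info);
            potentialAdditions.delete(target);
        }
    }
    else {
        potentialAdditions.set(target, [proposer, { onlyForSideEffects: false }]);
    }
}

// usage: the same proposer twice does nothing, a second proposer triggers inclusion
updatePotentialAddition('call-1', 'arg-7', id => console.log('include', id));
updatePotentialAddition('call-1', 'arg-7', id => console.log('include', id)); // still pending
updatePotentialAddition('call-2', 'arg-7', id => console.log('include', id)); // logs: include arg-7
```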
package/slicing/static/visiting-queue.d.ts
CHANGED
```diff
@@ -4,10 +4,10 @@ import type { NodeId } from '../../r-bridge/lang-4.x/ast/model/processing/node-i
 export declare class VisitingQueue {
     private readonly threshold;
     private timesHitThreshold;
-    private seen;
-    private idThreshold;
-    private queue;
-
+    private readonly seen;
+    private readonly idThreshold;
+    private readonly queue;
+    potentialAdditions: Map<NodeId, [NodeId, NodeToSlice]>;
     constructor(threshold: number);
     /**
      * Adds a node to the queue if it has not been seen before.
```
package/slicing/static/visiting-queue.js
CHANGED
```diff
@@ -3,14 +3,16 @@ Object.defineProperty(exports, "__esModule", { value: true });
 exports.VisitingQueue = void 0;
 const fingerprint_1 = require("./fingerprint");
 const static_slicer_1 = require("./static-slicer");
+const assert_1 = require("../../util/assert");
 class VisitingQueue {
     threshold;
     timesHitThreshold = 0;
     seen = new Map();
     idThreshold = new Map();
     queue = [];
-    // the set of potential
-
+    // the set of potential additions holds nodes which may be added if a second edge deems them relevant (e.g., found with the `defined-by-on-call` edge)
+    // additionally it holds which node id added the addition so we can separate their inclusion on the structure
+    potentialAdditions = new Map();
     constructor(threshold) {
         this.threshold = threshold;
     }
@@ -48,7 +50,7 @@ class VisitingQueue {
     status() {
         return {
             timesHitThreshold: this.timesHitThreshold,
-            result: new Set(this.seen.values())
+            result: new Set([...this.seen.values()].filter(assert_1.isNotUndefined))
         };
     }
 }
```
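`status()` now filters `undefined` entries out of the `seen` map before building the result set, using the `isNotUndefined` guard from `util/assert`. A standalone sketch of the idiom (the guard is re-implemented here so the snippet runs on its own; the sample map is hypothetical):

```ts
// A type guard lets TypeScript narrow `(T | undefined)[]` down to `T[]` inside filter.
function isNotUndefined<T>(value: T | undefined): value is T {
    return value !== undefined;
}

const seen = new Map<string, number | undefined>([
    ['a', 1],
    ['b', undefined], // e.g. visited but deliberately excluded from the result
    ['c', 3]
]);

const result: Set<number> = new Set([...seen.values()].filter(isNotUndefined));
console.log(result); // Set { 1, 3 }
```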
package/statistics/output/print-stats.js
CHANGED
```diff
@@ -7,9 +7,10 @@ exports.printFeatureStatisticsEntry = printFeatureStatisticsEntry;
 const ansi_1 = require("../../util/ansi");
 const json_1 = require("../../util/json");
 const feature_1 = require("../features/feature");
+const arrays_1 = require("../../util/arrays");
 function minMaxAvgAndMedian(data) {
     data = data.sort((a, b) => a - b);
-    const sum =
+    const sum = (0, arrays_1.arraySum)(data);
     return {
         sum,
         min: data[0],
```
package/statistics/summarizer/post-process/histogram.js
CHANGED
```diff
@@ -11,6 +11,7 @@ const bimap_1 = require("../../../util/bimap");
 const defaultmap_1 = require("../../../util/defaultmap");
 const assert_1 = require("../../../util/assert");
 const summarizer_1 = require("../../../util/summarizer");
+const arrays_1 = require("../../../util/arrays");
 /**
  * Produces column-wise histogram-information based on a {@link ClusterReport}.
  *
@@ -79,7 +80,7 @@ function histograms2table(histograms, countAsDensity = false) {
     (0, assert_1.guard)(histograms.length > 0, 'there must be at least one histogram to convert to a table');
     const mostBins = guardForLargestBinSize(histograms);
     const header = ['bin', 'from', 'to', ...histograms.map(h => JSON.stringify(h.name))];
-    const sums = histograms.map(h =>
+    const sums = histograms.map(h => (0, arrays_1.arraySum)(h.bins));
     const rows = [];
     for (let binIndex = 0; binIndex < mostBins; binIndex++) {
         const row = new Array(histograms.length + 3);
```
package/statistics/summarizer/post-process/post-process-output.js
CHANGED
```diff
@@ -14,6 +14,7 @@ const ansi_1 = require("../../../util/ansi");
 const feature_1 = require("../../features/feature");
 const decorate_1 = require("../../../r-bridge/lang-4.x/ast/model/processing/decorate");
 const file_provider_1 = require("../../output/file-provider");
+const arrays_1 = require("../../../util/arrays");
 /**
  * Post process the collections in a given folder, reducing them in a memory preserving way.
  *
@@ -79,7 +80,7 @@ function printClusterReport(report, limit = 1000) {
     const shortStats = [...report.valueInfoMap.entries()].map(([name, values]) => {
         return {
             name,
-            count: [...values.values()]
+            count: (0, arrays_1.arraySum)([...values.values()]),
             unique: values.size()
         };
     }).sort((a, b) => b.count - a.count).slice(0, limit);
```
package/statistics/summarizer/second-phase/process.js
CHANGED
```diff
@@ -50,7 +50,7 @@ function postProcessMeta(config, filepath, outputPath, logger, metaFeatureInform
     out.write(`file,successfulParsed,${(0, summarizer_1.summarizedMeasurement2CsvHeader)('processing')},failedRequests,${(0, summarizer_1.summarizedMeasurement2CsvHeader)('line-length')},${(0, summarizer_1.summarizedMeasurement2CsvHeader)('lines')},${(0, summarizer_1.summarizedMeasurement2CsvHeader)('characters')},numberOfNormalizedNodes\n`);
     for (const [file, info] of metaFeatureInformation) {
         // we could retrieve these by summing later as well :thinking: however, this makes it more explicit
-        const characters = (0, arrays_1.
+        const characters = (0, arrays_1.arraySum)(info.stats.lines[0]);
         out.write(`${JSON.stringify(file)},${info.stats.successfulParsed},${(0, summarizer_1.summarizedMeasurement2Csv)((0, summarizer_1.summarizeMeasurement)(info.stats.processingTimeMs))},`
             + `${info.stats.failedRequests.length},${(0, summarizer_1.summarizedMeasurement2Csv)((0, summarizer_1.summarizeMeasurement)(info.stats.lines[0]))},${(0, summarizer_1.summarizedMeasurement2Csv)((0, summarizer_1.summarizeMeasurement)([info.stats.lines[0].length]))},${(0, summarizer_1.summarizedMeasurement2Csv)((0, summarizer_1.summarizeMeasurement)([characters]))},${info.stats.numberOfNormalizedNodes[0]}\n`);
         fileStatisticsSummary.successfulParsed.push(info.stats.successfulParsed);
@@ -60,8 +60,8 @@ function postProcessMeta(config, filepath, outputPath, logger, metaFeatureInform
         fileStatisticsSummary.characters.push(characters);
         fileStatisticsSummary.numberOfNormalizedNodes.push(info.stats.numberOfNormalizedNodes[0]);
     }
-    out.write(`all,${(0, arrays_1.
-        + `${(0, arrays_1.
+    out.write(`all,${(0, arrays_1.arraySum)(fileStatisticsSummary.successfulParsed)},${(0, summarizer_1.summarizedMeasurement2Csv)((0, summarizer_1.summarizeMeasurement)(fileStatisticsSummary.processingTimeMs))},`
+        + `${(0, arrays_1.arraySum)(fileStatisticsSummary.failedRequests)},${(0, summarizer_1.summarizedMeasurement2Csv)((0, summarizer_1.summarizeMeasurement)(fileStatisticsSummary.lines.flat()))},${(0, summarizer_1.summarizedMeasurement2Csv)((0, summarizer_1.summarizeMeasurement)(fileStatisticsSummary.lines.map(l => l.length)))},${(0, summarizer_1.summarizedMeasurement2Csv)((0, summarizer_1.summarizeMeasurement)(fileStatisticsSummary.characters))},${(0, arrays_1.arraySum)(fileStatisticsSummary.numberOfNormalizedNodes)}\n`);
     out.close();
 }
 /**
```
package/util/arrays.d.ts
CHANGED
```diff
@@ -46,7 +46,7 @@ export declare function getUniqueCombinationsOfSize<T>(array: T[], minSize?: num
 /**
  * Returns the sum of all elements in the given array
  */
-export declare function
+export declare function arraySum(arr: readonly number[]): number;
 /**
  * Converts an array into a bag data-structure (in the form of a map mapping the entries/keys to their counts)
  */
```
package/util/arrays.js
CHANGED
```diff
@@ -5,7 +5,7 @@ exports.partitionArray = partitionArray;
 exports.allPermutations = allPermutations;
 exports.partition = partition;
 exports.getUniqueCombinationsOfSize = getUniqueCombinationsOfSize;
-exports.
+exports.arraySum = arraySum;
 exports.array2bag = array2bag;
 exports.arrayEqual = arrayEqual;
 const assert_1 = require("./assert");
@@ -143,7 +143,7 @@ function* getUniqueCombinationsOfSize(array, minSize = 0, maxSize = array.length
 /**
  * Returns the sum of all elements in the given array
  */
-function
+function arraySum(arr) {
     let sum = 0;
     for (const elem of arr) {
         sum += elem;
@@ -167,7 +167,7 @@ function arrayEqual(a, b) {
     if (a.length !== b.length) {
         return false;
     }
-    for (let i = 0; i < a.length; ++
+    for (let i = 0; i < a.length; i++) {
         if (a[i] !== b[i]) {
             return false;
         }
```
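The previously truncated export is `arraySum`, which the statistics and summarizer modules above now use instead of ad-hoc reductions. Usage, following the signature declared in `arrays.d.ts` (the deep-import path mirrors the compiled layout and may need adjusting for how the package is consumed):

```ts
import { arraySum } from '@eagleoutice/flowr/util/arrays';

console.log(arraySum([3, 0, 4, 1])); // 8
console.log(arraySum([]));           // 0 -- the loop body never runs, so the initial 0 is returned
```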
package/util/assert.d.ts
CHANGED
```diff
@@ -6,7 +6,7 @@ export declare function isNotNull<T>(x: T | null): x is T;
 export type GuardMessage = string | (() => string);
 /**
  * @param assertion - will be asserted
- * @param message - if a string, will
+ * @param message - if a string, we will use it as the error message, if it is a function, we will call it to produce the error message (can be used to avoid costly message generations)
  * @throws GuardError - if the assertion fails
  */
 export declare function guard(assertion: boolean | undefined, message?: GuardMessage): asserts assertion;
```
package/util/assert.js
CHANGED
```diff
@@ -6,7 +6,7 @@ exports.isNotUndefined = isNotUndefined;
 exports.isUndefined = isUndefined;
 exports.isNotNull = isNotNull;
 exports.guard = guard;
-/*
+/* v8 ignore next */
 function assertUnreachable(x) {
     throw new Error(`Unexpected object: ${JSON.stringify(x)}`);
 }
@@ -27,10 +27,11 @@ class GuardError extends Error {
 }
 /**
  * @param assertion - will be asserted
- * @param message - if a string, will
+ * @param message - if a string, we will use it as the error message, if it is a function, we will call it to produce the error message (can be used to avoid costly message generations)
  * @throws GuardError - if the assertion fails
  */
 function guard(assertion, message = 'Assertion failed') {
+    /* v8 ignore next 3 */
     if (!assertion) {
         throw new GuardError(typeof message === 'string' ? message : message());
     }
```
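The restored doc comment spells out the point of `GuardMessage`: a function message is only evaluated when the assertion actually fails, so expensive message construction is skipped on the happy path. A standalone sketch of that contract (re-implemented here so it runs on its own):

```ts
type GuardMessage = string | (() => string);

function guard(assertion: boolean | undefined, message: GuardMessage = 'Assertion failed'): asserts assertion {
    if (!assertion) {
        // the lazy message is only produced here, on failure
        throw new Error(typeof message === 'string' ? message : message());
    }
}

const payload = { nested: { values: [1, 2, 3] } }; // stand-in for something expensive to serialize
// cheap constant message: fine to pass eagerly
guard(1 + 1 === 2, 'arithmetic broke');
// costly message: wrap it in a function so JSON.stringify only runs if the guard actually fails
guard(typeof payload === 'object', () => `unexpected payload: ${JSON.stringify(payload)}`);
```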
package/util/cfg/cfg.js
CHANGED
```diff
@@ -381,9 +381,11 @@ function cfgAccess(access, name, accessors) {
 }
 function cfgUnaryOp(unary, operand) {
     const graph = operand.graph;
-    const result = { ...operand, graph, exitPoints: [unary.info.id] };
     graph.addVertex({ id: unary.info.id, name: unary.type, type: "end-marker" /* CfgVertexType.EndMarker */ });
-
+    for (const entry of operand.exitPoints) {
+        graph.addEdge(unary.info.id, entry, { label: 'FD' });
+    }
+    return { ...operand, graph, exitPoints: [unary.info.id] };
 }
 function cfgExprList(_node, _grouping, expressions) {
     const result = { graph: new ControlFlowGraph(), breaks: [], nexts: [], returns: [], exitPoints: [], entryPoints: [] };
```
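`cfgUnaryOp` now wires `FD` (flow-dependency) edges from the freshly added end-marker vertex to every exit point of the operand's subgraph, and only then returns the marker as the single new exit point. A toy sketch of that pattern (the graph structure below is illustrative, not flowR's `ControlFlowGraph`):

```ts
// Toy structures for illustration; flowR's graph exposes the same two operations (addVertex/addEdge).
interface ToyCfg {
    vertices: Map<string, { name: string; type: string }>;
    edges: { from: string; to: string; label: string }[];
}

// append an end-marker vertex and connect it to every exit point of the operand subgraph
function appendEndMarker(graph: ToyCfg, markerId: string, markerName: string, exitPoints: readonly string[]): string[] {
    graph.vertices.set(markerId, { name: markerName, type: 'end-marker' });
    for (const exit of exitPoints) {
        graph.edges.push({ from: markerId, to: exit, label: 'FD' });
    }
    // the marker becomes the single exit point of the combined result
    return [markerId];
}

const cfg: ToyCfg = { vertices: new Map(), edges: [] };
console.log(appendEndMarker(cfg, '3-exit', 'RUnaryOp', ['2'])); // [ '3-exit' ]
```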
package/util/mermaid/cfg.js
CHANGED
```diff
@@ -16,7 +16,7 @@ function cfgToMermaid(cfg, normalizedAst, prefix = '') {
             output += ` n${id}[${name}]\n`;
         }
         else {
-            output += ` n${id}((
+            output += String(id).endsWith('-exit') ? ` n${id}((${id}))\n` : ` n${id}[[${id}]]\n`;
         }
     }
     for (const [from, targets] of cfg.graph.edges()) {
```
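The mermaid printer now picks a node shape from the vertex id: ids ending in `-exit` render as circles (`((…))`), other unnamed vertices as subroutine boxes (`[[…]]`). A small sketch of the shape selection; `mermaidNodeLine` is a hypothetical helper name:

```ts
function mermaidNodeLine(id: string, name?: string): string {
    if (name !== undefined) {
        return ` n${id}[${name}]\n`;               // vertices with a resolved name keep the rectangular node
    }
    return String(id).endsWith('-exit')
        ? ` n${id}((${id}))\n`                     // exit markers render as circles
        : ` n${id}[[${id}]]\n`;                    // other unnamed vertices as subroutine boxes
}

console.log(mermaidNodeLine('5', 'RIfThenElse'));
console.log(mermaidNodeLine('5-exit'));
```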
package/util/summarizer.js
CHANGED
```diff
@@ -27,11 +27,11 @@ function summarizeMeasurement(data, totalNumberOfDataPoints) {
     const min = sorted[0];
     const max = sorted[sorted.length - 1];
     const median = sorted[Math.floor(sorted.length / 2)];
-    const total = (0, arrays_1.
+    const total = (0, arrays_1.arraySum)(sorted);
     const length = totalNumberOfDataPoints ?? sorted.length;
     const mean = total / length;
     // sqrt(sum(x-mean)^2 / n)
-    const std = Math.sqrt(sorted.map(x => (x - mean) ** 2)
+    const std = Math.sqrt((0, arrays_1.arraySum)(sorted.map(x => (x - mean) ** 2)) / length);
     return { min, max, median, mean, std, total };
 }
 //# sourceMappingURL=summarizer.js.map
```
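With `arraySum` in place, `summarizeMeasurement` computes the population standard deviation exactly as the inline comment states: `sqrt(sum(x-mean)^2 / n)`. A quick standalone check of that formula on a toy data set:

```ts
const data = [2, 4, 4, 4, 5, 5, 7, 9];
const sum = data.reduce((a, b) => a + b, 0);
const mean = sum / data.length;                                                            // 5
const std = Math.sqrt(data.map(x => (x - mean) ** 2).reduce((a, b) => a + b, 0) / data.length);
console.log(mean, std);                                                                    // 5 2
```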
package/util/version.js
CHANGED
```diff
@@ -3,7 +3,7 @@ Object.defineProperty(exports, "__esModule", { value: true });
 exports.flowrVersion = flowrVersion;
 const semver_1 = require("semver");
 // this is automatically replaced with the current version by release-it
-const version = '2.1.7';
+const version = '2.1.9';
 function flowrVersion() {
     return new semver_1.SemVer(version);
 }
```
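The release bumps the embedded version string to 2.1.9; `flowrVersion()` wraps it in a `SemVer` instance from the `semver` package, so consumers can compare versions programmatically. A usage sketch (the deep-import path is illustrative):

```ts
import { flowrVersion } from '@eagleoutice/flowr/util/version';

const v = flowrVersion();                 // a SemVer instance for '2.1.9'
console.log(v.major, v.minor, v.patch);   // 2 1 9
console.log(v.compare('2.1.7') > 0);      // true -- newer than the previously published version
```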