@eagleoutice/flowr 2.0.7 → 2.0.9
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/benchmark/stats/print.js +19 -15
- package/benchmark/stats/size-of.js +1 -1
- package/benchmark/summarizer/summarizer.js +9 -8
- package/cli/benchmark-app.js +7 -5
- package/cli/benchmark-helper-app.js +3 -0
- package/cli/repl/server/connection.d.ts +4 -0
- package/cli/repl/server/connection.js +25 -11
- package/dataflow/environments/diff.js +2 -1
- package/dataflow/environments/environment.d.ts +3 -2
- package/dataflow/environments/environment.js +15 -2
- package/dataflow/graph/graph.d.ts +1 -0
- package/dataflow/graph/graph.js +3 -1
- package/dataflow/graph/vertex.d.ts +1 -1
- package/dataflow/internal/linker.d.ts +1 -1
- package/dataflow/internal/linker.js +2 -2
- package/dataflow/internal/process/functions/call/named-call-handling.js +1 -0
- package/package.json +1 -1
- package/slicing/static/slice-call.js +12 -6
- package/slicing/static/static-slicer.js +7 -5
- package/slicing/static/visiting-queue.d.ts +1 -1
- package/slicing/static/visiting-queue.js +1 -1
- package/util/version.js +1 -1
package/benchmark/stats/print.js
CHANGED
@@ -10,6 +10,9 @@ function formatNanoseconds(nanoseconds) {
     if (nanoseconds < 0) {
         return '??';
     }
+    else if (!Number.isFinite(nanoseconds)) {
+        return nanoseconds > 0 ? '∞' : '-∞';
+    }
     const wholeNanos = typeof nanoseconds === 'bigint' ? nanoseconds : BigInt(Math.round(nanoseconds));
     const nanos = wholeNanos % BigInt(1e+6);
     const wholeMillis = wholeNanos / BigInt(1e+6);
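Note: the added `Number.isFinite` guard matters because `BigInt(Infinity)` and `BigInt(NaN)` throw a `RangeError`, so the formatter has to bail out before the `BigInt` conversion. A minimal standalone sketch of the same guard pattern (simplified, not the package's actual helper):

```typescript
// Bail out before BigInt() sees a non-finite number, since
// BigInt(Infinity) / BigInt(NaN) throw a RangeError.
function formatNanos(nanoseconds: number | bigint): string {
    if (nanoseconds < 0) {
        return '??';
    } else if (typeof nanoseconds === 'number' && !Number.isFinite(nanoseconds)) {
        return nanoseconds > 0 ? '∞' : '-∞';
    }
    // safe now: the value is a bigint or a finite number
    const wholeNanos = typeof nanoseconds === 'bigint' ? nanoseconds : BigInt(Math.round(nanoseconds));
    return `${wholeNanos / 1000000n}.${wholeNanos % 1000000n}ms`;
}

console.log(formatNanos(1_500_000));                // "1.500000ms"
console.log(formatNanos(Number.POSITIVE_INFINITY)); // "∞" instead of a RangeError
```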
@@ -145,7 +148,21 @@ Dataflow:
 }
 exports.stats2string = stats2string;
 function ultimateStats2String(stats) {
-
+    const slice = stats.totalSlices > 0 ? `Slice summary for:
+Total: ${formatSummarizedTimeMeasure(stats.perSliceMeasurements.get('total'))}
+Slice creation: ${formatSummarizedTimeMeasure(stats.perSliceMeasurements.get('static slicing'))}
+Slice creation per token in slice: ${formatSummarizedTimeMeasure(stats.sliceTimePerToken.normalized)}
+Slice creation per R token in slice:${formatSummarizedTimeMeasure(stats.sliceTimePerToken.raw)}
+Reconstruction: ${formatSummarizedTimeMeasure(stats.perSliceMeasurements.get('reconstruct code'))}
+Reconstruction per token in slice: ${formatSummarizedTimeMeasure(stats.reconstructTimePerToken.normalized)}
+Reconstruction per R token in slice:${formatSummarizedTimeMeasure(stats.reconstructTimePerToken.raw)}
+Total per token in slice: ${formatSummarizedTimeMeasure(stats.totalPerSliceTimePerToken.normalized)}
+Total per R token in slice: ${formatSummarizedTimeMeasure(stats.totalPerSliceTimePerToken.raw)}
+Failed to Re-Parse: ${pad(stats.failedToRepParse)}/${stats.totalSlices}
+Times hit Threshold: ${pad(stats.timesHitThreshold)}/${stats.totalSlices}
+${reduction2String('Reductions', stats.reduction)}
+${reduction2String('Reductions without comments and empty lines', stats.reductionNoFluff)}` : 'No slices';
+    // Used Slice Criteria Sizes: ${formatSummarizedMeasure(stats.perSliceMeasurements.sliceCriteriaSizes)}
     return `
 Summarized: ${stats.totalRequests} requests and ${stats.totalSlices} slices
 Shell init time: ${formatSummarizedTimeMeasure(stats.commonMeasurements.get('initialize R session'))}
@@ -161,20 +178,7 @@ Dataflow creation per R token:${formatSummarizedTimeMeasure(stats.dataflowTimePe
 Total common time per token: ${formatSummarizedTimeMeasure(stats.totalCommonTimePerToken.normalized)}
 Total common time per R token:${formatSummarizedTimeMeasure(stats.totalCommonTimePerToken.raw)}
 
-
-Total: ${formatSummarizedTimeMeasure(stats.perSliceMeasurements.get('total'))}
-Slice creation: ${formatSummarizedTimeMeasure(stats.perSliceMeasurements.get('static slicing'))}
-Slice creation per token in slice: ${formatSummarizedTimeMeasure(stats.sliceTimePerToken.normalized)}
-Slice creation per R token in slice:${formatSummarizedTimeMeasure(stats.sliceTimePerToken.raw)}
-Reconstruction: ${formatSummarizedTimeMeasure(stats.perSliceMeasurements.get('reconstruct code'))}
-Reconstruction per token in slice: ${formatSummarizedTimeMeasure(stats.reconstructTimePerToken.normalized)}
-Reconstruction per R token in slice:${formatSummarizedTimeMeasure(stats.reconstructTimePerToken.raw)}
-Total per token in slice: ${formatSummarizedTimeMeasure(stats.totalPerSliceTimePerToken.normalized)}
-Total per R token in slice: ${formatSummarizedTimeMeasure(stats.totalPerSliceTimePerToken.raw)}
-Failed to Re-Parse: ${pad(stats.failedToRepParse)}/${stats.totalSlices}
-Times hit Threshold: ${pad(stats.timesHitThreshold)}/${stats.totalSlices}
-${reduction2String('Reductions', stats.reduction)}
-${reduction2String('Reductions without comments and empty lines', stats.reductionNoFluff)}
+${slice}
 
 Shell close: ${formatSummarizedTimeMeasure(stats.commonMeasurements.get('close R session'))}
 Total: ${formatSummarizedTimeMeasure(stats.commonMeasurements.get('total'))}
package/benchmark/summarizer/summarizer.js
CHANGED
@@ -21,20 +21,21 @@ class BenchmarkSummarizer extends summarizer_1.Summarizer {
     async preparationPhase() {
         this.removeIfExists(this.summaryFile());
         this.removeIfExists(this.config.intermediateOutputPath);
-        fs_1.default.mkdirSync(this.config.intermediateOutputPath);
-
+        fs_1.default.mkdirSync(this.config.intermediateOutputPath, { recursive: true });
+        let fileNum = 0;
         const outputPathsPerRun = new defaultmap_1.DefaultMap(() => []);
-
-
-            const outputDir = path_1.default.join(this.config.intermediateOutputPath, path_1.default.
-            fs_1.default.mkdirSync(outputDir);
+        // recursively find all files in all the input path subdirectories
+        for await (const file of (0, files_1.getAllFiles)(this.config.inputPath)) {
+            const outputDir = path_1.default.join(this.config.intermediateOutputPath, path_1.default.relative(this.config.inputPath, file));
+            fs_1.default.mkdirSync(outputDir, { recursive: true });
             const textOutputPath = path_1.default.join(outputDir, 'summary.log');
             // generate measurements for each run
-            await (0, files_1.readLineByLine)(
+            await (0, files_1.readLineByLine)(file, (line, lineNumber) => {
                 const runOutputPath = path_1.default.join(outputDir, `run-${lineNumber}.json`);
                 outputPathsPerRun.get(lineNumber).push(runOutputPath);
-                return (0, input_1.processRunMeasurement)(line,
+                return (0, input_1.processRunMeasurement)(line, fileNum, lineNumber, textOutputPath, runOutputPath);
             });
+            fileNum++;
         }
         // generate combined measurements for each file per run
         for (const [run, paths] of outputPathsPerRun.entries()) {
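Note: several call sites in this release now pass `{ recursive: true }` to `fs.mkdirSync`. With that option, Node creates any missing parent directories and no longer throws `EEXIST` when the directory already exists. A small demonstration (temporary paths only):

```typescript
import fs from 'fs';
import os from 'os';
import path from 'path';

// With { recursive: true }, mkdirSync creates all missing parents and is a
// no-op (instead of throwing EEXIST) when the directory already exists.
const base = fs.mkdtempSync(path.join(os.tmpdir(), 'flowr-bench-'));
const nested = path.join(base, 'intermediate', 'per-file', 'run-0');

fs.mkdirSync(nested, { recursive: true });
fs.mkdirSync(nested, { recursive: true }); // second call does not throw

console.log(fs.existsSync(nested)); // true
```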
package/cli/benchmark-app.js
CHANGED
@@ -32,13 +32,15 @@ function removeIfExists(summarizedRaw) {
 }
 async function benchmark() {
     removeIfExists(options.output);
-    fs_1.default.mkdirSync(options.output);
+    fs_1.default.mkdirSync(options.output, { recursive: true });
     console.log(`Storing output in ${options.output}`);
     console.log(`Using ${options.parallel} parallel executors`);
     // we do not use the limit argument to be able to pick the limit randomly
     const files = [];
-    for
-
+    for (const input of options.input) {
+        for await (const file of (0, files_1.allRFiles)(input)) {
+            files.push({ request: file, baseDir: input });
+        }
     }
     if (options.limit) {
         log_1.log.info(`limiting to ${options.limit} files`);
@@ -48,9 +50,9 @@ async function benchmark() {
     const limit = options.limit ?? files.length;
     const verboseAdd = options.verbose ? ['--verbose'] : [];
     const args = files.map((f, i) => [
-        '--input', f.content,
+        '--input', f.request.content,
         '--file-id', `${i}`,
-        '--output', path_1.default.join(options.output,
+        '--output', path_1.default.join(options.output, path_1.default.relative(f.baseDir, `${f.request.content}.json`)),
        '--slice', options.slice, ...verboseAdd
     ]);
     const runs = options.runs ?? 1;
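Note: the benchmark app now remembers the input directory (`baseDir`) each discovered file came from, so the per-file output can be mirrored at the same relative location under the output directory via `path.relative`. A sketch of just that mirroring step; `allRFiles` is flowR's own helper and the paths below are made up for illustration:

```typescript
import path from 'path';

// Mirror an input file's location under the output root: the path relative to
// its base directory is re-rooted below outputRoot, with a .json suffix added.
function outputPathFor(outputRoot: string, baseDir: string, file: string): string {
    return path.join(outputRoot, path.relative(baseDir, `${file}.json`));
}

console.log(outputPathFor('/tmp/bench-out', '/data/r-scripts', '/data/r-scripts/pkg/analysis.R'));
// => /tmp/bench-out/pkg/analysis.R.json
```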
package/cli/benchmark-helper-app.js
CHANGED
@@ -10,6 +10,7 @@ const json_1 = require("../util/json");
 const script_1 = require("./common/script");
 const slicer_1 = require("../benchmark/slicer");
 const all_variables_1 = require("../slicing/criterion/filters/all-variables");
+const path_1 = __importDefault(require("path"));
 const options = (0, script_1.processCommandLineArgs)('benchmark-helper', [], {
     subtitle: 'Will slice for all possible variables, signal by exit code if slicing was successful, and can be run standalone',
     examples: [
@@ -30,6 +31,7 @@ async function benchmark() {
     // prefix for printing to console, includes file id and run number if present
     const prefix = `[${options.input}${options['file-id'] !== undefined ? ` (file ${options['file-id']}, run ${options['run-num']})` : ''}]`;
     console.log(`${prefix} Appending output to ${options.output}`);
+    fs_1.default.mkdirSync(path_1.default.parse(options.output).dir, { recursive: true });
     // ensure the file exists
     const fileStat = fs_1.default.statSync(options.input);
     (0, assert_1.guard)(fileStat.isFile(), `File ${options.input} does not exist or is no file`);
@@ -61,6 +63,7 @@ async function benchmark() {
         stats
     };
     // append line by line
+    console.log(`Appending benchmark of ${options.input} to ${options.output}`);
     fs_1.default.appendFileSync(options.output, `${JSON.stringify(output, json_1.jsonReplacer)}\n`);
 }
 catch (e) {
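Note: the helper now creates the output file's parent directory before appending, because `appendFileSync` creates the file itself if needed but not its parent directories. A minimal equivalent using only Node built-ins (the path below is hypothetical):

```typescript
import fs from 'fs';
import path from 'path';

// Ensure the parent directory exists, then append one JSON line to the file.
function appendLine(outputFile: string, line: string): void {
    fs.mkdirSync(path.parse(outputFile).dir, { recursive: true });
    fs.appendFileSync(outputFile, `${line}\n`);
}

appendLine('/tmp/flowr-demo/nested/out.jsonl', JSON.stringify({ ok: true }));
```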
package/cli/repl/server/connection.d.ts
CHANGED
@@ -1,5 +1,8 @@
 import type { Socket } from './net';
+import { DEFAULT_SLICING_PIPELINE } from '../../../core/steps/pipeline/default-pipelines';
 import type { RShell } from '../../../r-bridge/shell';
+import type { PipelineOutput } from '../../../core/steps/pipeline/pipeline';
+import type { DeepPartial } from 'ts-essentials';
 /**
  * Each connection handles a single client, answering to its requests.
  * There is no need to construct this class manually, {@link FlowRServer} will do it for you.
@@ -19,3 +22,4 @@ export declare class FlowRServerConnection {
     private handleSliceRequest;
     private handleRepl;
 }
+export declare function sanitizeAnalysisResults(results: Partial<PipelineOutput<typeof DEFAULT_SLICING_PIPELINE>>): DeepPartial<PipelineOutput<typeof DEFAULT_SLICING_PIPELINE>>;
package/cli/repl/server/connection.js
CHANGED
@@ -1,6 +1,6 @@
 "use strict";
 Object.defineProperty(exports, "__esModule", { value: true });
-exports.FlowRServerConnection = void 0;
+exports.sanitizeAnalysisResults = exports.FlowRServerConnection = void 0;
 const send_1 = require("./send");
 const validate_1 = require("./validate");
 const analysis_1 = require("./messages/analysis");
@@ -103,6 +103,7 @@ class FlowRServerConnection {
             cfg = (0, cfg_1.extractCFG)(results.normalize);
         }
         const config = () => ({ context: message.filename ?? 'unknown', getId: (0, quads_1.defaultQuadIdGenerator)() });
+        const sanitizedResults = sanitizeAnalysisResults(results);
         if (message.format === 'n-quads') {
             (0, send_1.sendMessage)(this.socket, {
                 type: 'response-file-analysis',
@@ -110,9 +111,9 @@ class FlowRServerConnection {
                 id: message.id,
                 cfg: cfg ? (0, cfg_1.cfg2quads)(cfg, config()) : undefined,
                 results: {
-                    parse: await (0, print_1.printStepResult)(_00_parse_1.PARSE_WITH_R_SHELL_STEP,
-                    normalize: await (0, print_1.printStepResult)(_10_normalize_1.NORMALIZE,
-                    dataflow: await (0, print_1.printStepResult)(_20_dataflow_1.STATIC_DATAFLOW,
+                    parse: await (0, print_1.printStepResult)(_00_parse_1.PARSE_WITH_R_SHELL_STEP, sanitizedResults.parse, 5 /* StepOutputFormat.RdfQuads */, config()),
+                    normalize: await (0, print_1.printStepResult)(_10_normalize_1.NORMALIZE, sanitizedResults.normalize, 5 /* StepOutputFormat.RdfQuads */, config()),
+                    dataflow: await (0, print_1.printStepResult)(_20_dataflow_1.STATIC_DATAFLOW, sanitizedResults.dataflow, 5 /* StepOutputFormat.RdfQuads */, config())
                 }
             });
         }
@@ -122,13 +123,7 @@ class FlowRServerConnection {
                 format: 'json',
                 id: message.id,
                 cfg,
-                results:
-                    ...results,
-                    normalize: {
-                        ...results.normalize,
-                        idMap: undefined
-                    }
-                }
+                results: sanitizedResults
             });
         }
     }
@@ -215,4 +210,23 @@ class FlowRServerConnection {
     }
 }
 exports.FlowRServerConnection = FlowRServerConnection;
+function sanitizeAnalysisResults(results) {
+    return {
+        ...results,
+        normalize: {
+            ...results.normalize,
+            idMap: undefined
+        },
+        dataflow: {
+            ...results.dataflow,
+            graph: {
+                ...results.dataflow?.graph,
+                functionCache: undefined,
+                // @ts-expect-error this is private, but we want to sanitize it for the purpose of json serialization
+                _idMap: undefined
+            }
+        }
+    };
+}
+exports.sanitizeAnalysisResults = sanitizeAnalysisResults;
 //# sourceMappingURL=connection.js.map
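Note: `sanitizeAnalysisResults` strips members that are large or awkward to serialize (the normalizer's id map, the graph's function cache and private id map) before the analysis response is stringified, without mutating the original results. A generic hedged sketch of that pattern with simplified stand-in types (not flowR's real interfaces):

```typescript
// Replace heavy or non-serializable members with undefined so JSON.stringify
// skips them, while the original object is left untouched.
interface DemoResults {
    normalize: { ast: unknown; idMap?: Map<number, unknown> };
    dataflow: { graph: { vertices: unknown[]; functionCache?: Map<number, unknown> } };
}

function sanitize(results: DemoResults): DemoResults {
    return {
        ...results,
        normalize: { ...results.normalize, idMap: undefined },
        dataflow: {
            ...results.dataflow,
            graph: { ...results.dataflow.graph, functionCache: undefined }
        }
    };
}

const results: DemoResults = {
    normalize: { ast: { type: 'exprlist' }, idMap: new Map([[0, {}]]) },
    dataflow: { graph: { vertices: [], functionCache: new Map() } }
};

console.log(JSON.stringify(sanitize(results))); // idMap/functionCache omitted
console.log(results.normalize.idMap?.size);     // 1 — the source object keeps them
```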
package/dataflow/environments/diff.js
CHANGED
@@ -2,6 +2,7 @@
 Object.defineProperty(exports, "__esModule", { value: true });
 exports.diffEnvironmentInformation = exports.diffEnvironment = exports.diffIdentifierReferences = void 0;
 const diff_1 = require("../../util/diff");
+const environment_1 = require("./environment");
 const json_1 = require("../../util/json");
 const info_1 = require("../info");
 const built_in_1 = require("./built-in");
@@ -75,7 +76,7 @@ exports.diffEnvironment = diffEnvironment;
 function diffEnvironmentInformation(a, b, info) {
     if (a === undefined || b === undefined) {
         if (a !== b) {
-            info.report.addComment(`${info.position}Different environments: ${JSON.stringify(a,
+            info.report.addComment(`${info.position}Different environments: ${JSON.stringify(a, environment_1.builtInEnvJsonReplacer)} vs. ${JSON.stringify(b, environment_1.builtInEnvJsonReplacer)}`);
         }
         return;
     }
package/dataflow/environments/environment.d.ts
CHANGED
@@ -12,7 +12,7 @@ export declare function makeAllMaybe(references: readonly IdentifierReference[]
 export type EnvironmentMemory = Map<Identifier, IdentifierDefinition[]>;
 export interface IEnvironment {
     /** unique and internally generated identifier -- will not be used for comparison but assists debugging for tracking identities */
-    readonly id:
+    readonly id: number;
     /** Lexical parent of the environment, if any (can be manipulated by R code) */
     parent: IEnvironment;
     /**
@@ -21,7 +21,7 @@ export interface IEnvironment {
     memory: EnvironmentMemory;
 }
 export declare class Environment implements IEnvironment {
-    readonly id:
+    readonly id: number;
     parent: IEnvironment;
     memory: Map<Identifier, IdentifierDefinition[]>;
     constructor(parent: IEnvironment);
@@ -41,3 +41,4 @@ export interface REnvironmentInformation {
 }
 export declare const BuiltInEnvironment: Environment;
 export declare function initializeCleanEnvironments(fullBuiltIns?: boolean): REnvironmentInformation;
+export declare function builtInEnvJsonReplacer(k: unknown, v: unknown): unknown;
package/dataflow/environments/environment.js
CHANGED
@@ -1,8 +1,9 @@
 "use strict";
 Object.defineProperty(exports, "__esModule", { value: true });
-exports.initializeCleanEnvironments = exports.BuiltInEnvironment = exports.Environment = exports.makeAllMaybe = exports.makeReferenceMaybe = void 0;
+exports.builtInEnvJsonReplacer = exports.initializeCleanEnvironments = exports.BuiltInEnvironment = exports.Environment = exports.makeAllMaybe = exports.makeReferenceMaybe = void 0;
 const built_in_1 = require("./built-in");
 const resolve_by_name_1 = require("./resolve-by-name");
+const json_1 = require("../../util/json");
 function makeReferenceMaybe(ref, graph, environments, includeDefs, defaultCd = undefined) {
     const node = graph.get(ref.nodeId, true);
     if (includeDefs) {
@@ -38,7 +39,7 @@ function makeAllMaybe(references, graph, environments, includeDefs, defaultCd =
 exports.makeAllMaybe = makeAllMaybe;
 let environmentIdCounter = 0;
 class Environment {
-    id =
+    id = environmentIdCounter++;
     parent;
     memory;
     constructor(parent) {
@@ -64,4 +65,16 @@ function initializeCleanEnvironments(fullBuiltIns = true) {
     };
 }
 exports.initializeCleanEnvironments = initializeCleanEnvironments;
+function builtInEnvJsonReplacer(k, v) {
+    if (v === exports.BuiltInEnvironment) {
+        return '<BuiltInEnvironment>';
+    }
+    else if (v === EmptyBuiltInEnvironment) {
+        return '<EmptyBuiltInEnvironment>';
+    }
+    else {
+        return (0, json_1.jsonReplacer)(k, v);
+    }
+}
+exports.builtInEnvJsonReplacer = builtInEnvJsonReplacer;
 //# sourceMappingURL=environment.js.map
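Note: `builtInEnvJsonReplacer` is passed as the replacer argument to `JSON.stringify` (see diff.js above) so the shared built-in environment collapses to a short marker instead of being serialized in full. A standalone illustration of that mechanism with stand-in objects, not flowR's real environment structures:

```typescript
// JSON.stringify calls the replacer for every key/value pair, so a single
// shared sentinel object can be collapsed to a short marker string.
const BUILT_IN_ENV = { name: 'built-in', memory: new Map<string, unknown>() };

function envReplacer(_key: string, value: unknown): unknown {
    if (value === BUILT_IN_ENV) {
        return '<BuiltInEnvironment>';
    }
    // Maps are not JSON-serializable by default; spell them out as entry pairs.
    return value instanceof Map ? [...value.entries()] : value;
}

const env = { id: 42, parent: BUILT_IN_ENV, memory: new Map([['x', { kind: 'variable' }]]) };
console.log(JSON.stringify(env, envReplacer));
// {"id":42,"parent":"<BuiltInEnvironment>","memory":[["x",{"kind":"variable"}]]}
```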
package/dataflow/graph/graph.d.ts
CHANGED
@@ -47,6 +47,7 @@ type EdgeData<Edge extends DataflowGraphEdge> = Omit<Edge, 'from' | 'to' | 'type
 export declare class DataflowGraph<Vertex extends DataflowGraphVertexInfo = DataflowGraphVertexInfo, Edge extends DataflowGraphEdge = DataflowGraphEdge> {
     private static DEFAULT_ENVIRONMENT;
     private _idMap;
+    readonly functionCache: Map<NodeId, Set<DataflowGraphVertexInfo>>;
     constructor(idMap: AstIdMap | undefined);
     /** Contains the vertices of the root level graph (i.e., included those vertices from the complete graph, that are nested within function definitions) */
     private rootVertices;
package/dataflow/graph/graph.js
CHANGED
@@ -42,6 +42,8 @@ function extractEdgeIds(from, to) {
 class DataflowGraph {
     static DEFAULT_ENVIRONMENT = undefined;
     _idMap;
+    // this should be linked separately
+    functionCache = new Map();
     constructor(idMap) {
         DataflowGraph.DEFAULT_ENVIRONMENT ??= (0, environment_1.initializeCleanEnvironments)();
         this._idMap = idMap;
@@ -155,7 +157,7 @@ class DataflowGraph {
         if (oldVertex !== undefined) {
             return this;
         }
-        const fallback = vertex.tag === "variable-definition" /* VertexType.VariableDefinition */ || vertex.tag === "use" /* VertexType.Use */ || vertex.tag === "value" /* VertexType.Value */ ? undefined : DataflowGraph.DEFAULT_ENVIRONMENT;
+        const fallback = vertex.tag === "variable-definition" /* VertexType.VariableDefinition */ || vertex.tag === "use" /* VertexType.Use */ || vertex.tag === "value" /* VertexType.Value */ || (vertex.tag === "function-call" /* VertexType.FunctionCall */ && vertex.onlyBuiltin) ? undefined : DataflowGraph.DEFAULT_ENVIRONMENT;
         // keep a clone of the original environment
         const environment = vertex.environment === undefined ? fallback : (0, clone_1.cloneEnvironmentInformation)(vertex.environment);
         this.vertexInformation.set(vertex.id, {
package/dataflow/graph/vertex.d.ts
CHANGED
@@ -62,7 +62,7 @@ export interface DataflowGraphVertexFunctionCall extends DataflowGraphVertexBase
     args: FunctionArgument[];
     /** a performance flag to indicate that the respective call is _only_ calling a builtin function without any df graph attached */
     onlyBuiltin: boolean;
-
+    environment: REnvironmentInformation | undefined;
 }
 /**
  * Arguments required to construct a vertex which represents the definition of a variable in the dataflow graph.
package/dataflow/internal/linker.d.ts
CHANGED
@@ -17,7 +17,7 @@ export declare function linkFunctionCalls(graph: DataflowGraph, idMap: AstIdMap,
     functionCall: NodeId;
     called: readonly DataflowGraphVertexInfo[];
 }[];
-export declare function getAllLinkedFunctionDefinitions(functionDefinitionReadIds: ReadonlySet<NodeId>, dataflowGraph: DataflowGraph):
+export declare function getAllLinkedFunctionDefinitions(functionDefinitionReadIds: ReadonlySet<NodeId>, dataflowGraph: DataflowGraph): Set<DataflowGraphVertexInfo>;
 /**
  * This method links a set of read variables to definitions in an environment.
  *
package/dataflow/internal/linker.js
CHANGED
@@ -133,7 +133,7 @@ exports.linkFunctionCalls = linkFunctionCalls;
 function getAllLinkedFunctionDefinitions(functionDefinitionReadIds, dataflowGraph) {
     const potential = [...functionDefinitionReadIds];
     const visited = new Set();
-    const result = new
+    const result = new Set();
     while (potential.length > 0) {
         const currentId = potential.pop();
         // do not traverse builtins
@@ -156,7 +156,7 @@ function getAllLinkedFunctionDefinitions(functionDefinitionReadIds, dataflowGrap
         const followBits = 1 /* EdgeType.Reads */ | 2 /* EdgeType.DefinedBy */ | 32 /* EdgeType.DefinedByOnCall */;
         const followEdges = outgoingEdges.filter(([_, e]) => (0, edge_1.edgeIncludesType)(e.types, followBits));
         if (currentInfo[0].subflow !== undefined) {
-            result.
+            result.add(currentInfo[0]);
         }
         // trace all joined reads
         potential.push(...followEdges.map(([target]) => target).filter(id => !visited.has(id)));
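Note: `getAllLinkedFunctionDefinitions` now returns a `Set<DataflowGraphVertexInfo>` and follows the classic worklist shape visible above: a stack of pending ids, a visited set, and a result set. A generic sketch of that traversal over a plain adjacency map standing in for the dataflow graph (not flowR's actual edge handling):

```typescript
type NodeId = string;

// Pop pending ids, skip already-visited ones, collect matching nodes in a Set,
// and push newly reachable neighbours.
function collectReachable(
    start: ReadonlySet<NodeId>,
    edges: ReadonlyMap<NodeId, readonly NodeId[]>,
    isTarget: (id: NodeId) => boolean
): Set<NodeId> {
    const potential = [...start];
    const visited = new Set<NodeId>();
    const result = new Set<NodeId>();
    while (potential.length > 0) {
        const current = potential.pop() as NodeId;
        if (visited.has(current)) {
            continue;
        }
        visited.add(current);
        if (isTarget(current)) {
            result.add(current);
        }
        potential.push(...(edges.get(current) ?? []).filter(id => !visited.has(id)));
    }
    return result;
}

const edges = new Map<NodeId, NodeId[]>([['call', ['alias']], ['alias', ['def']]]);
console.log(collectReachable(new Set(['call']), edges, id => id === 'def')); // Set { 'def' }
```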
package/dataflow/internal/process/functions/call/named-call-handling.js
CHANGED
@@ -45,6 +45,7 @@ function processNamedCall(name, args, rootId, data) {
         const v = information.graph.getVertex(rootId);
         if (v?.tag === "function-call" /* VertexType.FunctionCall */) {
             v.onlyBuiltin = true;
+            v.environment = undefined;
         }
     }
     return information ?? (0, info_1.initializeCleanDataflowInformation)(rootId, data);
package/package.json
CHANGED
package/slicing/static/slice-call.js
CHANGED
@@ -4,6 +4,7 @@ exports.handleReturns = exports.sliceForCall = void 0;
 const assert_1 = require("../../util/assert");
 const fingerprint_1 = require("./fingerprint");
 const linker_1 = require("../../dataflow/internal/linker");
+const environment_1 = require("../../dataflow/environments/environment");
 const scoping_1 = require("../../dataflow/environments/scoping");
 const overwrite_1 = require("../../dataflow/environments/overwrite");
 const built_in_1 = require("../../dataflow/environments/built-in");
@@ -11,12 +12,13 @@ const resolve_by_name_1 = require("../../dataflow/environments/resolve-by-name")
 const edge_1 = require("../../dataflow/graph/edge");
 function retrieveActiveEnvironment(callerInfo, baseEnvironment) {
     let callerEnvironment = callerInfo.environment;
-
-
+    const level = callerEnvironment?.level ?? 0;
+    if (baseEnvironment.level !== level) {
+        while (baseEnvironment.level < level) {
             baseEnvironment = (0, scoping_1.pushLocalEnvironment)(baseEnvironment);
         }
-        while (baseEnvironment.level >
-            callerEnvironment = (0, scoping_1.pushLocalEnvironment)(callerEnvironment);
+        while (baseEnvironment.level > level) {
+            callerEnvironment = (0, scoping_1.pushLocalEnvironment)(callerEnvironment ?? (0, environment_1.initializeCleanEnvironments)(true));
         }
     }
     return (0, overwrite_1.overwriteEnvironment)(baseEnvironment, callerEnvironment);
@@ -40,7 +42,7 @@ function sliceForCall(current, callerInfo, dataflowGraph, queue) {
         }
     }
     const functionCallTargets = (0, linker_1.getAllLinkedFunctionDefinitions)(new Set(functionCallDefs), dataflowGraph);
-    for (const
+    for (const functionCallTarget of functionCallTargets) {
         // all those linked within the scopes of other functions are already linked when exiting a function definition
         for (const openIn of functionCallTarget.subflow.in) {
             const defs = openIn.name ? (0, resolve_by_name_1.resolveByName)(openIn.name, activeEnvironment) : undefined;
@@ -72,7 +74,11 @@ function handleReturns(queue, currentEdges, baseEnvFingerprint, baseEnvironment)
             queue.add(target, baseEnvironment, baseEnvFingerprint, false);
         }
         else if ((0, edge_1.edgeIncludesType)(edge.types, 64 /* EdgeType.Argument */)) {
-            queue.potentialArguments.
+            queue.potentialArguments.set(target, {
+                id: target,
+                baseEnvironment,
+                onlyForSideEffects: false
+            });
         }
     }
     return true;
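Note: `retrieveActiveEnvironment` now aligns the nesting levels of the caller and base environments before merging them, pushing local scopes onto whichever side is shallower. A simplified sketch of that alignment idea; the `Env` shape below is a stand-in for flowR's `REnvironmentInformation`, not the real type:

```typescript
interface Env { level: number; frames: string[] }

// Wrap an environment in one more local scope, raising its nesting level.
const pushLocal = (env: Env): Env =>
    ({ level: env.level + 1, frames: [...env.frames, `local-${env.level + 1}`] });

// Push local scopes onto the shallower side until both levels match.
function alignLevels(base: Env, caller: Env): [Env, Env] {
    while (base.level < caller.level) {
        base = pushLocal(base);
    }
    while (base.level > caller.level) {
        caller = pushLocal(caller);
    }
    return [base, caller];
}

const [base, caller] = alignLevels({ level: 0, frames: ['global'] }, { level: 2, frames: ['global', 'f', 'g'] });
console.log(base.level === caller.level); // true — safe to merge afterwards
```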
package/slicing/static/static-slicer.js
CHANGED
@@ -9,7 +9,6 @@ const slice_call_1 = require("./slice-call");
 const parse_1 = require("../criterion/parse");
 const environment_1 = require("../../dataflow/environments/environment");
 const edge_1 = require("../../dataflow/graph/edge");
-const built_in_1 = require("../../dataflow/environments/built-in");
 exports.slicerLogger = log_1.log.getSubLogger({ name: 'slicer' });
 /**
  * This returns the ids to include in the slice, when slicing with the given seed id's (must be at least one).
@@ -63,16 +62,19 @@ function staticSlicing(graph, ast, criteria, threshold = 75) {
             }
         }
         for (const [target, { types }] of currentEdges) {
-            if (
+            if ((0, edge_1.edgeIncludesType)(types, 256 /* EdgeType.NonStandardEvaluation */)) {
                 continue;
             }
             const t = (0, edge_1.shouldTraverseEdge)(types);
             if (t === 3 /* TraverseEdge.Always */) {
                 queue.add(target, baseEnvironment, baseEnvFingerprint, false);
             }
-            else if (t === 2 /* TraverseEdge.DefinedByOnCall */
-                queue.
-
+            else if (t === 2 /* TraverseEdge.DefinedByOnCall */) {
+                const n = queue.potentialArguments.get(target);
+                if (n) {
+                    queue.add(target, n.baseEnvironment, (0, fingerprint_1.envFingerprint)(n.baseEnvironment), n.onlyForSideEffects);
+                    queue.potentialArguments.delete(target);
+                }
+            }
             else if (t === 1 /* TraverseEdge.SideEffect */) {
                 queue.add(target, baseEnvironment, baseEnvFingerprint, true);
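Note: this is the consumer side of the `potentialArguments` map filled by `handleReturns` in slice-call.js above: argument targets are parked with the environment they should be sliced in and only promoted onto the visiting queue once a defined-by-on-call edge confirms them. A simplified sketch of that defer-and-promote handshake; the names and shapes are stand-ins for flowR's `VisitingQueue`:

```typescript
type NodeId = string;
interface Deferred { baseEnvironment: string; onlyForSideEffects: boolean }

const potentialArguments = new Map<NodeId, Deferred>();
const queued: NodeId[] = [];

// Argument edge seen: remember the target, but do not enqueue it yet.
function onArgumentEdge(target: NodeId, baseEnvironment: string): void {
    potentialArguments.set(target, { baseEnvironment, onlyForSideEffects: false });
}

// Defined-by-on-call edge seen: promote the deferred target, if any.
function onDefinedByOnCallEdge(target: NodeId): void {
    const deferred = potentialArguments.get(target);
    if (deferred) {
        queued.push(target);               // promoted with the remembered environment
        potentialArguments.delete(target); // each deferred entry is used at most once
    }
}

onArgumentEdge('arg-3', 'env@call-site');
onDefinedByOnCallEdge('arg-3');
console.log(queued, potentialArguments.size); // [ 'arg-3' ] 0
```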
package/slicing/static/visiting-queue.d.ts
CHANGED
@@ -7,7 +7,7 @@ export declare class VisitingQueue {
     private seen;
     private idThreshold;
     private queue;
-    potentialArguments:
+    potentialArguments: Map<NodeId, NodeToSlice>;
     constructor(threshold: number);
     /**
      * Adds a node to the queue if it has not been seen before.
package/slicing/static/visiting-queue.js
CHANGED
@@ -10,7 +10,7 @@ class VisitingQueue {
     idThreshold = new Map();
     queue = [];
     // the set of potential arguments holds arguments which may be added if found with the `defined-by-on-call` edge
-    potentialArguments = new
+    potentialArguments = new Map();
     constructor(threshold) {
         this.threshold = threshold;
     }
package/util/version.js
CHANGED
@@ -3,7 +3,7 @@ Object.defineProperty(exports, "__esModule", { value: true });
 exports.flowrVersion = void 0;
 const semver_1 = require("semver");
 // this is automatically replaced with the current version by release-it
-const version = '2.0.7';
+const version = '2.0.9';
 function flowrVersion() {
     return new semver_1.SemVer(version);
 }