@eagleoutice/flowr 1.4.2 → 1.5.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/core/print/slice-diff-ansi.js +7 -7
- package/core/steps.d.ts +352 -11
- package/core/steps.js +4 -4
- package/index.d.ts +0 -2
- package/index.js +0 -2
- package/package.json +76 -216
- package/util/{summarizer/summarizer.d.ts → summarizer.d.ts} +15 -1
- package/util/summarizer.js +37 -0
- package/util/version.d.ts +2 -0
- package/util/version.js +11 -0
- package/benchmark/index.d.ts +0 -3
- package/benchmark/index.js +0 -20
- package/benchmark/slicer.d.ts +0 -98
- package/benchmark/slicer.js +0 -213
- package/benchmark/stats/index.d.ts +0 -10
- package/benchmark/stats/index.js +0 -27
- package/benchmark/stats/print.d.ts +0 -7
- package/benchmark/stats/print.js +0 -155
- package/benchmark/stats/stats.d.ts +0 -40
- package/benchmark/stats/stats.js +0 -6
- package/benchmark/stopwatch.d.ts +0 -35
- package/benchmark/stopwatch.js +0 -79
- package/cli/benchmark-app.d.ts +0 -10
- package/cli/benchmark-app.js +0 -67
- package/cli/benchmark-helper-app.d.ts +0 -9
- package/cli/benchmark-helper-app.js +0 -69
- package/cli/common/features.d.ts +0 -3
- package/cli/common/features.js +0 -30
- package/cli/common/index.d.ts +0 -2
- package/cli/common/index.js +0 -19
- package/cli/common/options.d.ts +0 -20
- package/cli/common/options.js +0 -85
- package/cli/common/script.d.ts +0 -21
- package/cli/common/script.js +0 -61
- package/cli/common/scripts-info.d.ts +0 -25
- package/cli/common/scripts-info.js +0 -83
- package/cli/export-quads-app.d.ts +0 -7
- package/cli/export-quads-app.js +0 -42
- package/cli/repl/commands/cfg.d.ts +0 -3
- package/cli/repl/commands/cfg.js +0 -37
- package/cli/repl/commands/commands.d.ts +0 -13
- package/cli/repl/commands/commands.js +0 -142
- package/cli/repl/commands/dataflow.d.ts +0 -3
- package/cli/repl/commands/dataflow.js +0 -34
- package/cli/repl/commands/execute.d.ts +0 -4
- package/cli/repl/commands/execute.js +0 -27
- package/cli/repl/commands/index.d.ts +0 -2
- package/cli/repl/commands/index.js +0 -19
- package/cli/repl/commands/main.d.ts +0 -39
- package/cli/repl/commands/main.js +0 -14
- package/cli/repl/commands/normalize.d.ts +0 -3
- package/cli/repl/commands/normalize.js +0 -34
- package/cli/repl/commands/parse.d.ts +0 -2
- package/cli/repl/commands/parse.js +0 -109
- package/cli/repl/commands/quit.d.ts +0 -2
- package/cli/repl/commands/quit.js +0 -15
- package/cli/repl/commands/version.d.ts +0 -16
- package/cli/repl/commands/version.js +0 -33
- package/cli/repl/core.d.ts +0 -41
- package/cli/repl/core.js +0 -172
- package/cli/repl/execute.d.ts +0 -28
- package/cli/repl/execute.js +0 -79
- package/cli/repl/index.d.ts +0 -5
- package/cli/repl/index.js +0 -22
- package/cli/repl/prompt.d.ts +0 -2
- package/cli/repl/prompt.js +0 -9
- package/cli/repl/server/connection.d.ts +0 -21
- package/cli/repl/server/connection.js +0 -213
- package/cli/repl/server/messages/analysis.d.ts +0 -71
- package/cli/repl/server/messages/analysis.js +0 -21
- package/cli/repl/server/messages/error.d.ts +0 -11
- package/cli/repl/server/messages/error.js +0 -3
- package/cli/repl/server/messages/hello.d.ts +0 -20
- package/cli/repl/server/messages/hello.js +0 -3
- package/cli/repl/server/messages/index.d.ts +0 -1
- package/cli/repl/server/messages/index.js +0 -3
- package/cli/repl/server/messages/messages.d.ts +0 -35
- package/cli/repl/server/messages/messages.js +0 -40
- package/cli/repl/server/messages/repl.d.ts +0 -33
- package/cli/repl/server/messages/repl.js +0 -37
- package/cli/repl/server/messages/slice.d.ts +0 -25
- package/cli/repl/server/messages/slice.js +0 -37
- package/cli/repl/server/net.d.ts +0 -49
- package/cli/repl/server/net.js +0 -63
- package/cli/repl/server/send.d.ts +0 -4
- package/cli/repl/server/send.js +0 -18
- package/cli/repl/server/server.d.ts +0 -20
- package/cli/repl/server/server.js +0 -66
- package/cli/repl/server/validate.d.ts +0 -15
- package/cli/repl/server/validate.js +0 -34
- package/cli/slicer-app.d.ts +0 -11
- package/cli/slicer-app.js +0 -81
- package/cli/statistics-app.d.ts +0 -11
- package/cli/statistics-app.js +0 -98
- package/cli/statistics-helper-app.d.ts +0 -11
- package/cli/statistics-helper-app.js +0 -82
- package/cli/summarizer-app.d.ts +0 -18
- package/cli/summarizer-app.js +0 -66
- package/flowr.d.ts +0 -29
- package/flowr.js +0 -142
- package/statistics/features/common-syntax-probability.d.ts +0 -31
- package/statistics/features/common-syntax-probability.js +0 -156
- package/statistics/features/feature.d.ts +0 -175
- package/statistics/features/feature.js +0 -30
- package/statistics/features/index.d.ts +0 -1
- package/statistics/features/index.js +0 -18
- package/statistics/features/post-processing.d.ts +0 -12
- package/statistics/features/post-processing.js +0 -21
- package/statistics/features/supported/assignments/assignments.d.ts +0 -11
- package/statistics/features/supported/assignments/assignments.js +0 -53
- package/statistics/features/supported/assignments/index.d.ts +0 -1
- package/statistics/features/supported/assignments/index.js +0 -6
- package/statistics/features/supported/assignments/post-process.d.ts +0 -3
- package/statistics/features/supported/assignments/post-process.js +0 -125
- package/statistics/features/supported/comments/comments.d.ts +0 -18
- package/statistics/features/supported/comments/comments.js +0 -133
- package/statistics/features/supported/comments/index.d.ts +0 -1
- package/statistics/features/supported/comments/index.js +0 -6
- package/statistics/features/supported/comments/post-process.d.ts +0 -3
- package/statistics/features/supported/comments/post-process.js +0 -50
- package/statistics/features/supported/control-flow/control-flow.d.ts +0 -17
- package/statistics/features/supported/control-flow/control-flow.js +0 -67
- package/statistics/features/supported/control-flow/index.d.ts +0 -1
- package/statistics/features/supported/control-flow/index.js +0 -6
- package/statistics/features/supported/control-flow/post-process.d.ts +0 -3
- package/statistics/features/supported/control-flow/post-process.js +0 -65
- package/statistics/features/supported/data-access/data-access.d.ts +0 -15
- package/statistics/features/supported/data-access/data-access.js +0 -118
- package/statistics/features/supported/data-access/index.d.ts +0 -1
- package/statistics/features/supported/data-access/index.js +0 -6
- package/statistics/features/supported/data-access/post-process.d.ts +0 -3
- package/statistics/features/supported/data-access/post-process.js +0 -107
- package/statistics/features/supported/defined-functions/defined-functions.d.ts +0 -35
- package/statistics/features/supported/defined-functions/defined-functions.js +0 -139
- package/statistics/features/supported/defined-functions/index.d.ts +0 -1
- package/statistics/features/supported/defined-functions/index.js +0 -6
- package/statistics/features/supported/defined-functions/post-process.d.ts +0 -6
- package/statistics/features/supported/defined-functions/post-process.js +0 -177
- package/statistics/features/supported/expression-list/expression-list.d.ts +0 -9
- package/statistics/features/supported/expression-list/expression-list.js +0 -36
- package/statistics/features/supported/expression-list/index.d.ts +0 -1
- package/statistics/features/supported/expression-list/index.js +0 -6
- package/statistics/features/supported/expression-list/post-process.d.ts +0 -3
- package/statistics/features/supported/expression-list/post-process.js +0 -44
- package/statistics/features/supported/index.d.ts +0 -10
- package/statistics/features/supported/index.js +0 -27
- package/statistics/features/supported/loops/index.d.ts +0 -1
- package/statistics/features/supported/loops/index.js +0 -6
- package/statistics/features/supported/loops/loops.d.ts +0 -20
- package/statistics/features/supported/loops/loops.js +0 -79
- package/statistics/features/supported/loops/post-process.d.ts +0 -3
- package/statistics/features/supported/loops/post-process.js +0 -72
- package/statistics/features/supported/used-functions/index.d.ts +0 -1
- package/statistics/features/supported/used-functions/index.js +0 -6
- package/statistics/features/supported/used-functions/post-process.d.ts +0 -6
- package/statistics/features/supported/used-functions/post-process.js +0 -179
- package/statistics/features/supported/used-functions/used-functions.d.ts +0 -24
- package/statistics/features/supported/used-functions/used-functions.js +0 -95
- package/statistics/features/supported/used-packages/index.d.ts +0 -1
- package/statistics/features/supported/used-packages/index.js +0 -6
- package/statistics/features/supported/used-packages/post-process.d.ts +0 -3
- package/statistics/features/supported/used-packages/post-process.js +0 -121
- package/statistics/features/supported/used-packages/used-packages.d.ts +0 -16
- package/statistics/features/supported/used-packages/used-packages.js +0 -130
- package/statistics/features/supported/values/index.d.ts +0 -1
- package/statistics/features/supported/values/index.js +0 -6
- package/statistics/features/supported/values/post-process.d.ts +0 -3
- package/statistics/features/supported/values/post-process.js +0 -72
- package/statistics/features/supported/values/values.d.ts +0 -14
- package/statistics/features/supported/values/values.js +0 -101
- package/statistics/features/supported/variables/index.d.ts +0 -1
- package/statistics/features/supported/variables/index.js +0 -6
- package/statistics/features/supported/variables/post-process.d.ts +0 -9
- package/statistics/features/supported/variables/post-process.js +0 -122
- package/statistics/features/supported/variables/variables.d.ts +0 -15
- package/statistics/features/supported/variables/variables.js +0 -70
- package/statistics/index.d.ts +0 -6
- package/statistics/index.js +0 -24
- package/statistics/meta-statistics.d.ts +0 -33
- package/statistics/meta-statistics.js +0 -17
- package/statistics/output/file-provider.d.ts +0 -37
- package/statistics/output/file-provider.js +0 -97
- package/statistics/output/index.d.ts +0 -4
- package/statistics/output/index.js +0 -21
- package/statistics/output/print-stats.d.ts +0 -17
- package/statistics/output/print-stats.js +0 -69
- package/statistics/output/statistics-file.d.ts +0 -37
- package/statistics/output/statistics-file.js +0 -69
- package/statistics/statistics.d.ts +0 -24
- package/statistics/statistics.js +0 -108
- package/util/summarizer/auto-detect.d.ts +0 -2
- package/util/summarizer/auto-detect.js +0 -32
- package/util/summarizer/benchmark/data.d.ts +0 -66
- package/util/summarizer/benchmark/data.js +0 -13
- package/util/summarizer/benchmark/first-phase/input.d.ts +0 -3
- package/util/summarizer/benchmark/first-phase/input.js +0 -75
- package/util/summarizer/benchmark/first-phase/process.d.ts +0 -11
- package/util/summarizer/benchmark/first-phase/process.js +0 -217
- package/util/summarizer/benchmark/second-phase/graph.d.ts +0 -2
- package/util/summarizer/benchmark/second-phase/graph.js +0 -54
- package/util/summarizer/benchmark/second-phase/process.d.ts +0 -6
- package/util/summarizer/benchmark/second-phase/process.js +0 -126
- package/util/summarizer/benchmark/summarizer.d.ts +0 -31
- package/util/summarizer/benchmark/summarizer.js +0 -58
- package/util/summarizer/statistics/first-phase/process.d.ts +0 -6
- package/util/summarizer/statistics/first-phase/process.js +0 -81
- package/util/summarizer/statistics/post-process/clusterer.d.ts +0 -26
- package/util/summarizer/statistics/post-process/clusterer.js +0 -43
- package/util/summarizer/statistics/post-process/file-based-count.d.ts +0 -17
- package/util/summarizer/statistics/post-process/file-based-count.js +0 -49
- package/util/summarizer/statistics/post-process/histogram.d.ts +0 -59
- package/util/summarizer/statistics/post-process/histogram.js +0 -128
- package/util/summarizer/statistics/post-process/index.d.ts +0 -4
- package/util/summarizer/statistics/post-process/index.js +0 -21
- package/util/summarizer/statistics/post-process/post-process-output.d.ts +0 -16
- package/util/summarizer/statistics/post-process/post-process-output.js +0 -103
- package/util/summarizer/statistics/second-phase/process.d.ts +0 -11
- package/util/summarizer/statistics/second-phase/process.js +0 -117
- package/util/summarizer/statistics/summarizer.d.ts +0 -35
- package/util/summarizer/statistics/summarizer.js +0 -135
- package/util/summarizer/summarizer.js +0 -13
- /package/{statistics/output → util}/ansi.d.ts +0 -0
- /package/{statistics/output → util}/ansi.js +0 -0
package/util/summarizer/benchmark/first-phase/process.js
@@ -1,217 +0,0 @@
-"use strict";
-var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
-    if (k2 === undefined) k2 = k;
-    var desc = Object.getOwnPropertyDescriptor(m, k);
-    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
-      desc = { enumerable: true, get: function() { return m[k]; } };
-    }
-    Object.defineProperty(o, k2, desc);
-}) : (function(o, m, k, k2) {
-    if (k2 === undefined) k2 = k;
-    o[k2] = m[k];
-}));
-var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
-    Object.defineProperty(o, "default", { enumerable: true, value: v });
-}) : function(o, v) {
-    o["default"] = v;
-});
-var __importStar = (this && this.__importStar) || function (mod) {
-    if (mod && mod.__esModule) return mod;
-    var result = {};
-    if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
-    __setModuleDefault(result, mod);
-    return result;
-};
-var __importDefault = (this && this.__importDefault) || function (mod) {
-    return (mod && mod.__esModule) ? mod : { "default": mod };
-};
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.summarizeReductions = exports.summarizeSummarizedMeasurement = exports.summarizeMeasurement = exports.summarizeSlicerStats = void 0;
-const tmp = __importStar(require("tmp"));
-const assert_1 = require("../../../assert");
-const log_1 = require("../../../log");
-const defaultmap_1 = require("../../../defaultmap");
-const r_bridge_1 = require("../../../../r-bridge");
-const strings_1 = require("../../../strings");
-const fs_1 = __importDefault(require("fs"));
-const arrays_1 = require("../../../arrays");
-const tempfile = (() => {
-    let _tempfile = undefined;
-    return () => {
-        if (_tempfile === undefined) {
-            _tempfile = tmp.fileSync({ postfix: '.R', keep: false });
-            process.on('beforeExit', () => _tempfile?.removeCallback());
-        }
-        return _tempfile;
-    };
-})();
-function safeDivPercentage(a, b) {
-    if (isNaN(a) || isNaN(b)) {
-        return undefined;
-    }
-    else if (b === 0) {
-        return a === 0 ? 0 : undefined;
-    }
-    else {
-        const result = 1 - (a / b);
-        if (isNaN(result)) {
-            log_1.log.error(`NaN for ${a} and ${b}\n`);
-            return undefined;
-        }
-        else {
-            return result;
-        }
-    }
-}
-function calculateReductionForSlice(input, dataflow, perSlice) {
-    return {
-        numberOfLines: safeDivPercentage(perSlice.lines, input.numberOfLines),
-        numberOfLinesNoAutoSelection: safeDivPercentage(perSlice.lines - perSlice.autoSelected, input.numberOfLines),
-        numberOfCharacters: safeDivPercentage(perSlice.characters, input.numberOfCharacters),
-        numberOfNonWhitespaceCharacters: safeDivPercentage(perSlice.nonWhitespaceCharacters, input.numberOfNonWhitespaceCharacters),
-        numberOfRTokens: safeDivPercentage(perSlice.tokens, input.numberOfRTokens),
-        numberOfNormalizedTokens: safeDivPercentage(perSlice.normalizedTokens, input.numberOfNormalizedTokens),
-        numberOfDataflowNodes: safeDivPercentage(perSlice.dataflowNodes, dataflow.numberOfNodes)
-    };
-}
-/**
- * Summarizes the given stats by calculating the min, max, median, mean, and the standard deviation for each measurement.
- * @see Slicer
- */
-async function summarizeSlicerStats(stats, report = () => {
-}) {
-    const perSliceStats = stats.perSliceMeasurements;
-    const collect = new defaultmap_1.DefaultMap(() => []);
-    const sizeOfSliceCriteria = [];
-    const reParseShellSession = new r_bridge_1.RShell();
-    const reductions = [];
-    let failedOutputs = 0;
-    const sliceSize = {
-        lines: [],
-        autoSelected: [],
-        characters: [],
-        nonWhitespaceCharacters: [],
-        tokens: [],
-        normalizedTokens: [],
-        dataflowNodes: []
-    };
-    let timesHitThreshold = 0;
-    for (const [criteria, perSliceStat] of perSliceStats) {
-        report(criteria, perSliceStat);
-        for (const measure of perSliceStat.measurements) {
-            collect.get(measure[0]).push(Number(measure[1]));
-        }
-        sizeOfSliceCriteria.push(perSliceStat.slicingCriteria.length);
-        timesHitThreshold += perSliceStat.timesHitThreshold > 0 ? 1 : 0;
-        const { code: output, autoSelected } = perSliceStat.reconstructedCode;
-        sliceSize.autoSelected.push(autoSelected);
-        const lines = output.split('\n').length;
-        sliceSize.lines.push(lines);
-        sliceSize.characters.push(output.length);
-        const nonWhitespace = (0, strings_1.withoutWhitespace)(output).length;
-        sliceSize.nonWhitespaceCharacters.push(nonWhitespace);
-        // reparse the output to get the number of tokens
-        try {
-            // there seem to be encoding issues, therefore, we dump to a temp file
-            fs_1.default.writeFileSync(tempfile().name, output);
-            const reParsed = await (0, r_bridge_1.retrieveNormalizedAstFromRCode)({ request: 'file', content: tempfile().name }, reParseShellSession);
-            let numberOfNormalizedTokens = 0;
-            (0, r_bridge_1.visitAst)(reParsed.ast, _ => {
-                numberOfNormalizedTokens++;
-                return false;
-            });
-            sliceSize.normalizedTokens.push(numberOfNormalizedTokens);
-            const numberOfRTokens = await (0, r_bridge_1.retrieveNumberOfRTokensOfLastParse)(reParseShellSession);
-            sliceSize.tokens.push(numberOfRTokens);
-            reductions.push(calculateReductionForSlice(stats.input, stats.dataflow, {
-                lines: lines,
-                characters: output.length,
-                nonWhitespaceCharacters: nonWhitespace,
-                autoSelected: autoSelected,
-                tokens: numberOfRTokens,
-                normalizedTokens: numberOfNormalizedTokens,
-                dataflowNodes: perSliceStat.numberOfDataflowNodesSliced
-            }));
-        }
-        catch (e) {
-            console.error(` ! Failed to re-parse the output of the slicer for ${JSON.stringify(criteria)}`); //, e
-            console.error(` Code: ${output}\n`);
-            failedOutputs++;
-        }
-        sliceSize.dataflowNodes.push(perSliceStat.numberOfDataflowNodesSliced);
-    }
-    // summarize all measurements:
-    const summarized = new Map();
-    for (const [criterion, measurements] of collect.entries()) {
-        summarized.set(criterion, summarizeMeasurement(measurements));
-    }
-    reParseShellSession.close();
-    return {
-        ...stats,
-        perSliceMeasurements: {
-            numberOfSlices: perSliceStats.size,
-            sliceCriteriaSizes: summarizeMeasurement(sizeOfSliceCriteria),
-            measurements: summarized,
-            failedToRepParse: failedOutputs,
-            timesHitThreshold,
-            reduction: {
-                numberOfLines: summarizeMeasurement(reductions.map(r => r.numberOfLines).filter(assert_1.isNotUndefined)),
-                numberOfLinesNoAutoSelection: summarizeMeasurement(reductions.map(r => r.numberOfLinesNoAutoSelection).filter(assert_1.isNotUndefined)),
-                numberOfCharacters: summarizeMeasurement(reductions.map(r => r.numberOfCharacters).filter(assert_1.isNotUndefined)),
-                numberOfNonWhitespaceCharacters: summarizeMeasurement(reductions.map(r => r.numberOfNonWhitespaceCharacters).filter(assert_1.isNotUndefined)),
-                numberOfRTokens: summarizeMeasurement(reductions.map(r => r.numberOfRTokens).filter(assert_1.isNotUndefined)),
-                numberOfNormalizedTokens: summarizeMeasurement(reductions.map(r => r.numberOfNormalizedTokens).filter(assert_1.isNotUndefined)),
-                numberOfDataflowNodes: summarizeMeasurement(reductions.map(r => r.numberOfDataflowNodes).filter(assert_1.isNotUndefined))
-            },
-            sliceSize: {
-                lines: summarizeMeasurement(sliceSize.lines),
-                characters: summarizeMeasurement(sliceSize.characters),
-                nonWhitespaceCharacters: summarizeMeasurement(sliceSize.nonWhitespaceCharacters),
-                autoSelected: summarizeMeasurement(sliceSize.autoSelected),
-                tokens: summarizeMeasurement(sliceSize.tokens),
-                normalizedTokens: summarizeMeasurement(sliceSize.normalizedTokens),
-                dataflowNodes: summarizeMeasurement(sliceSize.dataflowNodes)
-            }
-        }
-    };
-}
-exports.summarizeSlicerStats = summarizeSlicerStats;
-function summarizeMeasurement(data, totalNumberOfDataPoints) {
-    // just to avoid in-place modification
-    const sorted = [...data].sort((a, b) => a - b);
-    const min = sorted[0];
-    const max = sorted[sorted.length - 1];
-    const median = sorted[Math.floor(sorted.length / 2)];
-    const total = (0, arrays_1.sum)(sorted);
-    const length = totalNumberOfDataPoints ?? sorted.length;
-    const mean = total / length;
-    // sqrt(sum(x-mean)^2 / n)
-    const std = Math.sqrt(sorted.map(x => (x - mean) ** 2).reduce((a, b) => a + b, 0) / length);
-    return { min, max, median, mean, std, total };
-}
-exports.summarizeMeasurement = summarizeMeasurement;
-function summarizeSummarizedMeasurement(data) {
-    const min = data.map(d => d.min).filter(assert_1.isNotUndefined).reduce((a, b) => Math.min(a, b), Infinity);
-    const max = data.map(d => d.max).filter(assert_1.isNotUndefined).reduce((a, b) => Math.max(a, b), -Infinity);
-    // get most average
-    const median = data.map(d => d.median).filter(assert_1.isNotUndefined).reduce((a, b) => a + b, 0) / data.length;
-    const mean = data.map(d => d.mean).filter(assert_1.isNotUndefined).reduce((a, b) => a + b, 0) / data.length;
-    // Method 1 of https://www.statology.org/averaging-standard-deviations/
-    const std = Math.sqrt(data.map(d => d.std ** 2).filter(assert_1.isNotUndefined).reduce((a, b) => a + b, 0) / data.length);
-    const total = data.map(d => d.total).filter(assert_1.isNotUndefined).reduce((a, b) => a + b, 0);
-    return { min, max, median, mean, std, total };
-}
-exports.summarizeSummarizedMeasurement = summarizeSummarizedMeasurement;
-function summarizeReductions(reductions) {
-    return {
-        numberOfDataflowNodes: summarizeSummarizedMeasurement(reductions.map(r => r.numberOfDataflowNodes)),
-        numberOfLines: summarizeSummarizedMeasurement(reductions.map(r => r.numberOfLines)),
-        numberOfCharacters: summarizeSummarizedMeasurement(reductions.map(r => r.numberOfCharacters)),
-        numberOfNonWhitespaceCharacters: summarizeSummarizedMeasurement(reductions.map(r => r.numberOfNonWhitespaceCharacters)),
-        numberOfLinesNoAutoSelection: summarizeSummarizedMeasurement(reductions.map(r => r.numberOfLinesNoAutoSelection)),
-        numberOfNormalizedTokens: summarizeSummarizedMeasurement(reductions.map(r => r.numberOfNormalizedTokens)),
-        numberOfRTokens: summarizeSummarizedMeasurement(reductions.map(r => r.numberOfRTokens))
-    };
-}
-exports.summarizeReductions = summarizeReductions;
-//# sourceMappingURL=process.js.map
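As a reading aid for the removed hunk above (restating the code, not the package documentation): `summarizeMeasurement` uses the population standard deviation, and `summarizeSummarizedMeasurement` combines per-group standard deviations by averaging their variances ("Method 1" of the linked article):

$$ \operatorname{std}(x) = \sqrt{\frac{1}{n}\sum_{i=1}^{n}\bigl(x_i - \bar{x}\bigr)^2}, \qquad \bar{\sigma} = \sqrt{\frac{1}{k}\sum_{j=1}^{k}\sigma_j^2} $$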
package/util/summarizer/benchmark/second-phase/graph.js
@@ -1,54 +0,0 @@
-"use strict";
-var __importDefault = (this && this.__importDefault) || function (mod) {
-    return (mod && mod.__esModule) ? mod : { "default": mod };
-};
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.writeGraphOutput = void 0;
-const fs_1 = __importDefault(require("fs"));
-const json_1 = require("../../../json");
-function writeGraphOutput(ultimate, outputGraphPath) {
-    console.log(`Producing benchmark graph data (${outputGraphPath})...`);
-    const data = [];
-    for (const { name, measurements } of [{ name: 'per-file', measurements: ultimate.commonMeasurements }, { name: 'per-slice', measurements: ultimate.perSliceMeasurements }]) {
-        for (const [point, measurement] of measurements) {
-            if (point === 'close R session' || point === 'initialize R session') {
-                continue;
-            }
-            const pointName = point === 'total' ? `total ${name}` : point;
-            data.push({
-                name: pointName[0].toUpperCase() + pointName.slice(1),
-                unit: 'ms',
-                value: Number(measurement.mean / 1e6),
-                range: Number(measurement.std / 1e6),
-                extra: `median: ${(measurement.median / 1e6).toFixed(2)}ms`
-            });
-        }
-    }
-    data.push({
-        name: 'failed to reconstruct/re-parse',
-        unit: '#',
-        value: ultimate.failedToRepParse,
-        extra: `out of ${ultimate.totalSlices} slices`
-    });
-    data.push({
-        name: 'times hit threshold',
-        unit: '#',
-        value: ultimate.timesHitThreshold
-    });
-    data.push({
-        name: 'reduction (characters)',
-        unit: '#',
-        value: ultimate.reduction.numberOfCharacters.mean,
-        extra: `std: ${ultimate.reduction.numberOfCharacters.std}`
-    });
-    data.push({
-        name: 'reduction (normalized tokens)',
-        unit: '#',
-        value: ultimate.reduction.numberOfNormalizedTokens.mean,
-        extra: `std: ${ultimate.reduction.numberOfNormalizedTokens.std}`
-    });
-    // write the output file
-    fs_1.default.writeFileSync(outputGraphPath, JSON.stringify(data, json_1.jsonReplacer));
-}
-exports.writeGraphOutput = writeGraphOutput;
-//# sourceMappingURL=graph.js.map
package/util/summarizer/benchmark/second-phase/process.d.ts
@@ -1,6 +0,0 @@
-/// <reference types="node" />
-import type { SummarizedSlicerStats, UltimateSlicerStats } from '../data';
-export declare function summarizeAllSummarizedStats(stats: SummarizedSlicerStats[]): UltimateSlicerStats;
-export declare function summarizeAllUltimateStats(stats: UltimateSlicerStats[]): UltimateSlicerStats;
-export declare function processNextSummary(line: Buffer, allSummarized: SummarizedSlicerStats[]): void;
-export declare function processNextUltimateSummary(line: Buffer, allSummarized: UltimateSlicerStats[]): void;
package/util/summarizer/benchmark/second-phase/process.js
@@ -1,126 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.processNextUltimateSummary = exports.processNextSummary = exports.summarizeAllUltimateStats = exports.summarizeAllSummarizedStats = void 0;
-const defaultmap_1 = require("../../../defaultmap");
-const benchmark_1 = require("../../../../benchmark");
-const assert_1 = require("../../../assert");
-const process_1 = require("../first-phase/process");
-function summarizeAllSummarizedStats(stats) {
-    const commonMeasurements = new defaultmap_1.DefaultMap(() => []);
-    const perSliceMeasurements = new defaultmap_1.DefaultMap(() => []);
-    const reductions = [];
-    const inputs = [];
-    const dataflows = [];
-    let failedToRepParse = 0;
-    let timesHitThreshold = 0;
-    let totalSlices = 0;
-    for (const stat of stats) {
-        for (const [k, v] of stat.commonMeasurements) {
-            commonMeasurements.get(k).push(Number(v));
-        }
-        for (const [k, v] of stat.perSliceMeasurements.measurements) {
-            perSliceMeasurements.get(k).push(v);
-        }
-        reductions.push(stat.perSliceMeasurements.reduction);
-        inputs.push(stat.input);
-        dataflows.push(stat.dataflow);
-        failedToRepParse += stat.perSliceMeasurements.failedToRepParse;
-        totalSlices += stat.perSliceMeasurements.numberOfSlices;
-        timesHitThreshold += stat.perSliceMeasurements.timesHitThreshold;
-    }
-    return {
-        totalRequests: stats.length,
-        totalSlices: totalSlices,
-        commonMeasurements: new Map([...commonMeasurements.entries()].map(([k, v]) => [k, (0, benchmark_1.summarizeMeasurement)(v)])),
-        perSliceMeasurements: new Map([...perSliceMeasurements.entries()].map(([k, v]) => [k, (0, benchmark_1.summarizeSummarizedMeasurement)(v)])),
-        failedToRepParse,
-        timesHitThreshold,
-        reduction: (0, process_1.summarizeReductions)(reductions),
-        input: {
-            numberOfLines: (0, benchmark_1.summarizeMeasurement)(inputs.map(i => i.numberOfLines)),
-            numberOfCharacters: (0, benchmark_1.summarizeMeasurement)(inputs.map(i => i.numberOfCharacters)),
-            numberOfNonWhitespaceCharacters: (0, benchmark_1.summarizeMeasurement)(inputs.map(i => i.numberOfNonWhitespaceCharacters)),
-            numberOfRTokens: (0, benchmark_1.summarizeMeasurement)(inputs.map(i => i.numberOfRTokens)),
-            numberOfNormalizedTokens: (0, benchmark_1.summarizeMeasurement)(inputs.map(i => i.numberOfNormalizedTokens))
-        },
-        dataflow: {
-            numberOfNodes: (0, benchmark_1.summarizeMeasurement)(dataflows.map(d => d.numberOfNodes)),
-            numberOfFunctionDefinitions: (0, benchmark_1.summarizeMeasurement)(dataflows.map(d => d.numberOfFunctionDefinitions)),
-            numberOfCalls: (0, benchmark_1.summarizeMeasurement)(dataflows.map(d => d.numberOfCalls)),
-            numberOfEdges: (0, benchmark_1.summarizeMeasurement)(dataflows.map(d => d.numberOfEdges))
-        }
-    };
-}
-exports.summarizeAllSummarizedStats = summarizeAllSummarizedStats;
-function summarizeAllUltimateStats(stats) {
-    return {
-        // these should be deterministic, so we don't technically need to use max, but we do just in case something unexpected happens :)
-        totalRequests: Math.max(...stats.map(s => s.totalRequests)),
-        totalSlices: Math.max(...stats.map(s => s.totalSlices)),
-        failedToRepParse: Math.max(...stats.map(s => s.failedToRepParse)),
-        timesHitThreshold: Math.max(...stats.map(s => s.timesHitThreshold)),
-        // average out / summarize other measurements
-        commonMeasurements: new Map(benchmark_1.CommonSlicerMeasurements.map(m => [m, (0, benchmark_1.summarizeSummarizedMeasurement)(stats.map(s => s.commonMeasurements.get(m)))])),
-        perSliceMeasurements: new Map(benchmark_1.PerSliceMeasurements.map(m => [m, (0, benchmark_1.summarizeSummarizedMeasurement)(stats.map(s => s.perSliceMeasurements.get(m)))])),
-        reduction: (0, process_1.summarizeReductions)(stats.map(s => s.reduction)),
-        input: {
-            numberOfLines: (0, benchmark_1.summarizeSummarizedMeasurement)(stats.map(s => s.input.numberOfLines)),
-            numberOfCharacters: (0, benchmark_1.summarizeSummarizedMeasurement)(stats.map(s => s.input.numberOfCharacters)),
-            numberOfNonWhitespaceCharacters: (0, benchmark_1.summarizeSummarizedMeasurement)(stats.map(s => s.input.numberOfNonWhitespaceCharacters)),
-            numberOfRTokens: (0, benchmark_1.summarizeSummarizedMeasurement)(stats.map(s => s.input.numberOfRTokens)),
-            numberOfNormalizedTokens: (0, benchmark_1.summarizeSummarizedMeasurement)(stats.map(s => s.input.numberOfNormalizedTokens))
-        },
-        dataflow: {
-            numberOfNodes: (0, benchmark_1.summarizeSummarizedMeasurement)(stats.map(s => s.dataflow.numberOfNodes)),
-            numberOfFunctionDefinitions: (0, benchmark_1.summarizeSummarizedMeasurement)(stats.map(s => s.dataflow.numberOfFunctionDefinitions)),
-            numberOfCalls: (0, benchmark_1.summarizeSummarizedMeasurement)(stats.map(s => s.dataflow.numberOfCalls)),
-            numberOfEdges: (0, benchmark_1.summarizeSummarizedMeasurement)(stats.map(s => s.dataflow.numberOfEdges))
-        }
-    };
-}
-exports.summarizeAllUltimateStats = summarizeAllUltimateStats;
-function processNextSummary(line, allSummarized) {
-    let got = JSON.parse(line.toString());
-    got = {
-        summarize: {
-            input: got.summarize.input,
-            request: got.summarize.request,
-            dataflow: got.summarize.dataflow,
-            commonMeasurements: new Map(got.summarize.commonMeasurements
-                .map(([k, v]) => {
-                (0, assert_1.guard)(v.endsWith('n'), 'Expected a bigint');
-                return [k, BigInt(v.slice(0, -1))];
-            })),
-            perSliceMeasurements: {
-                numberOfSlices: got.summarize.perSliceMeasurements.numberOfSlices,
-                sliceCriteriaSizes: got.summarize.perSliceMeasurements.sliceCriteriaSizes,
-                measurements: new Map(got.summarize.perSliceMeasurements.measurements),
-                reduction: got.summarize.perSliceMeasurements.reduction,
-                timesHitThreshold: got.summarize.perSliceMeasurements.timesHitThreshold,
-                failedToRepParse: got.summarize.perSliceMeasurements.failedToRepParse,
-                sliceSize: got.summarize.perSliceMeasurements.sliceSize
-            }
-        }
-    };
-    allSummarized.push(got.summarize);
-}
-exports.processNextSummary = processNextSummary;
-function processNextUltimateSummary(line, allSummarized) {
-    let got = JSON.parse(line.toString());
-    got = {
-        summarize: {
-            totalRequests: got.summarize.totalRequests,
-            totalSlices: got.summarize.totalSlices,
-            commonMeasurements: new Map(got.summarize.commonMeasurements),
-            perSliceMeasurements: new Map(got.summarize.perSliceMeasurements),
-            failedToRepParse: got.summarize.failedToRepParse,
-            timesHitThreshold: got.summarize.timesHitThreshold,
-            reduction: got.summarize.reduction,
-            input: got.summarize.input,
-            dataflow: got.summarize.dataflow,
-        }
-    };
-    allSummarized.push(got.summarize);
-}
-exports.processNextUltimateSummary = processNextUltimateSummary;
-//# sourceMappingURL=process.js.map
package/util/summarizer/benchmark/summarizer.d.ts
@@ -1,31 +0,0 @@
-/**
- * This module is tasked with processing the results of the benchmarking (see {@link SlicerStats}).
- * @module
- */
-import type { CommonSummarizerConfiguration } from '../summarizer';
-import { Summarizer } from '../summarizer';
-import type { UltimateSlicerStats } from './data';
-export interface BenchmarkSummarizerConfiguration extends CommonSummarizerConfiguration {
-    /**
-     * If given, produce graph data output (e.g. for the benchmark visualization) to the given path
-     */
-    graphOutputPath?: string;
-    /**
-     * The input path to read from
-     */
-    inputPath: string;
-    /**
-     * Path for the intermediate results of the preparation phase
-     */
-    intermediateOutputPath: string;
-    /**
-     * Path for the final results of the summarization phase
-     */
-    outputPath: string;
-}
-export declare class BenchmarkSummarizer extends Summarizer<UltimateSlicerStats, BenchmarkSummarizerConfiguration> {
-    constructor(config: BenchmarkSummarizerConfiguration);
-    preparationPhase(): Promise<void>;
-    summarizePhase(): Promise<UltimateSlicerStats>;
-    private removeIfExists;
-}
package/util/summarizer/benchmark/summarizer.js
@@ -1,58 +0,0 @@
-"use strict";
-var __importDefault = (this && this.__importDefault) || function (mod) {
-    return (mod && mod.__esModule) ? mod : { "default": mod };
-};
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.BenchmarkSummarizer = void 0;
-const summarizer_1 = require("../summarizer");
-const fs_1 = __importDefault(require("fs"));
-const input_1 = require("./first-phase/input");
-const json_1 = require("../../json");
-const benchmark_1 = require("../../../benchmark");
-const process_1 = require("./second-phase/process");
-const graph_1 = require("./second-phase/graph");
-const files_1 = require("../../files");
-const path_1 = __importDefault(require("path"));
-class BenchmarkSummarizer extends summarizer_1.Summarizer {
-    constructor(config) {
-        super(config);
-    }
-    async preparationPhase() {
-        this.removeIfExists(`${this.config.intermediateOutputPath}.json`);
-        this.removeIfExists(this.config.intermediateOutputPath);
-        fs_1.default.mkdirSync(this.config.intermediateOutputPath);
-        const dirContent = fs_1.default.readdirSync(this.config.inputPath);
-        for (let i = 0; i < dirContent.length; i++) {
-            const filePath = path_1.default.join(this.config.inputPath, dirContent[i]);
-            const outputPath = path_1.default.join(this.config.intermediateOutputPath, dirContent[i]);
-            // generate measurements for each run
-            await (0, files_1.readLineByLine)(filePath, (line, lineNumber) => (0, input_1.processRunMeasurement)(line, i, lineNumber, `${outputPath}.log`, outputPath));
-            // generate combined measurements for the file
-            (0, input_1.processSummarizedFileMeasurement)(filePath, outputPath, `${this.config.intermediateOutputPath}.json`);
-        }
-        this.log('Done summarizing');
-    }
-    // eslint-disable-next-line @typescript-eslint/require-await -- just to obey the structure
-    async summarizePhase() {
-        this.log(`Summarizing all summaries from ${this.config.inputPath}...`);
-        this.removeIfExists(this.config.outputPath);
-        const summaries = [];
-        (0, files_1.readLineByLineSync)(`${this.config.intermediateOutputPath}.json`, (l) => (0, process_1.processNextUltimateSummary)(l, summaries));
-        const ultimate = (0, process_1.summarizeAllUltimateStats)(summaries);
-        this.log(`Writing ultimate summary to ${this.config.outputPath}`);
-        fs_1.default.writeFileSync(this.config.outputPath, JSON.stringify(ultimate, json_1.jsonReplacer));
-        console.log((0, benchmark_1.ultimateStats2String)(ultimate));
-        if (this.config.graphOutputPath) {
-            (0, graph_1.writeGraphOutput)(ultimate, this.config.graphOutputPath);
-        }
-        return ultimate;
-    }
-    removeIfExists(path) {
-        if (path && fs_1.default.existsSync(path)) {
-            this.log(`Removing existing ${path}`);
-            fs_1.default.rmSync(path, { recursive: true });
-        }
-    }
-}
-exports.BenchmarkSummarizer = BenchmarkSummarizer;
-//# sourceMappingURL=summarizer.js.map
package/util/summarizer/statistics/first-phase/process.js
@@ -1,81 +0,0 @@
-"use strict";
-var __importDefault = (this && this.__importDefault) || function (mod) {
-    return (mod && mod.__esModule) ? mod : { "default": mod };
-};
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.FileMigrator = void 0;
-const fs_1 = __importDefault(require("fs"));
-const path_1 = __importDefault(require("path"));
-const assert_1 = require("../../../assert");
-class FileMigrator {
-    writeHandles = new Map();
-    finished = false;
-    async migrate(sourceFolderContent, targetFolder, originalFile) {
-        (0, assert_1.guard)(!this.finished, () => 'migrator is already marked as finished!');
-        if (!fs_1.default.existsSync(targetFolder)) {
-            fs_1.default.mkdirSync(targetFolder, { recursive: true });
-        }
-        const promises = [];
-        for (const [filepath, content] of sourceFolderContent.entries()) {
-            const target = path_1.default.join(targetFolder, filepath);
-            let targetStream = this.writeHandles.get(target);
-            if (targetStream === undefined) {
-                if (!fs_1.default.existsSync(path_1.default.dirname(target))) {
-                    fs_1.default.mkdirSync(path_1.default.dirname(target), { recursive: true });
-                }
-                targetStream = fs_1.default.createWriteStream(target, { flags: 'a' });
-                this.writeHandles.set(target, targetStream);
-            }
-            // before we write said content we have to group {value: string, context: string} by context (while we can safely assume that there is only one context per file,
-            // i want to be sure
-            let data;
-            // regex matches failed due to encoding errors
-            if (filepath.endsWith('meta/stats.txt') || filepath.endsWith('meta/features.txt')) {
-                data = `{"file":"${originalFile ?? ''}","content":${content.trimEnd()}}\n`;
-            }
-            else {
-                const grouped = groupByContext(content);
-                data = grouped === undefined ? content : grouped.map(s => JSON.stringify(s)).join('\n') + '\n';
-            }
-            promises.push(new Promise((resolve, reject) => targetStream.write(data, 'utf-8', err => {
-                if (err) {
-                    reject(err);
-                }
-                else {
-                    resolve();
-                }
-            })));
-        }
-        await Promise.all(promises);
-    }
-    finish() {
-        for (const handle of this.writeHandles.values()) {
-            handle.close();
-        }
-        this.finished = true;
-    }
-}
-exports.FileMigrator = FileMigrator;
-function groupByContext(input) {
-    if (input === undefined) {
-        return [];
-    }
-    const parsed = input.split('\n').filter(s => s && s !== '').map(s => JSON.parse(s));
-    const grouped = new Map();
-    for (const content of parsed) {
-        if (!Array.isArray(content)) {
-            // in this case it is a meta file or other which does not have to be grouped
-            return undefined;
-        }
-        const [value, context] = content;
-        const get = grouped.get(context);
-        if (get === undefined) {
-            grouped.set(context, [value]);
-        }
-        else {
-            get.push(value);
-        }
-    }
-    return [...grouped.entries()].map(([context, values]) => [values, context]);
-}
-//# sourceMappingURL=process.js.map
package/util/summarizer/statistics/post-process/clusterer.d.ts
@@ -1,26 +0,0 @@
-import { DefaultMap } from '../../../defaultmap';
-import type { NodeId } from '../../../../r-bridge';
-import type { MergeableRecord } from '../../../objects';
-export type ContextsWithCount = DefaultMap<NodeId, number>;
-export type ClusterContextIdMap = DefaultMap<string | undefined, NodeId>;
-export type ClusterValueInfoMap = DefaultMap<string, ContextsWithCount>;
-/** Produced by {@link clusterStatisticsOutput} */
-export interface ClusterReport extends MergeableRecord {
-    /** The input file which has been clustered */
-    readonly filepath: string;
-    /** Maps each context encountered (i.e., every file which contains something associated with the feature) to a unique id, used in the {@link ClusterReport#valueInfoMap | valueInfoMap}. */
-    contextIdMap: ClusterContextIdMap;
-    /**
-     * Counts which contexts contained which values of a feature.
-     * For example, that `<-` occurred in files with ids `[12, 42, 19, 19]` (i.e., the context with the id 19 contained it twice).
-     */
-    valueInfoMap: ClusterValueInfoMap;
-}
-/**
- * Takes a statistics file like `statistics-out/top-2023-01-01-00-00-00/Assignments/assignmentOperator.txt` and clusters the values by context
- *
- * @param filepath - Filepath of the statistics file
- * @param contextIdMap - The id map to use, can use an existing one to reuse ids for same contexts spreading over multiple input files.
- *                       `undefined` is used for unknown contexts. This map allows us to reference contexts with a way shorter identifier (vs. the full file path).
- */
-export declare function clusterStatisticsOutput(filepath: string, contextIdMap?: ClusterContextIdMap): ClusterReport;
package/util/summarizer/statistics/post-process/clusterer.js
@@ -1,43 +0,0 @@
-"use strict";
-var __importDefault = (this && this.__importDefault) || function (mod) {
-    return (mod && mod.__esModule) ? mod : { "default": mod };
-};
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.clusterStatisticsOutput = void 0;
-/**
- * Reading the given file line by line and expecting constructs of {@link StatisticsOutputFormat},
- * this module is responsible for identifying interesting groups of same data.
- *
- * @module
- */
-const n_readlines_1 = __importDefault(require("n-readlines"));
-const defaultmap_1 = require("../../../defaultmap");
-const r_bridge_1 = require("../../../../r-bridge");
-/**
- * Takes a statistics file like `statistics-out/top-2023-01-01-00-00-00/Assignments/assignmentOperator.txt` and clusters the values by context
- *
- * @param filepath - Filepath of the statistics file
- * @param contextIdMap - The id map to use, can use an existing one to reuse ids for same contexts spreading over multiple input files.
- *                       `undefined` is used for unknown contexts. This map allows us to reference contexts with a way shorter identifier (vs. the full file path).
- */
-function clusterStatisticsOutput(filepath, contextIdMap = new defaultmap_1.DefaultMap((0, r_bridge_1.deterministicCountingIdGenerator)())) {
-    const lineReader = new n_readlines_1.default(filepath);
-    // for each value we store the context ids it was seen in (may list the same context multiple times if more often) - this serves as a counter as well
-    const valueInfoMap = new defaultmap_1.DefaultMap(() => new defaultmap_1.DefaultMap(() => 0));
-    let line;
-    // eslint-disable-next-line no-cond-assign
-    while (line = lineReader.next()) {
-        const json = JSON.parse(line.toString());
-        const contextId = contextIdMap.get(json[1]);
-        const value = valueInfoMap.get(json[0]);
-        // step the counter accordingly
-        value.set(contextId, value.get(contextId) + 1);
-    }
-    return {
-        filepath,
-        contextIdMap,
-        valueInfoMap
-    };
-}
-exports.clusterStatisticsOutput = clusterStatisticsOutput;
-//# sourceMappingURL=clusterer.js.map
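To make the removed clusterer easier to follow, here is a minimal, self-contained sketch of the counting scheme it applies: each input line is a JSON `[value, context]` tuple, and for every value we count how often it appears in each context. The file names and values are hypothetical; the sketch uses plain `Map`s instead of flowr's `DefaultMap`.

```ts
// Example statistics lines as the removed clusterer.js reads them: [value, context] tuples.
const lines = [
  '["<-", "scripts/a.R"]',
  '["<-", "scripts/a.R"]',
  '["=", "scripts/b.R"]'
];

// value -> (context -> number of occurrences)
const valueInfo = new Map<string, Map<string, number>>();

for (const line of lines) {
  const [value, context] = JSON.parse(line) as [string, string];
  const perContext = valueInfo.get(value) ?? new Map<string, number>();
  perContext.set(context, (perContext.get(context) ?? 0) + 1);
  valueInfo.set(value, perContext);
}

// Map { '<-' => Map { 'scripts/a.R' => 2 }, '=' => Map { 'scripts/b.R' => 1 } }
console.log(valueInfo);
```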