@eagleoutice/flowr 1.4.1 → 1.4.2

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (133)
  1. package/README.md +3 -3
  2. package/abstract-interpretation/processor.js +9 -3
  3. package/benchmark/slicer.d.ts +0 -3
  4. package/benchmark/slicer.js +4 -16
  5. package/benchmark/stats/print.js +0 -2
  6. package/benchmark/stats/stats.d.ts +2 -3
  7. package/benchmark/stats/stats.js +1 -1
  8. package/cli/benchmark-app.d.ts +1 -0
  9. package/cli/benchmark-app.js +21 -6
  10. package/cli/benchmark-helper-app.d.ts +2 -0
  11. package/cli/benchmark-helper-app.js +15 -6
  12. package/cli/common/options.js +4 -1
  13. package/cli/common/scripts-info.d.ts +1 -0
  14. package/cli/common/scripts-info.js +15 -1
  15. package/cli/export-quads-app.js +1 -5
  16. package/cli/repl/commands/cfg.js +2 -2
  17. package/cli/repl/commands/commands.d.ts +3 -1
  18. package/cli/repl/commands/commands.js +67 -28
  19. package/cli/repl/commands/dataflow.js +2 -2
  20. package/cli/repl/commands/normalize.js +2 -2
  21. package/cli/repl/commands/parse.js +12 -12
  22. package/cli/repl/commands/quit.js +4 -1
  23. package/cli/repl/core.d.ts +4 -2
  24. package/cli/repl/core.js +63 -7
  25. package/cli/repl/execute.d.ts +1 -1
  26. package/cli/repl/execute.js +3 -3
  27. package/cli/repl/server/connection.js +3 -8
  28. package/cli/repl/server/net.js +2 -2
  29. package/cli/statistics-helper-app.js +0 -1
  30. package/cli/summarizer-app.js +1 -2
  31. package/config.d.ts +16 -0
  32. package/config.js +75 -0
  33. package/core/input.d.ts +1 -1
  34. package/core/output.d.ts +1 -2
  35. package/core/print/parse-printer.d.ts +1 -2
  36. package/core/print/parse-printer.js +6 -4
  37. package/core/slicer.js +4 -8
  38. package/core/steps.d.ts +7 -24
  39. package/core/steps.js +5 -12
  40. package/dataflow/environments/environment.js +8 -0
  41. package/dataflow/environments/register.js +1 -0
  42. package/dataflow/extractor.d.ts +2 -2
  43. package/dataflow/extractor.js +10 -2
  44. package/dataflow/internal/process/functions/function-call.js +7 -1
  45. package/dataflow/internal/process/functions/source.d.ts +8 -0
  46. package/dataflow/internal/process/functions/source.js +81 -0
  47. package/dataflow/processor.d.ts +10 -1
  48. package/flowr.d.ts +2 -0
  49. package/flowr.js +14 -9
  50. package/package.json +41 -18
  51. package/r-bridge/lang-4.x/ast/index.d.ts +1 -0
  52. package/r-bridge/lang-4.x/ast/index.js +3 -0
  53. package/r-bridge/lang-4.x/ast/model/processing/decorate.d.ts +2 -0
  54. package/r-bridge/lang-4.x/ast/model/processing/decorate.js +6 -1
  55. package/r-bridge/lang-4.x/ast/parser/json/format.d.ts +14 -0
  56. package/r-bridge/lang-4.x/ast/parser/json/format.js +26 -0
  57. package/r-bridge/lang-4.x/ast/parser/json/parser.d.ts +7 -0
  58. package/r-bridge/lang-4.x/ast/parser/json/parser.js +57 -0
  59. package/r-bridge/lang-4.x/ast/parser/xml/data.d.ts +0 -3
  60. package/r-bridge/lang-4.x/ast/parser/xml/index.d.ts +0 -2
  61. package/r-bridge/lang-4.x/ast/parser/xml/index.js +0 -2
  62. package/r-bridge/lang-4.x/ast/parser/xml/input-format.d.ts +5 -1
  63. package/r-bridge/lang-4.x/ast/parser/xml/input-format.js +7 -10
  64. package/r-bridge/lang-4.x/ast/parser/xml/internal/access.js +2 -2
  65. package/r-bridge/lang-4.x/ast/parser/xml/internal/control/if-then-else.js +1 -1
  66. package/r-bridge/lang-4.x/ast/parser/xml/internal/control/if-then.js +2 -2
  67. package/r-bridge/lang-4.x/ast/parser/xml/internal/expression/expression.js +4 -4
  68. package/r-bridge/lang-4.x/ast/parser/xml/internal/functions/argument.js +2 -2
  69. package/r-bridge/lang-4.x/ast/parser/xml/internal/functions/call.js +4 -4
  70. package/r-bridge/lang-4.x/ast/parser/xml/internal/functions/definition.js +2 -2
  71. package/r-bridge/lang-4.x/ast/parser/xml/internal/functions/parameter.js +2 -2
  72. package/r-bridge/lang-4.x/ast/parser/xml/internal/index.d.ts +0 -1
  73. package/r-bridge/lang-4.x/ast/parser/xml/internal/index.js +0 -1
  74. package/r-bridge/lang-4.x/ast/parser/xml/internal/loops/break.js +2 -2
  75. package/r-bridge/lang-4.x/ast/parser/xml/internal/loops/for.js +3 -6
  76. package/r-bridge/lang-4.x/ast/parser/xml/internal/loops/next.js +2 -2
  77. package/r-bridge/lang-4.x/ast/parser/xml/internal/loops/repeat.js +2 -2
  78. package/r-bridge/lang-4.x/ast/parser/xml/internal/loops/while.js +2 -2
  79. package/r-bridge/lang-4.x/ast/parser/xml/internal/meta.d.ts +6 -11
  80. package/r-bridge/lang-4.x/ast/parser/xml/internal/meta.js +15 -23
  81. package/r-bridge/lang-4.x/ast/parser/xml/internal/operators/binary.js +6 -6
  82. package/r-bridge/lang-4.x/ast/parser/xml/internal/operators/unary.js +3 -3
  83. package/r-bridge/lang-4.x/ast/parser/xml/internal/other/comment.js +2 -2
  84. package/r-bridge/lang-4.x/ast/parser/xml/internal/other/line-directive.js +2 -2
  85. package/r-bridge/lang-4.x/ast/parser/xml/internal/structure/elements.js +3 -3
  86. package/r-bridge/lang-4.x/ast/parser/xml/internal/structure/root.js +3 -4
  87. package/r-bridge/lang-4.x/ast/parser/xml/internal/structure/single-element.js +2 -2
  88. package/r-bridge/lang-4.x/ast/parser/xml/internal/values/number.js +2 -2
  89. package/r-bridge/lang-4.x/ast/parser/xml/internal/values/string.js +2 -2
  90. package/r-bridge/lang-4.x/ast/parser/xml/internal/values/symbol.js +4 -4
  91. package/r-bridge/lang-4.x/values.d.ts +0 -1
  92. package/r-bridge/lang-4.x/values.js +14 -6
  93. package/r-bridge/retriever.d.ts +25 -21
  94. package/r-bridge/retriever.js +73 -23
  95. package/r-bridge/shell-executor.d.ts +3 -17
  96. package/r-bridge/shell-executor.js +9 -78
  97. package/r-bridge/shell.d.ts +5 -27
  98. package/r-bridge/shell.js +31 -92
  99. package/statistics/features/supported/assignments/post-process.js +6 -6
  100. package/statistics/features/supported/comments/post-process.js +2 -2
  101. package/statistics/features/supported/data-access/post-process.js +4 -4
  102. package/statistics/features/supported/defined-functions/post-process.js +4 -4
  103. package/statistics/features/supported/expression-list/post-process.js +3 -3
  104. package/statistics/features/supported/loops/post-process.js +3 -3
  105. package/statistics/features/supported/used-functions/post-process.js +5 -5
  106. package/statistics/features/supported/used-packages/post-process.js +5 -5
  107. package/statistics/features/supported/values/post-process.js +3 -3
  108. package/statistics/features/supported/variables/post-process.js +3 -3
  109. package/statistics/output/ansi.js +1 -1
  110. package/statistics/statistics.js +7 -8
  111. package/util/args.d.ts +8 -4
  112. package/util/args.js +11 -4
  113. package/util/cfg/visitor.js +1 -1
  114. package/util/files.d.ts +6 -0
  115. package/util/files.js +11 -1
  116. package/util/log.js +3 -0
  117. package/util/summarizer/benchmark/first-phase/input.d.ts +2 -1
  118. package/util/summarizer/benchmark/first-phase/input.js +20 -4
  119. package/util/summarizer/benchmark/first-phase/process.d.ts +2 -1
  120. package/util/summarizer/benchmark/first-phase/process.js +14 -5
  121. package/util/summarizer/benchmark/second-phase/graph.js +1 -1
  122. package/util/summarizer/benchmark/second-phase/process.d.ts +2 -0
  123. package/util/summarizer/benchmark/second-phase/process.js +48 -11
  124. package/util/summarizer/benchmark/summarizer.d.ts +0 -4
  125. package/util/summarizer/benchmark/summarizer.js +16 -7
  126. package/util/summarizer/statistics/first-phase/process.js +8 -8
  127. package/util/summarizer/statistics/post-process/post-process-output.js +4 -5
  128. package/r-bridge/lang-4.x/ast/parser/xml/config.d.ts +0 -25
  129. package/r-bridge/lang-4.x/ast/parser/xml/config.js +0 -16
  130. package/r-bridge/lang-4.x/ast/parser/xml/internal/xml-to-json.d.ts +0 -9
  131. package/r-bridge/lang-4.x/ast/parser/xml/internal/xml-to-json.js +0 -51
  132. package/r-bridge/lang-4.x/ast/parser/xml/parser.d.ts +0 -17
  133. package/r-bridge/lang-4.x/ast/parser/xml/parser.js +0 -30
package/util/args.js CHANGED
@@ -7,14 +7,18 @@
  Object.defineProperty(exports, "__esModule", { value: true });
  exports.splitAtEscapeSensitive = void 0;
  /**
- * This splits an input string on the given split string (e.g., ` `) but checks if the string is quoted or escaped.
+ * This splits an input string on the given split string (e.g., ` `), but checks if the string is quoted or escaped.
  *
- * Given an input string like `a "b c" d` with a space character as split this splits the arguments similar to common shell interpreters (i.e., `a`, `b c`, and `d`).
+ * Given an input string like `a "b c" d`, with a space character as split, and escapeQuote set to true,
+ * this splits the arguments similar to common shell interpreters (i.e., `a`, `b c`, and `d`).
+ *
+ * When escapeQuote is set to false instead, we keep quotation marks in the result (i.e., `a`, `"b c"`, and `d`.).
  *
  * @param inputString - The string to split
- * @param split - The **single** character to split on (can not be the backslash or quote)
+ * @param escapeQuote - Keep quotes in args
+ * @param split - The **single** character to split on (can not be backslash or quote)
  */
- function splitAtEscapeSensitive(inputString, split = ' ') {
+ function splitAtEscapeSensitive(inputString, escapeQuote = true, split = ' ') {
  const args = [];
  let current = '';
  let inQuotes = false;
@@ -50,6 +54,9 @@ function splitAtEscapeSensitive(inputString, split = ' ') {
  }
  else if (c === '"' || c === "'") {
  inQuotes = !inQuotes;
+ if (!escapeQuote) {
+ current += c;
+ }
  }
  else if (c === '\\') {
  escaped = true;
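The practical effect of the new `escapeQuote` parameter is sketched below; the subpath import is an assumption for illustration, not a documented entry point of the package.

```ts
// Hedged usage sketch for splitAtEscapeSensitive (import path assumed).
import { splitAtEscapeSensitive } from '@eagleoutice/flowr/util/args';

// Default (escapeQuote = true): quotes group words but are dropped from the result.
console.log(splitAtEscapeSensitive('a "b c" d'));            // [ 'a', 'b c', 'd' ]

// New in 1.4.2 (escapeQuote = false): the quotation marks are kept.
console.log(splitAtEscapeSensitive('a "b c" d', false));     // [ 'a', '"b c"', 'd' ]

// The split character is still a single character other than backslash or quote.
console.log(splitAtEscapeSensitive('a,"b,c",d', true, ',')); // [ 'a', 'b,c', 'd' ]
```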
package/util/cfg/visitor.js CHANGED
@@ -49,7 +49,7 @@ class ControlFlowGraphExecutionTraceVisitor {
  return predecessors;
  }
  visit(cfg) {
- const visited = new Set;
+ const visited = new Set();
  for (const id of cfg.entryPoints) {
  const node = cfg.graph.vertices().get(id);
  (0, assert_1.guard)(node !== undefined, `Node with id ${id} not present`);
package/util/files.d.ts CHANGED
@@ -52,3 +52,9 @@ export declare function readLineByLine(filePath: string, onLine: (line: Buffer,
  * See {@link readLineByLine} for an asynchronous version.
  */
  export declare function readLineByLineSync(filePath: string, onLine: (line: Buffer, lineNumber: number) => void): void;
+ /**
+ * Chops off the last part of the given directory path after a path separator, essentially returning the path's parent directory.
+ * If an absolute path is passed, the returned path is also absolute.
+ * @param directory - The directory whose parent to return
+ */
+ export declare function getParentDirectory(directory: string): string;
package/util/files.js CHANGED
@@ -26,7 +26,7 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
  return (mod && mod.__esModule) ? mod : { "default": mod };
  };
  Object.defineProperty(exports, "__esModule", { value: true });
- exports.readLineByLineSync = exports.readLineByLine = exports.writeTableAsCsv = exports.allRFilesFrom = exports.allRFiles = exports.getAllFiles = void 0;
+ exports.getParentDirectory = exports.readLineByLineSync = exports.readLineByLine = exports.writeTableAsCsv = exports.allRFilesFrom = exports.allRFiles = exports.getAllFiles = void 0;
  const fs_1 = __importStar(require("fs"));
  const path_1 = __importDefault(require("path"));
  const log_1 = require("./log");
@@ -141,4 +141,14 @@ function readLineByLineSync(filePath, onLine) {
  }
  }
  exports.readLineByLineSync = readLineByLineSync;
+ /**
+ * Chops off the last part of the given directory path after a path separator, essentially returning the path's parent directory.
+ * If an absolute path is passed, the returned path is also absolute.
+ * @param directory - The directory whose parent to return
+ */
+ function getParentDirectory(directory) {
+ // apparently this is somehow the best way to do it in node, what
+ return directory.split(path_1.default.sep).slice(0, -1).join(path_1.default.sep);
+ }
+ exports.getParentDirectory = getParentDirectory;
  //# sourceMappingURL=files.js.map
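A quick sketch of what the new `getParentDirectory` helper returns, assuming POSIX-style separators (it splits on `path.sep`); the import path is again illustrative.

```ts
// Hedged sketch for getParentDirectory (import path assumed, POSIX paths).
import { getParentDirectory } from '@eagleoutice/flowr/util/files';

console.log(getParentDirectory('/home/user/project'));  // '/home/user'
console.log(getParentDirectory('a/b/c'));               // 'a/b'

// Because it splits on path.sep, a trailing separator only strips that separator,
// whereas path.dirname('/home/user/project/') would return '/home/user'.
console.log(getParentDirectory('/home/user/project/')); // '/home/user/project'
```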
package/util/log.js CHANGED
@@ -34,6 +34,9 @@ class FlowrLogger extends tslog_1.Logger {
  exports.FlowrLogger = FlowrLogger;
  function getActiveLog() {
  return new FlowrLogger({
+ // set the default minimum level as Warn, and let all apps
+ // (like the REPL) update it to whatever they want it to be
+ minLevel: 4 /* LogLevel.Warn */,
  type: 'pretty',
  name: 'main',
  stylePrettyLogs: true,
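With the default raised to Warn, a consuming application that wants verbose output has to lower the level itself, as the comment above suggests. A minimal sketch, assuming tslog's runtime `settings` object is mutable and that the main `log` instance is importable as shown:

```ts
// Hedged sketch: lowering the minimum log level again at runtime (import path assumed).
import { log } from '@eagleoutice/flowr/util/log';

// tslog exposes a mutable settings object; 0 corresponds to the most verbose level.
log.settings.minLevel = 0;
```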
package/util/summarizer/benchmark/first-phase/input.d.ts CHANGED
@@ -1,2 +1,3 @@
  /// <reference types="node" />
- export declare function processNestMeasurement(line: Buffer, counter: number, summarizedText: string, outputPath: string): Promise<void>;
+ export declare function processRunMeasurement(line: Buffer, fileNum: number, lineNum: number, summarizedText: string, outputPath: string): Promise<void>;
+ export declare function processSummarizedFileMeasurement(file: string, summariesFile: string, outputPath: string): void;
package/util/summarizer/benchmark/first-phase/input.js CHANGED
@@ -3,18 +3,22 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
  return (mod && mod.__esModule) ? mod : { "default": mod };
  };
  Object.defineProperty(exports, "__esModule", { value: true });
- exports.processNestMeasurement = void 0;
+ exports.processSummarizedFileMeasurement = exports.processRunMeasurement = void 0;
  const benchmark_1 = require("../../../../benchmark");
  const assert_1 = require("../../../assert");
  const statistics_1 = require("../../../../statistics");
  const fs_1 = __importDefault(require("fs"));
  const json_1 = require("../../../json");
- async function processNestMeasurement(line, counter, summarizedText, outputPath) {
+ const files_1 = require("../../../files");
+ const process_1 = require("../second-phase/process");
+ async function processRunMeasurement(line, fileNum, lineNum, summarizedText, outputPath) {
  let got = JSON.parse(line.toString());
- console.log(`[${counter}] Summarize for ${got.filename}`);
+ console.log(`[file ${fileNum}, line ${lineNum}] Summarize for ${got.filename}`);
  // now we have to recover the maps and bigints :C
  got = {
  filename: got.filename,
+ 'file-id': got['file-id'],
+ 'run-num': got['run-num'],
  stats: {
  input: got.stats.input,
  request: got.stats.request,
@@ -37,12 +41,24 @@ async function processNestMeasurement(line, counter, summarizedText, outputPath)
  console.log(` - Append raw summary to ${outputPath}`);
  fs_1.default.appendFileSync(outputPath, `${JSON.stringify({
  filename: got.filename,
+ 'file-id': got['file-id'],
+ 'run-num': got['run-num'],
  summarize: summarized
  }, json_1.jsonReplacer)}\n`);
  console.log(` - Append textual summary to ${summarizedText}`);
  fs_1.default.appendFileSync(summarizedText, `${(0, benchmark_1.stats2string)(summarized)}\n`);
  }
- exports.processNestMeasurement = processNestMeasurement;
+ exports.processRunMeasurement = processRunMeasurement;
+ function processSummarizedFileMeasurement(file, summariesFile, outputPath) {
+ console.log(`Summarize all runs for ${file}`);
+ const summaries = [];
+ (0, files_1.readLineByLineSync)(summariesFile, l => (0, process_1.processNextSummary)(l, summaries));
+ fs_1.default.appendFileSync(outputPath, `${JSON.stringify({
+ filename: file,
+ summarize: (0, process_1.summarizeAllSummarizedStats)(summaries)
+ }, json_1.jsonReplacer)}\n`);
+ }
+ exports.processSummarizedFileMeasurement = processSummarizedFileMeasurement;
  function mapPerSliceStats(k, v) {
  return [k, {
  reconstructedCode: v.reconstructedCode,
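Each line handled by `processRunMeasurement` is one JSON object describing a single benchmark run. The sketch below restates its shape only as far as this hunk reveals it; the rest of `stats` is intentionally left open, and the numeric types of the new identifiers are assumptions.

```ts
// Partial, hedged sketch of one raw measurement line (fields beyond the hunk omitted).
interface RawRunMeasurementLine {
  filename: string;
  'file-id': number;  // added in 1.4.2; numeric type assumed
  'run-num': number;  // added in 1.4.2; numeric type assumed
  stats: {
    input: unknown;
    request: unknown;
    // ...further per-run measurements that the summarizer recovers (maps, bigints, ...)
  };
}
```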
package/util/summarizer/benchmark/first-phase/process.d.ts CHANGED
@@ -1,4 +1,4 @@
- import type { SummarizedMeasurement, SummarizedSlicerStats } from '../data';
+ import type { Reduction, SummarizedMeasurement, SummarizedSlicerStats } from '../data';
  import type { PerSliceStats, SlicerStats } from '../../../../benchmark';
  import type { SlicingCriteria } from '../../../../slicing';
  /**
@@ -8,3 +8,4 @@ import type { SlicingCriteria } from '../../../../slicing';
  export declare function summarizeSlicerStats(stats: SlicerStats, report?: (criteria: SlicingCriteria, stats: PerSliceStats) => void): Promise<Readonly<SummarizedSlicerStats>>;
  export declare function summarizeMeasurement(data: number[], totalNumberOfDataPoints?: number): SummarizedMeasurement;
  export declare function summarizeSummarizedMeasurement(data: SummarizedMeasurement[]): SummarizedMeasurement;
+ export declare function summarizeReductions(reductions: Reduction<SummarizedMeasurement>[]): Reduction<SummarizedMeasurement>;
package/util/summarizer/benchmark/first-phase/process.js CHANGED
@@ -26,7 +26,7 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
  return (mod && mod.__esModule) ? mod : { "default": mod };
  };
  Object.defineProperty(exports, "__esModule", { value: true });
- exports.summarizeSummarizedMeasurement = exports.summarizeMeasurement = exports.summarizeSlicerStats = void 0;
+ exports.summarizeReductions = exports.summarizeSummarizedMeasurement = exports.summarizeMeasurement = exports.summarizeSlicerStats = void 0;
  const tmp = __importStar(require("tmp"));
  const assert_1 = require("../../../assert");
  const log_1 = require("../../../log");
@@ -84,7 +84,6 @@ async function summarizeSlicerStats(stats, report = () => {
  const collect = new defaultmap_1.DefaultMap(() => []);
  const sizeOfSliceCriteria = [];
  const reParseShellSession = new r_bridge_1.RShell();
- reParseShellSession.tryToInjectHomeLibPath();
  const reductions = [];
  let failedOutputs = 0;
  const sliceSize = {
@@ -96,7 +95,6 @@ async function summarizeSlicerStats(stats, report = () => {
  normalizedTokens: [],
  dataflowNodes: []
  };
- let first = true;
  let timesHitThreshold = 0;
  for (const [criteria, perSliceStat] of perSliceStats) {
  report(criteria, perSliceStat);
@@ -116,8 +114,7 @@ async function summarizeSlicerStats(stats, report = () => {
  try {
  // there seem to be encoding issues, therefore, we dump to a temp file
  fs_1.default.writeFileSync(tempfile().name, output);
- const reParsed = await (0, r_bridge_1.retrieveNormalizedAstFromRCode)({ request: 'file', content: tempfile().name, ensurePackageInstalled: first }, reParseShellSession);
- first = false;
+ const reParsed = await (0, r_bridge_1.retrieveNormalizedAstFromRCode)({ request: 'file', content: tempfile().name }, reParseShellSession);
  let numberOfNormalizedTokens = 0;
  (0, r_bridge_1.visitAst)(reParsed.ast, _ => {
  numberOfNormalizedTokens++;
@@ -205,4 +202,16 @@ function summarizeSummarizedMeasurement(data) {
  return { min, max, median, mean, std, total };
  }
  exports.summarizeSummarizedMeasurement = summarizeSummarizedMeasurement;
+ function summarizeReductions(reductions) {
+ return {
+ numberOfDataflowNodes: summarizeSummarizedMeasurement(reductions.map(r => r.numberOfDataflowNodes)),
+ numberOfLines: summarizeSummarizedMeasurement(reductions.map(r => r.numberOfLines)),
+ numberOfCharacters: summarizeSummarizedMeasurement(reductions.map(r => r.numberOfCharacters)),
+ numberOfNonWhitespaceCharacters: summarizeSummarizedMeasurement(reductions.map(r => r.numberOfNonWhitespaceCharacters)),
+ numberOfLinesNoAutoSelection: summarizeSummarizedMeasurement(reductions.map(r => r.numberOfLinesNoAutoSelection)),
+ numberOfNormalizedTokens: summarizeSummarizedMeasurement(reductions.map(r => r.numberOfNormalizedTokens)),
+ numberOfRTokens: summarizeSummarizedMeasurement(reductions.map(r => r.numberOfRTokens))
+ };
+ }
+ exports.summarizeReductions = summarizeReductions;
  //# sourceMappingURL=process.js.map
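For orientation, the shapes that the new `summarizeReductions` helper operates on can be read off the hunks above; the following standalone sketch restates them (field names come from the code, while the generic form of `Reduction` is inferred from the .d.ts signature and is an assumption):

```ts
// Sketch only; the real definitions live in the summarizer's data module.
interface SummarizedMeasurement {
  min: number; max: number; median: number; mean: number; std: number; total: number;
}

// Each field is measured per slice and then summarized; summarizeReductions(reductions)
// applies summarizeSummarizedMeasurement field-wise to fold many Reductions into one.
interface Reduction<T> {
  numberOfDataflowNodes: T;
  numberOfLines: T;
  numberOfCharacters: T;
  numberOfNonWhitespaceCharacters: T;
  numberOfLinesNoAutoSelection: T;
  numberOfNormalizedTokens: T;
  numberOfRTokens: T;
}
```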
package/util/summarizer/benchmark/second-phase/graph.js CHANGED
@@ -11,7 +11,7 @@ function writeGraphOutput(ultimate, outputGraphPath) {
  const data = [];
  for (const { name, measurements } of [{ name: 'per-file', measurements: ultimate.commonMeasurements }, { name: 'per-slice', measurements: ultimate.perSliceMeasurements }]) {
  for (const [point, measurement] of measurements) {
- if (point === 'close R session' || point === 'initialize R session' || point === 'inject home path' || point === 'ensure installation of xmlparsedata' || point === 'retrieve token map') {
+ if (point === 'close R session' || point === 'initialize R session') {
  continue;
  }
  const pointName = point === 'total' ? `total ${name}` : point;
package/util/summarizer/benchmark/second-phase/process.d.ts CHANGED
@@ -1,4 +1,6 @@
  /// <reference types="node" />
  import type { SummarizedSlicerStats, UltimateSlicerStats } from '../data';
  export declare function summarizeAllSummarizedStats(stats: SummarizedSlicerStats[]): UltimateSlicerStats;
+ export declare function summarizeAllUltimateStats(stats: UltimateSlicerStats[]): UltimateSlicerStats;
  export declare function processNextSummary(line: Buffer, allSummarized: SummarizedSlicerStats[]): void;
+ export declare function processNextUltimateSummary(line: Buffer, allSummarized: UltimateSlicerStats[]): void;
package/util/summarizer/benchmark/second-phase/process.js CHANGED
@@ -1,9 +1,10 @@
  "use strict";
  Object.defineProperty(exports, "__esModule", { value: true });
- exports.processNextSummary = exports.summarizeAllSummarizedStats = void 0;
+ exports.processNextUltimateSummary = exports.processNextSummary = exports.summarizeAllUltimateStats = exports.summarizeAllSummarizedStats = void 0;
  const defaultmap_1 = require("../../../defaultmap");
  const benchmark_1 = require("../../../../benchmark");
  const assert_1 = require("../../../assert");
+ const process_1 = require("../first-phase/process");
  function summarizeAllSummarizedStats(stats) {
  const commonMeasurements = new defaultmap_1.DefaultMap(() => []);
  const perSliceMeasurements = new defaultmap_1.DefaultMap(() => []);
@@ -34,15 +35,7 @@ function summarizeAllSummarizedStats(stats) {
  perSliceMeasurements: new Map([...perSliceMeasurements.entries()].map(([k, v]) => [k, (0, benchmark_1.summarizeSummarizedMeasurement)(v)])),
  failedToRepParse,
  timesHitThreshold,
- reduction: {
- numberOfDataflowNodes: (0, benchmark_1.summarizeSummarizedMeasurement)(reductions.map(r => r.numberOfDataflowNodes)),
- numberOfLines: (0, benchmark_1.summarizeSummarizedMeasurement)(reductions.map(r => r.numberOfLines)),
- numberOfCharacters: (0, benchmark_1.summarizeSummarizedMeasurement)(reductions.map(r => r.numberOfCharacters)),
- numberOfNonWhitespaceCharacters: (0, benchmark_1.summarizeSummarizedMeasurement)(reductions.map(r => r.numberOfNonWhitespaceCharacters)),
- numberOfLinesNoAutoSelection: (0, benchmark_1.summarizeSummarizedMeasurement)(reductions.map(r => r.numberOfLinesNoAutoSelection)),
- numberOfNormalizedTokens: (0, benchmark_1.summarizeSummarizedMeasurement)(reductions.map(r => r.numberOfNormalizedTokens)),
- numberOfRTokens: (0, benchmark_1.summarizeSummarizedMeasurement)(reductions.map(r => r.numberOfRTokens))
- },
+ reduction: (0, process_1.summarizeReductions)(reductions),
  input: {
  numberOfLines: (0, benchmark_1.summarizeMeasurement)(inputs.map(i => i.numberOfLines)),
  numberOfCharacters: (0, benchmark_1.summarizeMeasurement)(inputs.map(i => i.numberOfCharacters)),
@@ -59,10 +52,36 @@ function summarizeAllSummarizedStats(stats) {
  };
  }
  exports.summarizeAllSummarizedStats = summarizeAllSummarizedStats;
+ function summarizeAllUltimateStats(stats) {
+ return {
+ // these should be deterministic, so we don't technically need to use max, but we do just in case something unexpected happens :)
+ totalRequests: Math.max(...stats.map(s => s.totalRequests)),
+ totalSlices: Math.max(...stats.map(s => s.totalSlices)),
+ failedToRepParse: Math.max(...stats.map(s => s.failedToRepParse)),
+ timesHitThreshold: Math.max(...stats.map(s => s.timesHitThreshold)),
+ // average out / summarize other measurements
+ commonMeasurements: new Map(benchmark_1.CommonSlicerMeasurements.map(m => [m, (0, benchmark_1.summarizeSummarizedMeasurement)(stats.map(s => s.commonMeasurements.get(m)))])),
+ perSliceMeasurements: new Map(benchmark_1.PerSliceMeasurements.map(m => [m, (0, benchmark_1.summarizeSummarizedMeasurement)(stats.map(s => s.perSliceMeasurements.get(m)))])),
+ reduction: (0, process_1.summarizeReductions)(stats.map(s => s.reduction)),
+ input: {
+ numberOfLines: (0, benchmark_1.summarizeSummarizedMeasurement)(stats.map(s => s.input.numberOfLines)),
+ numberOfCharacters: (0, benchmark_1.summarizeSummarizedMeasurement)(stats.map(s => s.input.numberOfCharacters)),
+ numberOfNonWhitespaceCharacters: (0, benchmark_1.summarizeSummarizedMeasurement)(stats.map(s => s.input.numberOfNonWhitespaceCharacters)),
+ numberOfRTokens: (0, benchmark_1.summarizeSummarizedMeasurement)(stats.map(s => s.input.numberOfRTokens)),
+ numberOfNormalizedTokens: (0, benchmark_1.summarizeSummarizedMeasurement)(stats.map(s => s.input.numberOfNormalizedTokens))
+ },
+ dataflow: {
+ numberOfNodes: (0, benchmark_1.summarizeSummarizedMeasurement)(stats.map(s => s.dataflow.numberOfNodes)),
+ numberOfFunctionDefinitions: (0, benchmark_1.summarizeSummarizedMeasurement)(stats.map(s => s.dataflow.numberOfFunctionDefinitions)),
+ numberOfCalls: (0, benchmark_1.summarizeSummarizedMeasurement)(stats.map(s => s.dataflow.numberOfCalls)),
+ numberOfEdges: (0, benchmark_1.summarizeSummarizedMeasurement)(stats.map(s => s.dataflow.numberOfEdges))
+ }
+ };
+ }
+ exports.summarizeAllUltimateStats = summarizeAllUltimateStats;
  function processNextSummary(line, allSummarized) {
  let got = JSON.parse(line.toString());
  got = {
- filename: got.filename,
  summarize: {
  input: got.summarize.input,
  request: got.summarize.request,
@@ -86,4 +105,22 @@ function processNextSummary(line, allSummarized) {
  allSummarized.push(got.summarize);
  }
  exports.processNextSummary = processNextSummary;
+ function processNextUltimateSummary(line, allSummarized) {
+ let got = JSON.parse(line.toString());
+ got = {
+ summarize: {
+ totalRequests: got.summarize.totalRequests,
+ totalSlices: got.summarize.totalSlices,
+ commonMeasurements: new Map(got.summarize.commonMeasurements),
+ perSliceMeasurements: new Map(got.summarize.perSliceMeasurements),
+ failedToRepParse: got.summarize.failedToRepParse,
+ timesHitThreshold: got.summarize.timesHitThreshold,
+ reduction: got.summarize.reduction,
+ input: got.summarize.input,
+ dataflow: got.summarize.dataflow,
+ }
+ };
+ allSummarized.push(got.summarize);
+ }
+ exports.processNextUltimateSummary = processNextUltimateSummary;
  //# sourceMappingURL=process.js.map
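`processNextUltimateSummary` rebuilds the `Map`s by hand because the summaries are written through a JSON replacer that serializes maps as entry arrays. The round trip in isolation, as a generic illustration rather than flowr code:

```ts
// Generic illustration of the Map round trip used above (not flowr-specific code).
const original = new Map<string, number>([['total', 3]]);

// Serialized as an array of entries (what a Map-aware replacer produces) ...
const serialized = JSON.stringify([...original.entries()]); // '[["total",3]]'

// ... and therefore revived with `new Map(entries)` after JSON.parse.
const restored = new Map<string, number>(JSON.parse(serialized));
console.log(restored.get('total')); // 3
```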
package/util/summarizer/benchmark/summarizer.d.ts CHANGED
@@ -18,10 +18,6 @@ export interface BenchmarkSummarizerConfiguration extends CommonSummarizerConfig
  * Path for the intermediate results of the preparation phase
  */
  intermediateOutputPath: string;
- /**
- * Path for the final results of the summarization phase
- */
- outputLogPath?: string;
  /**
  * Path for the final results of the summarization phase
  */
package/util/summarizer/benchmark/summarizer.js CHANGED
@@ -12,24 +12,33 @@ const benchmark_1 = require("../../../benchmark");
  const process_1 = require("./second-phase/process");
  const graph_1 = require("./second-phase/graph");
  const files_1 = require("../../files");
+ const path_1 = __importDefault(require("path"));
  class BenchmarkSummarizer extends summarizer_1.Summarizer {
  constructor(config) {
  super(config);
  }
  async preparationPhase() {
+ this.removeIfExists(`${this.config.intermediateOutputPath}.json`);
  this.removeIfExists(this.config.intermediateOutputPath);
- this.removeIfExists(this.config.outputLogPath);
- await (0, files_1.readLineByLine)(this.config.inputPath, (line, lineNumber) => (0, input_1.processNestMeasurement)(line, lineNumber, `${this.config.intermediateOutputPath}.log`, this.config.intermediateOutputPath));
+ fs_1.default.mkdirSync(this.config.intermediateOutputPath);
+ const dirContent = fs_1.default.readdirSync(this.config.inputPath);
+ for (let i = 0; i < dirContent.length; i++) {
+ const filePath = path_1.default.join(this.config.inputPath, dirContent[i]);
+ const outputPath = path_1.default.join(this.config.intermediateOutputPath, dirContent[i]);
+ // generate measurements for each run
+ await (0, files_1.readLineByLine)(filePath, (line, lineNumber) => (0, input_1.processRunMeasurement)(line, i, lineNumber, `${outputPath}.log`, outputPath));
+ // generate combined measurements for the file
+ (0, input_1.processSummarizedFileMeasurement)(filePath, outputPath, `${this.config.intermediateOutputPath}.json`);
+ }
  this.log('Done summarizing');
  }
  // eslint-disable-next-line @typescript-eslint/require-await -- just to obey the structure
  async summarizePhase() {
  this.log(`Summarizing all summaries from ${this.config.inputPath}...`);
  this.removeIfExists(this.config.outputPath);
- const allSummarized = [];
- (0, files_1.readLineByLineSync)(this.config.intermediateOutputPath, line => (0, process_1.processNextSummary)(line, allSummarized));
- // summarizedRaw
- const ultimate = (0, process_1.summarizeAllSummarizedStats)(allSummarized);
+ const summaries = [];
+ (0, files_1.readLineByLineSync)(`${this.config.intermediateOutputPath}.json`, (l) => (0, process_1.processNextUltimateSummary)(l, summaries));
+ const ultimate = (0, process_1.summarizeAllUltimateStats)(summaries);
  this.log(`Writing ultimate summary to ${this.config.outputPath}`);
  fs_1.default.writeFileSync(this.config.outputPath, JSON.stringify(ultimate, json_1.jsonReplacer));
  console.log((0, benchmark_1.ultimateStats2String)(ultimate));
@@ -41,7 +50,7 @@ class BenchmarkSummarizer extends summarizer_1.Summarizer {
  removeIfExists(path) {
  if (path && fs_1.default.existsSync(path)) {
  this.log(`Removing existing ${path}`);
- fs_1.default.unlinkSync(path);
+ fs_1.default.rmSync(path, { recursive: true });
  }
  }
  }
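Taken together, `inputPath` is now treated as a directory with one raw measurement file per benchmarked R file, and the results are fanned out into a per-run intermediate directory plus a per-file `.json` before the final summary. A hedged usage sketch; the import subpath is assumed and only the configuration fields visible in this hunk are shown:

```ts
// Hedged sketch of the reworked two-phase flow (import path assumed; the real
// configuration type has additional fields beyond the three shown here).
import { BenchmarkSummarizer } from '@eagleoutice/flowr/util/summarizer/benchmark/summarizer';

const summarizer = new BenchmarkSummarizer({
  inputPath: 'benchmark/raw',                       // directory: one raw file per benchmarked R file
  intermediateOutputPath: 'benchmark/intermediate', // becomes a directory of per-run summaries, plus
                                                    // 'benchmark/intermediate.json' with one line per file
  outputPath: 'benchmark/ultimate-summary.json'     // the final ultimate summary
} as never); // cast only because this sketch omits the remaining configuration fields

await summarizer.preparationPhase(); // per-run and per-file summaries
await summarizer.summarizePhase();   // folds the per-file summaries into the ultimate stats
```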
package/util/summarizer/statistics/first-phase/process.js CHANGED
@@ -4,26 +4,26 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
  };
  Object.defineProperty(exports, "__esModule", { value: true });
  exports.FileMigrator = void 0;
- const node_fs_1 = __importDefault(require("node:fs"));
- const node_path_1 = __importDefault(require("node:path"));
+ const fs_1 = __importDefault(require("fs"));
+ const path_1 = __importDefault(require("path"));
  const assert_1 = require("../../../assert");
  class FileMigrator {
  writeHandles = new Map();
  finished = false;
  async migrate(sourceFolderContent, targetFolder, originalFile) {
  (0, assert_1.guard)(!this.finished, () => 'migrator is already marked as finished!');
- if (!node_fs_1.default.existsSync(targetFolder)) {
- node_fs_1.default.mkdirSync(targetFolder, { recursive: true });
+ if (!fs_1.default.existsSync(targetFolder)) {
+ fs_1.default.mkdirSync(targetFolder, { recursive: true });
  }
  const promises = [];
  for (const [filepath, content] of sourceFolderContent.entries()) {
- const target = node_path_1.default.join(targetFolder, filepath);
+ const target = path_1.default.join(targetFolder, filepath);
  let targetStream = this.writeHandles.get(target);
  if (targetStream === undefined) {
- if (!node_fs_1.default.existsSync(node_path_1.default.dirname(target))) {
- node_fs_1.default.mkdirSync(node_path_1.default.dirname(target), { recursive: true });
+ if (!fs_1.default.existsSync(path_1.default.dirname(target))) {
+ fs_1.default.mkdirSync(path_1.default.dirname(target), { recursive: true });
  }
- targetStream = node_fs_1.default.createWriteStream(target, { flags: 'a' });
+ targetStream = fs_1.default.createWriteStream(target, { flags: 'a' });
  this.writeHandles.set(target, targetStream);
  }
  // before we write said content we have to group {value: string, context: string} by context (while we can safely assume that there is only one context per file,
package/util/summarizer/statistics/post-process/post-process-output.js CHANGED
@@ -9,7 +9,6 @@ const path_1 = __importDefault(require("path"));
  const log_1 = require("../../../log");
  const fs_1 = __importDefault(require("fs"));
  const clusterer_1 = require("./clusterer");
- const statistics_2 = require("../../../../statistics");
  const r_bridge_1 = require("../../../../r-bridge");
  const defaultmap_1 = require("../../../defaultmap");
  /**
@@ -61,7 +60,7 @@ function processFeatureFolder(filepath, feature) {
  return reports;
  }
  function processFeatureSubKey(featurePath, subKey, contextIdMap) {
- const targetPath = path_1.default.join(featurePath, `${subKey}${statistics_2.defaultStatisticsFileSuffix}`);
+ const targetPath = path_1.default.join(featurePath, `${subKey}${statistics_1.defaultStatisticsFileSuffix}`);
  if (!fs_1.default.existsSync(targetPath)) {
  log_1.log.warn(`Folder for ${subKey} does not exist at ${targetPath} skipping post processing of this key`);
  return undefined;
@@ -93,9 +92,9 @@ function printClusterReport(report, limit = 1000) {
  const strId = `${name}`.padEnd(longestName, ' ');
  const strCount = count.toLocaleString().padStart(longestCount, ' ');
  const strUnique = unique.toLocaleString().padStart(longestUnique, ' ');
- const uniqueSuffix = `\t (${strUnique} ${statistics_2.formatter.format('unique', { color: 7 /* Colors.White */, effect: statistics_2.ColorEffect.Foreground })})`;
- console.log(`\t${statistics_2.formatter.format(strId, { style: 1 /* FontStyles.Bold */ })}\t ${strCount} ` +
- `${statistics_2.formatter.format('total', { color: 7 /* Colors.White */, effect: statistics_2.ColorEffect.Foreground })}`
+ const uniqueSuffix = `\t (${strUnique} ${statistics_1.formatter.format('unique', { color: 7 /* Colors.White */, effect: statistics_1.ColorEffect.Foreground })})`;
+ console.log(`\t${statistics_1.formatter.format(strId, { style: 1 /* FontStyles.Bold */ })}\t ${strCount} ` +
+ `${statistics_1.formatter.format('total', { color: 7 /* Colors.White */, effect: statistics_1.ColorEffect.Foreground })}`
  + (count !== unique ? uniqueSuffix : ''));
  }
  return shortStats.map(({ name }) => name);
package/r-bridge/lang-4.x/ast/parser/xml/config.d.ts DELETED
@@ -1,25 +0,0 @@
- import type { MergeableRecord } from '../../../../../util/objects';
- import type { TokenMap } from '../../../../retriever';
- /**
- * Configures the settings for the xml parser (like what names should be used to represent the given objects,
- * and what replacements are active with `xmlparsedata` on the R-side of things).
- *
- * @see DEFAULT_XML_PARSER_CONFIG
- */
- export interface XmlParserConfig extends MergeableRecord {
- /** attributes (line, col, ...) are referenced by the given name */
- attributeName: string;
- /** the content (lexeme) is referenced by the given name */
- contentName: string;
- /** the children are referenced by the given name */
- childrenName: string;
- /** Mapping from xml tag name to the real operation of the node */
- tokenMap: TokenMap;
- }
- /**
- * Default configuration to be used for the {@link XmlParserConfig}.
- * <p>
- * Note, that this does not include a sensible {@link XmlParserConfig#tokenMap}, as this must be provided by the corresponding
- * shell environment.
- */
- export declare const DEFAULT_XML_PARSER_CONFIG: XmlParserConfig;
package/r-bridge/lang-4.x/ast/parser/xml/config.js DELETED
@@ -1,16 +0,0 @@
- "use strict";
- Object.defineProperty(exports, "__esModule", { value: true });
- exports.DEFAULT_XML_PARSER_CONFIG = void 0;
- /**
- * Default configuration to be used for the {@link XmlParserConfig}.
- * <p>
- * Note, that this does not include a sensible {@link XmlParserConfig#tokenMap}, as this must be provided by the corresponding
- * shell environment.
- */
- exports.DEFAULT_XML_PARSER_CONFIG = {
- attributeName: '@a',
- contentName: '@v',
- childrenName: '@c',
- tokenMap: { /* this should not be used, but just so that we can omit null-checks */}
- };
- //# sourceMappingURL=config.js.map
package/r-bridge/lang-4.x/ast/parser/xml/internal/xml-to-json.d.ts DELETED
@@ -1,9 +0,0 @@
- import type { XmlParserConfig } from '../config';
- import type { XmlBasedJson } from '../input-format';
- /**
- * Parse the xml presented by R into a json object that will be used for conversion
- *
- * @param config - The configuration to use (i.e., what names should be used for the attributes, children, ...)
- * @param xmlString - The xml input to parse
- */
- export declare function xlm2jsonObject(config: XmlParserConfig, xmlString: string): Promise<XmlBasedJson>;
package/r-bridge/lang-4.x/ast/parser/xml/internal/xml-to-json.js DELETED
@@ -1,51 +0,0 @@
- "use strict";
- var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
- if (k2 === undefined) k2 = k;
- var desc = Object.getOwnPropertyDescriptor(m, k);
- if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
- desc = { enumerable: true, get: function() { return m[k]; } };
- }
- Object.defineProperty(o, k2, desc);
- }) : (function(o, m, k, k2) {
- if (k2 === undefined) k2 = k;
- o[k2] = m[k];
- }));
- var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
- Object.defineProperty(o, "default", { enumerable: true, value: v });
- }) : function(o, v) {
- o["default"] = v;
- });
- var __importStar = (this && this.__importStar) || function (mod) {
- if (mod && mod.__esModule) return mod;
- var result = {};
- if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
- __setModuleDefault(result, mod);
- return result;
- };
- Object.defineProperty(exports, "__esModule", { value: true });
- exports.xlm2jsonObject = void 0;
- const xml2js = __importStar(require("xml2js"));
- /**
- * Parse the xml presented by R into a json object that will be used for conversion
- *
- * @param config - The configuration to use (i.e., what names should be used for the attributes, children, ...)
- * @param xmlString - The xml input to parse
- */
- function xlm2jsonObject(config, xmlString) {
- return xml2js.parseStringPromise(xmlString, {
- attrkey: config.attributeName,
- charkey: config.contentName,
- childkey: config.childrenName,
- charsAsChildren: false,
- explicitChildren: true,
- mergeAttrs: false,
- // we need this for semicolons etc., while we keep the old broken components we ignore them completely
- preserveChildrenOrder: true,
- trim: true,
- includeWhiteChars: true,
- normalize: false,
- strict: true
- });
- }
- exports.xlm2jsonObject = xlm2jsonObject;
- //# sourceMappingURL=xml-to-json.js.map
package/r-bridge/lang-4.x/ast/parser/xml/parser.d.ts DELETED
@@ -1,17 +0,0 @@
- import type { NormalizedAst, IdGenerator, NoInfo } from '../../model';
- import type { XmlParserHooks } from './hooks';
- import type { DeepPartial } from 'ts-essentials';
- import type { TokenMap } from '../../../../retriever';
- export declare const parseLog: import("tslog").Logger<import("tslog").ILogObj>;
- /**
- * The main entry point to normalize the given R ast.
- * You probably want to use {@link retrieveNormalizedAstFromRCode} to directly normalize a piece of code.
- *
- * @param xmlString - The XML string obtained probably by {@link retrieveXmlFromRCode} for normalization.
- * @param tokenMap - The token replacement map in effect by the XML parser
- * @param hooks - Optional hooks to customize the normalization process (see {@link XmlParserHooks} for details)
- * @param getId - The function to be used to generate unique ids for the nodes of the ast. It is up to you to ensure that the ids are unique!
- *
- * @returns The normalized and decorated AST (i.e., as a doubly linked tree)
- */
- export declare function normalize(xmlString: string, tokenMap: TokenMap, hooks?: DeepPartial<XmlParserHooks>, getId?: IdGenerator<NoInfo>): Promise<NormalizedAst>;
package/r-bridge/lang-4.x/ast/parser/xml/parser.js DELETED
@@ -1,30 +0,0 @@
- "use strict";
- Object.defineProperty(exports, "__esModule", { value: true });
- exports.normalize = exports.parseLog = void 0;
- const objects_1 = require("../../../../../util/objects");
- const log_1 = require("../../../../../util/log");
- const config_1 = require("./config");
- const internal_1 = require("./internal");
- const model_1 = require("../../model");
- const hooks_1 = require("./hooks");
- exports.parseLog = log_1.log.getSubLogger({ name: 'ast-parser' });
- /**
- * The main entry point to normalize the given R ast.
- * You probably want to use {@link retrieveNormalizedAstFromRCode} to directly normalize a piece of code.
- *
- * @param xmlString - The XML string obtained probably by {@link retrieveXmlFromRCode} for normalization.
- * @param tokenMap - The token replacement map in effect by the XML parser
- * @param hooks - Optional hooks to customize the normalization process (see {@link XmlParserHooks} for details)
- * @param getId - The function to be used to generate unique ids for the nodes of the ast. It is up to you to ensure that the ids are unique!
- *
- * @returns The normalized and decorated AST (i.e., as a doubly linked tree)
- */
- async function normalize(xmlString, tokenMap, hooks, getId = (0, model_1.deterministicCountingIdGenerator)(0)) {
- const config = { ...config_1.DEFAULT_XML_PARSER_CONFIG, tokenMap };
- const hooksWithDefaults = (0, objects_1.deepMergeObject)(hooks_1.DEFAULT_PARSER_HOOKS, hooks);
- const data = { config, hooks: hooksWithDefaults, currentRange: undefined, currentLexeme: undefined };
- const object = await (0, internal_1.xlm2jsonObject)(config, xmlString);
- return (0, model_1.decorateAst)((0, internal_1.parseRootObjToAst)(data, object), getId);
- }
- exports.normalize = normalize;
- //# sourceMappingURL=parser.js.map