@eagleoutice/flowr 2.0.1 → 2.0.3

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (78)
  1. package/benchmark/slicer.d.ts +1 -0
  2. package/benchmark/slicer.js +69 -8
  3. package/benchmark/stats/print.d.ts +1 -0
  4. package/benchmark/stats/print.js +94 -31
  5. package/benchmark/stats/size-of.d.ts +3 -0
  6. package/benchmark/stats/size-of.js +68 -0
  7. package/benchmark/stats/stats.d.ts +23 -0
  8. package/benchmark/summarizer/data.d.ts +24 -1
  9. package/benchmark/summarizer/first-phase/input.d.ts +2 -2
  10. package/benchmark/summarizer/first-phase/input.js +21 -21
  11. package/benchmark/summarizer/first-phase/process.d.ts +4 -2
  12. package/benchmark/summarizer/first-phase/process.js +120 -33
  13. package/benchmark/summarizer/second-phase/graph.js +7 -0
  14. package/benchmark/summarizer/second-phase/process.js +65 -27
  15. package/benchmark/summarizer/summarizer.d.ts +1 -0
  16. package/benchmark/summarizer/summarizer.js +23 -10
  17. package/cli/repl/commands/commands.js +19 -1
  18. package/cli/slicer-app.js +1 -1
  19. package/dataflow/environments/append.js +1 -2
  20. package/dataflow/environments/built-in.js +2 -1
  21. package/dataflow/environments/clone.js +1 -1
  22. package/dataflow/environments/diff.d.ts +1 -1
  23. package/dataflow/environments/diff.js +16 -18
  24. package/dataflow/environments/environment.d.ts +6 -8
  25. package/dataflow/environments/environment.js +5 -8
  26. package/dataflow/environments/identifier.d.ts +2 -1
  27. package/dataflow/environments/overwrite.js +1 -2
  28. package/dataflow/environments/scoping.js +1 -1
  29. package/dataflow/graph/diff.js +11 -6
  30. package/dataflow/graph/edge.d.ts +2 -3
  31. package/dataflow/graph/edge.js +2 -2
  32. package/dataflow/graph/graph.d.ts +6 -2
  33. package/dataflow/graph/graph.js +16 -9
  34. package/dataflow/graph/vertex.d.ts +2 -1
  35. package/dataflow/info.d.ts +10 -1
  36. package/dataflow/info.js +54 -2
  37. package/dataflow/internal/linker.d.ts +1 -1
  38. package/dataflow/internal/linker.js +1 -2
  39. package/dataflow/internal/process/functions/call/built-in/built-in-assignment.js +5 -5
  40. package/dataflow/internal/process/functions/call/built-in/built-in-for-loop.js +1 -1
  41. package/dataflow/internal/process/functions/call/built-in/built-in-function-definition.js +21 -25
  42. package/dataflow/internal/process/functions/call/built-in/built-in-get.js +6 -1
  43. package/dataflow/internal/process/functions/call/built-in/built-in-if-then-else.js +10 -8
  44. package/dataflow/internal/process/functions/call/built-in/built-in-logical-bin-op.d.ts +1 -0
  45. package/dataflow/internal/process/functions/call/built-in/built-in-logical-bin-op.js +1 -2
  46. package/dataflow/internal/process/functions/call/built-in/built-in-while-loop.js +1 -1
  47. package/dataflow/internal/process/functions/call/default-call-handling.js +1 -1
  48. package/dataflow/internal/process/functions/call/unnamed-call-handling.js +1 -1
  49. package/dataflow/internal/process/process-value.js +0 -1
  50. package/dataflow/processor.d.ts +2 -3
  51. package/package.json +5 -2
  52. package/r-bridge/data/data.d.ts +1 -1
  53. package/r-bridge/data/data.js +1 -1
  54. package/r-bridge/lang-4.x/ast/model/nodes/r-function-call.d.ts +2 -2
  55. package/r-bridge/lang-4.x/ast/model/operators.js +1 -1
  56. package/r-bridge/lang-4.x/ast/model/processing/decorate.js +1 -1
  57. package/r-bridge/lang-4.x/ast/model/processing/stateful-fold.js +1 -1
  58. package/r-bridge/lang-4.x/ast/model/processing/visitor.js +2 -2
  59. package/r-bridge/lang-4.x/ast/parser/xml/internal/functions/normalize-call.js +2 -2
  60. package/r-bridge/lang-4.x/ast/parser/xml/internal/operators/normalize-binary.js +1 -1
  61. package/r-bridge/retriever.d.ts +1 -1
  62. package/r-bridge/retriever.js +3 -2
  63. package/r-bridge/shell.js +2 -1
  64. package/reconstruct/reconstruct.d.ts +3 -3
  65. package/reconstruct/reconstruct.js +40 -41
  66. package/slicing/criterion/filters/all-variables.js +1 -1
  67. package/slicing/static/static-slicer.js +2 -2
  68. package/statistics/features/common-syntax-probability.js +1 -1
  69. package/statistics/features/supported/control-flow/control-flow.js +1 -1
  70. package/statistics/features/supported/defined-functions/defined-functions.js +1 -1
  71. package/statistics/features/supported/loops/loops.js +1 -1
  72. package/statistics/features/supported/used-functions/used-functions.js +1 -1
  73. package/util/assert.d.ts +1 -1
  74. package/util/mermaid/ast.js +4 -0
  75. package/util/mermaid/dfg.d.ts +0 -1
  76. package/util/mermaid/dfg.js +16 -13
  77. package/util/mermaid/mermaid.js +21 -1
  78. package/util/version.js +1 -1
@@ -3,27 +3,24 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
3
3
  return (mod && mod.__esModule) ? mod : { "default": mod };
4
4
  };
5
5
  Object.defineProperty(exports, "__esModule", { value: true });
6
- exports.processSummarizedFileMeasurement = exports.processRunMeasurement = void 0;
6
+ exports.processSummarizedRunMeasurement = exports.processRunMeasurement = void 0;
7
7
  const fs_1 = __importDefault(require("fs"));
8
8
  const process_1 = require("../second-phase/process");
9
9
  const process_2 = require("./process");
10
10
  const assert_1 = require("../../../util/assert");
11
11
  const ansi_1 = require("../../../util/ansi");
12
12
  const json_1 = require("../../../util/json");
13
- const files_1 = require("../../../util/files");
14
13
  const print_1 = require("../../stats/print");
15
- async function processRunMeasurement(line, fileNum, lineNum, summarizedText, outputPath) {
14
+ async function processRunMeasurement(line, fileNum, lineNum, textOutputAppendPath, rawOutputPath) {
16
15
  let got = JSON.parse(line.toString());
17
16
  console.log(`[file ${fileNum}, line ${lineNum}] Summarize for ${got.filename}`);
18
17
  // now we have to recover the maps and bigints :C
19
18
  got = {
20
- filename: got.filename,
21
- 'file-id': got['file-id'],
22
- 'run-num': got['run-num'],
19
+ ...got,
23
20
  stats: {
24
- input: got.stats.input,
25
- request: got.stats.request,
26
- dataflow: got.stats.dataflow,
21
+ ...got.stats,
22
+ memory: new Map(got.stats.memory
23
+ .map(([k, v]) => [k, v])),
27
24
  commonMeasurements: new Map(got.stats.commonMeasurements
28
25
  .map(([k, v]) => {
29
26
  (0, assert_1.guard)(v.endsWith('n'), 'Expected a bigint');
@@ -38,28 +35,31 @@ async function processRunMeasurement(line, fileNum, lineNum, summarizedText, out
38
35
  let atSliceNumber = 0;
39
36
  const summarized = await (0, process_2.summarizeSlicerStats)(got.stats, (criterion, stats) => {
40
37
  console.log(`${ansi_1.escape}1F${ansi_1.escape}1G${ansi_1.escape}2K [${++atSliceNumber}/${totalSlices}] Summarizing ${JSON.stringify(criterion)} (reconstructed has ${stats.reconstructedCode.code.length} characters)`);
38
+ if (stats.reconstructedCode.code.length < 50) {
39
+ console.log(`Reconstructed code: ${stats.reconstructedCode.code}`);
40
+ }
41
41
  });
42
- console.log(` - Append raw summary to ${outputPath}`);
43
- fs_1.default.appendFileSync(outputPath, `${JSON.stringify({
42
+ console.log(` - Write raw summary to ${rawOutputPath}`);
43
+ fs_1.default.writeFileSync(rawOutputPath, `${JSON.stringify({
44
44
  filename: got.filename,
45
45
  'file-id': got['file-id'],
46
46
  'run-num': got['run-num'],
47
47
  summarize: summarized
48
48
  }, json_1.jsonReplacer)}\n`);
49
- console.log(` - Append textual summary to ${summarizedText}`);
50
- fs_1.default.appendFileSync(summarizedText, `${(0, print_1.stats2string)(summarized)}\n`);
49
+ console.log(` - Append textual summary to ${textOutputAppendPath}`);
50
+ fs_1.default.appendFileSync(textOutputAppendPath, `${(0, print_1.stats2string)(summarized)}\n`);
51
51
  }
52
52
  exports.processRunMeasurement = processRunMeasurement;
53
- function processSummarizedFileMeasurement(file, summariesFile, outputPath) {
54
- console.log(`Summarize all runs for ${file}`);
53
+ function processSummarizedRunMeasurement(runNum, summarizedFiles, appendPath) {
54
+ console.log(`Summarizing all file statistics for run ${runNum}`);
55
55
  const summaries = [];
56
- (0, files_1.readLineByLineSync)(summariesFile, l => (0, process_1.processNextSummary)(l, summaries));
57
- fs_1.default.appendFileSync(outputPath, `${JSON.stringify({
58
- filename: file,
59
- summarize: (0, process_1.summarizeAllSummarizedStats)(summaries)
60
- }, json_1.jsonReplacer)}\n`);
56
+ for (const file of summarizedFiles) {
57
+ (0, process_1.processNextSummary)(fs_1.default.readFileSync(file), summaries);
58
+ }
59
+ fs_1.default.appendFileSync(appendPath, `${JSON.stringify((0, process_1.summarizeAllSummarizedStats)(summaries), json_1.jsonReplacer)}\n`);
60
+ console.log(`Appended summary of run ${runNum} to ${appendPath}`);
61
61
  }
62
- exports.processSummarizedFileMeasurement = processSummarizedFileMeasurement;
62
+ exports.processSummarizedRunMeasurement = processSummarizedRunMeasurement;
63
63
  function mapPerSliceStats(k, v) {
64
64
  return [k, {
65
65
  reconstructedCode: v.reconstructedCode,
@@ -1,4 +1,4 @@
1
- import type { Reduction, SummarizedSlicerStats } from '../data';
1
+ import type { Reduction, SummarizedSlicerStats, TimePerToken } from '../data';
2
2
  import type { SummarizedMeasurement } from '../../../util/summarizer';
3
3
  import type { PerSliceStats, SlicerStats } from '../../stats/stats';
4
4
  import type { SlicingCriteria } from '../../../slicing/criterion/parse';
@@ -8,4 +8,6 @@ import type { SlicingCriteria } from '../../../slicing/criterion/parse';
8
8
  */
9
9
  export declare function summarizeSlicerStats(stats: SlicerStats, report?: (criteria: SlicingCriteria, stats: PerSliceStats) => void): Promise<Readonly<SummarizedSlicerStats>>;
10
10
  export declare function summarizeSummarizedMeasurement(data: SummarizedMeasurement[]): SummarizedMeasurement;
11
- export declare function summarizeReductions(reductions: Reduction<SummarizedMeasurement>[]): Reduction<SummarizedMeasurement>;
11
+ export declare function summarizeSummarizedReductions(reductions: Reduction<SummarizedMeasurement>[]): Reduction<SummarizedMeasurement>;
12
+ export declare function summarizeSummarizedTimePerToken(times: TimePerToken[]): TimePerToken;
13
+ export declare function summarizeTimePerToken(times: TimePerToken<number>[]): TimePerToken;
@@ -26,7 +26,7 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
26
26
  return (mod && mod.__esModule) ? mod : { "default": mod };
27
27
  };
28
28
  Object.defineProperty(exports, "__esModule", { value: true });
29
- exports.summarizeReductions = exports.summarizeSummarizedMeasurement = exports.summarizeSlicerStats = void 0;
29
+ exports.summarizeTimePerToken = exports.summarizeSummarizedTimePerToken = exports.summarizeSummarizedReductions = exports.summarizeSummarizedMeasurement = exports.summarizeSlicerStats = void 0;
30
30
  const tmp = __importStar(require("tmp"));
31
31
  const fs_1 = __importDefault(require("fs"));
32
32
  const defaultmap_1 = require("../../../util/defaultmap");
@@ -65,14 +65,24 @@ function safeDivPercentage(a, b) {
65
65
  }
66
66
  }
67
67
  }
68
- function calculateReductionForSlice(input, dataflow, perSlice) {
68
+ function calculateReductionForSlice(input, dataflow, perSlice, ignoreFluff) {
69
+ const perSliceLines = ignoreFluff ? perSlice.nonEmptyLines : perSlice.lines;
70
+ const inputLines = ignoreFluff ? input.numberOfNonEmptyLines : input.numberOfLines;
69
71
  return {
70
- numberOfLines: safeDivPercentage(perSlice.lines, input.numberOfLines),
71
- numberOfLinesNoAutoSelection: safeDivPercentage(perSlice.lines - perSlice.autoSelected, input.numberOfLines),
72
- numberOfCharacters: safeDivPercentage(perSlice.characters, input.numberOfCharacters),
73
- numberOfNonWhitespaceCharacters: safeDivPercentage(perSlice.nonWhitespaceCharacters, input.numberOfNonWhitespaceCharacters),
74
- numberOfRTokens: safeDivPercentage(perSlice.tokens, input.numberOfRTokens),
75
- numberOfNormalizedTokens: safeDivPercentage(perSlice.normalizedTokens, input.numberOfNormalizedTokens),
72
+ numberOfLines: safeDivPercentage(perSliceLines, inputLines),
73
+ numberOfLinesNoAutoSelection: safeDivPercentage(perSliceLines - perSlice.linesWithAutoSelected, inputLines),
74
+ numberOfCharacters: ignoreFluff ?
75
+ safeDivPercentage(perSlice.charactersNoComments, input.numberOfCharactersNoComments) :
76
+ safeDivPercentage(perSlice.characters, input.numberOfCharacters),
77
+ numberOfNonWhitespaceCharacters: ignoreFluff ?
78
+ safeDivPercentage(perSlice.nonWhitespaceCharactersNoComments, input.numberOfNonWhitespaceCharactersNoComments) :
79
+ safeDivPercentage(perSlice.nonWhitespaceCharacters, input.numberOfNonWhitespaceCharacters),
80
+ numberOfRTokens: ignoreFluff ?
81
+ safeDivPercentage(perSlice.tokensNoComments, input.numberOfRTokensNoComments) :
82
+ safeDivPercentage(perSlice.tokens, input.numberOfRTokens),
83
+ numberOfNormalizedTokens: ignoreFluff ?
84
+ safeDivPercentage(perSlice.normalizedTokensNoComments, input.numberOfNormalizedTokensNoComments) :
85
+ safeDivPercentage(perSlice.normalizedTokens, input.numberOfNormalizedTokens),
76
86
  numberOfDataflowNodes: safeDivPercentage(perSlice.dataflowNodes, dataflow.numberOfNodes)
77
87
  };
78
88
  }
@@ -82,33 +92,44 @@ function calculateReductionForSlice(input, dataflow, perSlice) {
82
92
  */
83
93
  async function summarizeSlicerStats(stats, report = () => {
84
94
  }) {
85
- const perSliceStats = stats.perSliceMeasurements;
86
95
  const collect = new defaultmap_1.DefaultMap(() => []);
87
96
  const sizeOfSliceCriteria = [];
88
97
  const reParseShellSession = new shell_1.RShell();
98
+ const sliceTimes = [];
99
+ const reconstructTimes = [];
100
+ const totalTimes = [];
89
101
  const reductions = [];
102
+ const reductionsNoFluff = [];
90
103
  let failedOutputs = 0;
91
104
  const sliceSize = {
92
105
  lines: [],
93
- autoSelected: [],
106
+ nonEmptyLines: [],
107
+ linesWithAutoSelected: [],
94
108
  characters: [],
109
+ charactersNoComments: [],
95
110
  nonWhitespaceCharacters: [],
111
+ nonWhitespaceCharactersNoComments: [],
96
112
  tokens: [],
113
+ tokensNoComments: [],
97
114
  normalizedTokens: [],
115
+ normalizedTokensNoComments: [],
98
116
  dataflowNodes: []
99
117
  };
100
118
  let timesHitThreshold = 0;
101
- for (const [criteria, perSliceStat] of perSliceStats) {
119
+ for (const [criteria, perSliceStat] of stats.perSliceMeasurements) {
102
120
  report(criteria, perSliceStat);
103
121
  for (const measure of perSliceStat.measurements) {
104
122
  collect.get(measure[0]).push(Number(measure[1]));
105
123
  }
106
124
  sizeOfSliceCriteria.push(perSliceStat.slicingCriteria.length);
107
125
  timesHitThreshold += perSliceStat.timesHitThreshold > 0 ? 1 : 0;
108
- const { code: output, autoSelected } = perSliceStat.reconstructedCode;
109
- sliceSize.autoSelected.push(autoSelected);
110
- const lines = output.split('\n').length;
126
+ const { code: output, linesWithAutoSelected } = perSliceStat.reconstructedCode;
127
+ sliceSize.linesWithAutoSelected.push(linesWithAutoSelected);
128
+ const split = output.split('\n');
129
+ const lines = split.length;
130
+ const nonEmptyLines = split.filter(l => l.trim().length > 0).length;
111
131
  sliceSize.lines.push(lines);
132
+ sliceSize.nonEmptyLines.push(nonEmptyLines);
112
133
  sliceSize.characters.push(output.length);
113
134
  const nonWhitespace = (0, strings_1.withoutWhitespace)(output).length;
114
135
  sliceSize.nonWhitespaceCharacters.push(nonWhitespace);
@@ -118,22 +139,60 @@ async function summarizeSlicerStats(stats, report = () => {
118
139
  fs_1.default.writeFileSync(tempfile().name, output);
119
140
  const reParsed = await (0, retriever_1.retrieveNormalizedAstFromRCode)({ request: 'file', content: tempfile().name }, reParseShellSession);
120
141
  let numberOfNormalizedTokens = 0;
121
- (0, visitor_1.visitAst)(reParsed.ast, _ => {
142
+ let numberOfNormalizedTokensNoComments = 0;
143
+ let commentChars = 0;
144
+ let commentCharsNoWhitespace = 0;
145
+ (0, visitor_1.visitAst)(reParsed.ast, t => {
122
146
  numberOfNormalizedTokens++;
147
+ const comments = t.info.additionalTokens?.filter(t => t.type === "RComment" /* RType.Comment */);
148
+ if (comments && comments.length > 0) {
149
+ const content = comments.map(c => c.lexeme ?? '').join('');
150
+ commentChars += content.length;
151
+ commentCharsNoWhitespace += (0, strings_1.withoutWhitespace)(content).length;
152
+ }
153
+ else {
154
+ numberOfNormalizedTokensNoComments++;
155
+ }
123
156
  return false;
124
157
  });
125
158
  sliceSize.normalizedTokens.push(numberOfNormalizedTokens);
159
+ sliceSize.normalizedTokensNoComments.push(numberOfNormalizedTokensNoComments);
160
+ sliceSize.charactersNoComments.push(output.length - commentChars);
161
+ sliceSize.nonWhitespaceCharactersNoComments.push(nonWhitespace - commentCharsNoWhitespace);
126
162
  const numberOfRTokens = await (0, retriever_1.retrieveNumberOfRTokensOfLastParse)(reParseShellSession);
127
163
  sliceSize.tokens.push(numberOfRTokens);
128
- reductions.push(calculateReductionForSlice(stats.input, stats.dataflow, {
164
+ const numberOfRTokensNoComments = await (0, retriever_1.retrieveNumberOfRTokensOfLastParse)(reParseShellSession, true);
165
+ sliceSize.tokensNoComments.push(numberOfRTokensNoComments);
166
+ const perSlice = {
129
167
  lines: lines,
168
+ nonEmptyLines: nonEmptyLines,
130
169
  characters: output.length,
170
+ charactersNoComments: output.length - commentChars,
131
171
  nonWhitespaceCharacters: nonWhitespace,
132
- autoSelected: autoSelected,
172
+ nonWhitespaceCharactersNoComments: nonWhitespace - commentCharsNoWhitespace,
173
+ linesWithAutoSelected: linesWithAutoSelected,
133
174
  tokens: numberOfRTokens,
175
+ tokensNoComments: numberOfRTokensNoComments,
134
176
  normalizedTokens: numberOfNormalizedTokens,
177
+ normalizedTokensNoComments: numberOfNormalizedTokensNoComments,
135
178
  dataflowNodes: perSliceStat.numberOfDataflowNodesSliced
136
- }));
179
+ };
180
+ reductions.push(calculateReductionForSlice(stats.input, stats.dataflow, perSlice, false));
181
+ reductionsNoFluff.push(calculateReductionForSlice(stats.input, stats.dataflow, perSlice, true));
182
+ const sliceTime = Number(perSliceStat.measurements.get('static slicing'));
183
+ const reconstructTime = Number(perSliceStat.measurements.get('reconstruct code'));
184
+ sliceTimes.push({
185
+ raw: sliceTime / numberOfRTokens,
186
+ normalized: sliceTime / numberOfNormalizedTokens
187
+ });
188
+ reconstructTimes.push({
189
+ raw: reconstructTime / numberOfRTokens,
190
+ normalized: reconstructTime / numberOfNormalizedTokens
191
+ });
192
+ totalTimes.push({
193
+ raw: (sliceTime + reconstructTime) / numberOfRTokens,
194
+ normalized: (sliceTime + reconstructTime) / numberOfNormalizedTokens
195
+ });
137
196
  }
138
197
  catch (e) {
139
198
  console.error(` ! Failed to re-parse the output of the slicer for ${JSON.stringify(criteria)}`); //, e
@@ -151,27 +210,28 @@ async function summarizeSlicerStats(stats, report = () => {
151
210
  return {
152
211
  ...stats,
153
212
  perSliceMeasurements: {
154
- numberOfSlices: perSliceStats.size,
213
+ numberOfSlices: stats.perSliceMeasurements.size,
155
214
  sliceCriteriaSizes: (0, summarizer_1.summarizeMeasurement)(sizeOfSliceCriteria),
156
215
  measurements: summarized,
157
216
  failedToRepParse: failedOutputs,
158
217
  timesHitThreshold,
159
- reduction: {
160
- numberOfLines: (0, summarizer_1.summarizeMeasurement)(reductions.map(r => r.numberOfLines).filter(assert_1.isNotUndefined)),
161
- numberOfLinesNoAutoSelection: (0, summarizer_1.summarizeMeasurement)(reductions.map(r => r.numberOfLinesNoAutoSelection).filter(assert_1.isNotUndefined)),
162
- numberOfCharacters: (0, summarizer_1.summarizeMeasurement)(reductions.map(r => r.numberOfCharacters).filter(assert_1.isNotUndefined)),
163
- numberOfNonWhitespaceCharacters: (0, summarizer_1.summarizeMeasurement)(reductions.map(r => r.numberOfNonWhitespaceCharacters).filter(assert_1.isNotUndefined)),
164
- numberOfRTokens: (0, summarizer_1.summarizeMeasurement)(reductions.map(r => r.numberOfRTokens).filter(assert_1.isNotUndefined)),
165
- numberOfNormalizedTokens: (0, summarizer_1.summarizeMeasurement)(reductions.map(r => r.numberOfNormalizedTokens).filter(assert_1.isNotUndefined)),
166
- numberOfDataflowNodes: (0, summarizer_1.summarizeMeasurement)(reductions.map(r => r.numberOfDataflowNodes).filter(assert_1.isNotUndefined))
167
- },
218
+ reduction: summarizeReductions(reductions),
219
+ reductionNoFluff: summarizeReductions(reductionsNoFluff),
220
+ sliceTimePerToken: summarizeTimePerToken(sliceTimes),
221
+ reconstructTimePerToken: summarizeTimePerToken(reconstructTimes),
222
+ totalPerSliceTimePerToken: summarizeTimePerToken(totalTimes),
168
223
  sliceSize: {
169
224
  lines: (0, summarizer_1.summarizeMeasurement)(sliceSize.lines),
225
+ nonEmptyLines: (0, summarizer_1.summarizeMeasurement)(sliceSize.nonEmptyLines),
170
226
  characters: (0, summarizer_1.summarizeMeasurement)(sliceSize.characters),
227
+ charactersNoComments: (0, summarizer_1.summarizeMeasurement)(sliceSize.charactersNoComments),
171
228
  nonWhitespaceCharacters: (0, summarizer_1.summarizeMeasurement)(sliceSize.nonWhitespaceCharacters),
172
- autoSelected: (0, summarizer_1.summarizeMeasurement)(sliceSize.autoSelected),
229
+ nonWhitespaceCharactersNoComments: (0, summarizer_1.summarizeMeasurement)(sliceSize.nonWhitespaceCharactersNoComments),
230
+ linesWithAutoSelected: (0, summarizer_1.summarizeMeasurement)(sliceSize.linesWithAutoSelected),
173
231
  tokens: (0, summarizer_1.summarizeMeasurement)(sliceSize.tokens),
232
+ tokensNoComments: (0, summarizer_1.summarizeMeasurement)(sliceSize.tokensNoComments),
174
233
  normalizedTokens: (0, summarizer_1.summarizeMeasurement)(sliceSize.normalizedTokens),
234
+ normalizedTokensNoComments: (0, summarizer_1.summarizeMeasurement)(sliceSize.normalizedTokensNoComments),
175
235
  dataflowNodes: (0, summarizer_1.summarizeMeasurement)(sliceSize.dataflowNodes)
176
236
  }
177
237
  }
@@ -179,10 +239,12 @@ async function summarizeSlicerStats(stats, report = () => {
179
239
  }
180
240
  exports.summarizeSlicerStats = summarizeSlicerStats;
181
241
  function summarizeSummarizedMeasurement(data) {
242
+ data = data.filter(assert_1.isNotUndefined);
182
243
  const min = data.map(d => d.min).filter(assert_1.isNotUndefined).reduce((a, b) => Math.min(a, b), Infinity);
183
244
  const max = data.map(d => d.max).filter(assert_1.isNotUndefined).reduce((a, b) => Math.max(a, b), -Infinity);
184
- // get most average
185
- const median = data.map(d => d.median).filter(assert_1.isNotUndefined).reduce((a, b) => a + b, 0) / data.length;
245
+ // calculate median of medians (don't just average the median!)
246
+ const medians = data.map(d => d.median).filter(assert_1.isNotUndefined).sort((a, b) => a - b);
247
+ const median = medians[Math.floor(medians.length / 2)];
186
248
  const mean = data.map(d => d.mean).filter(assert_1.isNotUndefined).reduce((a, b) => a + b, 0) / data.length;
187
249
  // Method 1 of https://www.statology.org/averaging-standard-deviations/
188
250
  const std = Math.sqrt(data.map(d => d.std ** 2).filter(assert_1.isNotUndefined).reduce((a, b) => a + b, 0) / data.length);
@@ -190,7 +252,7 @@ function summarizeSummarizedMeasurement(data) {
190
252
  return { min, max, median, mean, std, total };
191
253
  }
192
254
  exports.summarizeSummarizedMeasurement = summarizeSummarizedMeasurement;
193
- function summarizeReductions(reductions) {
255
+ function summarizeSummarizedReductions(reductions) {
194
256
  return {
195
257
  numberOfDataflowNodes: summarizeSummarizedMeasurement(reductions.map(r => r.numberOfDataflowNodes)),
196
258
  numberOfLines: summarizeSummarizedMeasurement(reductions.map(r => r.numberOfLines)),
@@ -201,5 +263,30 @@ function summarizeReductions(reductions) {
201
263
  numberOfRTokens: summarizeSummarizedMeasurement(reductions.map(r => r.numberOfRTokens))
202
264
  };
203
265
  }
204
- exports.summarizeReductions = summarizeReductions;
266
+ exports.summarizeSummarizedReductions = summarizeSummarizedReductions;
267
+ function summarizeReductions(reductions) {
268
+ return {
269
+ numberOfLines: (0, summarizer_1.summarizeMeasurement)(reductions.map(r => r.numberOfLines).filter(assert_1.isNotUndefined)),
270
+ numberOfLinesNoAutoSelection: (0, summarizer_1.summarizeMeasurement)(reductions.map(r => r.numberOfLinesNoAutoSelection).filter(assert_1.isNotUndefined)),
271
+ numberOfCharacters: (0, summarizer_1.summarizeMeasurement)(reductions.map(r => r.numberOfCharacters).filter(assert_1.isNotUndefined)),
272
+ numberOfNonWhitespaceCharacters: (0, summarizer_1.summarizeMeasurement)(reductions.map(r => r.numberOfNonWhitespaceCharacters).filter(assert_1.isNotUndefined)),
273
+ numberOfRTokens: (0, summarizer_1.summarizeMeasurement)(reductions.map(r => r.numberOfRTokens).filter(assert_1.isNotUndefined)),
274
+ numberOfNormalizedTokens: (0, summarizer_1.summarizeMeasurement)(reductions.map(r => r.numberOfNormalizedTokens).filter(assert_1.isNotUndefined)),
275
+ numberOfDataflowNodes: (0, summarizer_1.summarizeMeasurement)(reductions.map(r => r.numberOfDataflowNodes).filter(assert_1.isNotUndefined))
276
+ };
277
+ }
278
+ function summarizeSummarizedTimePerToken(times) {
279
+ return {
280
+ raw: summarizeSummarizedMeasurement(times.map(t => t.raw)),
281
+ normalized: summarizeSummarizedMeasurement(times.map(t => t.normalized)),
282
+ };
283
+ }
284
+ exports.summarizeSummarizedTimePerToken = summarizeSummarizedTimePerToken;
285
+ function summarizeTimePerToken(times) {
286
+ return {
287
+ raw: (0, summarizer_1.summarizeMeasurement)(times.map(t => t.raw)),
288
+ normalized: (0, summarizer_1.summarizeMeasurement)(times.map(t => t.normalized)),
289
+ };
290
+ }
291
+ exports.summarizeTimePerToken = summarizeTimePerToken;
205
292
  //# sourceMappingURL=process.js.map
@@ -47,6 +47,13 @@ function writeGraphOutput(ultimate, outputGraphPath) {
47
47
  value: ultimate.reduction.numberOfNormalizedTokens.mean,
48
48
  extra: `std: ${ultimate.reduction.numberOfNormalizedTokens.std}`
49
49
  });
50
+ data.push({
51
+ name: 'memory (df-graph)',
52
+ unit: 'Bytes',
53
+ value: Number(ultimate.dataflow.sizeOfObject.mean),
54
+ range: Number(ultimate.dataflow.sizeOfObject.std),
55
+ extra: `median: ${(ultimate.dataflow.sizeOfObject.median).toFixed(2)}`
56
+ });
50
57
  // write the output file
51
58
  fs_1.default.writeFileSync(outputGraphPath, JSON.stringify(data, json_1.jsonReplacer));
52
59
  }
@@ -9,7 +9,16 @@ const stats_1 = require("../../stats/stats");
9
9
  function summarizeAllSummarizedStats(stats) {
10
10
  const commonMeasurements = new defaultmap_1.DefaultMap(() => []);
11
11
  const perSliceMeasurements = new defaultmap_1.DefaultMap(() => []);
12
+ const sliceTimesPerToken = [];
13
+ const reconstructTimesPerToken = [];
14
+ const totalPerSliceTimesPerToken = [];
15
+ const retrieveTimesPerToken = [];
16
+ const normalizeTimesPerToken = [];
17
+ const dataflowTimesPerToken = [];
18
+ const totalCommonTimesPerToken = [];
19
+ const memory = new defaultmap_1.DefaultMap(() => []);
12
20
  const reductions = [];
21
+ const reductionsNoFluff = [];
13
22
  const inputs = [];
14
23
  const dataflows = [];
15
24
  let failedToRepParse = 0;
@@ -22,7 +31,18 @@ function summarizeAllSummarizedStats(stats) {
22
31
  for (const [k, v] of stat.perSliceMeasurements.measurements) {
23
32
  perSliceMeasurements.get(k).push(v);
24
33
  }
34
+ sliceTimesPerToken.push(stat.perSliceMeasurements.sliceTimePerToken);
35
+ reconstructTimesPerToken.push(stat.perSliceMeasurements.reconstructTimePerToken);
36
+ totalPerSliceTimesPerToken.push(stat.perSliceMeasurements.totalPerSliceTimePerToken);
37
+ retrieveTimesPerToken.push(stat.retrieveTimePerToken);
38
+ normalizeTimesPerToken.push(stat.normalizeTimePerToken);
39
+ dataflowTimesPerToken.push(stat.dataflowTimePerToken);
40
+ totalCommonTimesPerToken.push(stat.totalCommonTimePerToken);
41
+ for (const [k, v] of stat.memory) {
42
+ memory.get(k).push(v);
43
+ }
25
44
  reductions.push(stat.perSliceMeasurements.reduction);
45
+ reductionsNoFluff.push(stat.perSliceMeasurements.reductionNoFluff);
26
46
  inputs.push(stat.input);
27
47
  dataflows.push(stat.dataflow);
28
48
  failedToRepParse += stat.perSliceMeasurements.failedToRepParse;
@@ -34,21 +54,35 @@ function summarizeAllSummarizedStats(stats) {
34
54
  totalSlices: totalSlices,
35
55
  commonMeasurements: new Map([...commonMeasurements.entries()].map(([k, v]) => [k, (0, summarizer_1.summarizeMeasurement)(v)])),
36
56
  perSliceMeasurements: new Map([...perSliceMeasurements.entries()].map(([k, v]) => [k, (0, process_1.summarizeSummarizedMeasurement)(v)])),
57
+ sliceTimePerToken: (0, process_1.summarizeSummarizedTimePerToken)(sliceTimesPerToken),
58
+ reconstructTimePerToken: (0, process_1.summarizeSummarizedTimePerToken)(reconstructTimesPerToken),
59
+ totalPerSliceTimePerToken: (0, process_1.summarizeSummarizedTimePerToken)(totalPerSliceTimesPerToken),
60
+ retrieveTimePerToken: (0, process_1.summarizeTimePerToken)(retrieveTimesPerToken),
61
+ normalizeTimePerToken: (0, process_1.summarizeTimePerToken)(normalizeTimesPerToken),
62
+ dataflowTimePerToken: (0, process_1.summarizeTimePerToken)(dataflowTimesPerToken),
63
+ totalCommonTimePerToken: (0, process_1.summarizeTimePerToken)(totalCommonTimesPerToken),
37
64
  failedToRepParse,
38
65
  timesHitThreshold,
39
- reduction: (0, process_1.summarizeReductions)(reductions),
66
+ reduction: (0, process_1.summarizeSummarizedReductions)(reductions),
67
+ reductionNoFluff: (0, process_1.summarizeSummarizedReductions)(reductionsNoFluff),
40
68
  input: {
41
69
  numberOfLines: (0, summarizer_1.summarizeMeasurement)(inputs.map(i => i.numberOfLines)),
70
+ numberOfNonEmptyLines: (0, summarizer_1.summarizeMeasurement)(inputs.map(i => i.numberOfNonEmptyLines)),
42
71
  numberOfCharacters: (0, summarizer_1.summarizeMeasurement)(inputs.map(i => i.numberOfCharacters)),
72
+ numberOfCharactersNoComments: (0, summarizer_1.summarizeMeasurement)(inputs.map(i => i.numberOfCharactersNoComments)),
43
73
  numberOfNonWhitespaceCharacters: (0, summarizer_1.summarizeMeasurement)(inputs.map(i => i.numberOfNonWhitespaceCharacters)),
74
+ numberOfNonWhitespaceCharactersNoComments: (0, summarizer_1.summarizeMeasurement)(inputs.map(i => i.numberOfNonWhitespaceCharactersNoComments)),
44
75
  numberOfRTokens: (0, summarizer_1.summarizeMeasurement)(inputs.map(i => i.numberOfRTokens)),
45
- numberOfNormalizedTokens: (0, summarizer_1.summarizeMeasurement)(inputs.map(i => i.numberOfNormalizedTokens))
76
+ numberOfRTokensNoComments: (0, summarizer_1.summarizeMeasurement)(inputs.map(i => i.numberOfRTokensNoComments)),
77
+ numberOfNormalizedTokens: (0, summarizer_1.summarizeMeasurement)(inputs.map(i => i.numberOfNormalizedTokens)),
78
+ numberOfNormalizedTokensNoComments: (0, summarizer_1.summarizeMeasurement)(inputs.map(i => i.numberOfNormalizedTokensNoComments))
46
79
  },
47
80
  dataflow: {
48
81
  numberOfNodes: (0, summarizer_1.summarizeMeasurement)(dataflows.map(d => d.numberOfNodes)),
49
82
  numberOfFunctionDefinitions: (0, summarizer_1.summarizeMeasurement)(dataflows.map(d => d.numberOfFunctionDefinitions)),
50
83
  numberOfCalls: (0, summarizer_1.summarizeMeasurement)(dataflows.map(d => d.numberOfCalls)),
51
- numberOfEdges: (0, summarizer_1.summarizeMeasurement)(dataflows.map(d => d.numberOfEdges))
84
+ numberOfEdges: (0, summarizer_1.summarizeMeasurement)(dataflows.map(d => d.numberOfEdges)),
85
+ sizeOfObject: (0, summarizer_1.summarizeMeasurement)(dataflows.map(d => d.sizeOfObject))
52
86
  }
53
87
  };
54
88
  }
@@ -63,19 +97,33 @@ function summarizeAllUltimateStats(stats) {
63
97
  // average out / summarize other measurements
64
98
  commonMeasurements: new Map(stats_1.CommonSlicerMeasurements.map(m => [m, (0, process_1.summarizeSummarizedMeasurement)(stats.map(s => s.commonMeasurements.get(m)))])),
65
99
  perSliceMeasurements: new Map(stats_1.PerSliceMeasurements.map(m => [m, (0, process_1.summarizeSummarizedMeasurement)(stats.map(s => s.perSliceMeasurements.get(m)))])),
66
- reduction: (0, process_1.summarizeReductions)(stats.map(s => s.reduction)),
100
+ sliceTimePerToken: (0, process_1.summarizeSummarizedTimePerToken)(stats.map(s => s.sliceTimePerToken)),
101
+ reconstructTimePerToken: (0, process_1.summarizeSummarizedTimePerToken)(stats.map(s => s.reconstructTimePerToken)),
102
+ totalPerSliceTimePerToken: (0, process_1.summarizeSummarizedTimePerToken)(stats.map(s => s.totalPerSliceTimePerToken)),
103
+ retrieveTimePerToken: (0, process_1.summarizeSummarizedTimePerToken)(stats.map(s => s.retrieveTimePerToken)),
104
+ normalizeTimePerToken: (0, process_1.summarizeSummarizedTimePerToken)(stats.map(s => s.normalizeTimePerToken)),
105
+ dataflowTimePerToken: (0, process_1.summarizeSummarizedTimePerToken)(stats.map(s => s.dataflowTimePerToken)),
106
+ totalCommonTimePerToken: (0, process_1.summarizeSummarizedTimePerToken)(stats.map(s => s.totalCommonTimePerToken)),
107
+ reduction: (0, process_1.summarizeSummarizedReductions)(stats.map(s => s.reduction)),
108
+ reductionNoFluff: (0, process_1.summarizeSummarizedReductions)(stats.map(s => s.reductionNoFluff)),
67
109
  input: {
68
110
  numberOfLines: (0, process_1.summarizeSummarizedMeasurement)(stats.map(s => s.input.numberOfLines)),
111
+ numberOfNonEmptyLines: (0, process_1.summarizeSummarizedMeasurement)(stats.map(s => s.input.numberOfNonEmptyLines)),
69
112
  numberOfCharacters: (0, process_1.summarizeSummarizedMeasurement)(stats.map(s => s.input.numberOfCharacters)),
113
+ numberOfCharactersNoComments: (0, process_1.summarizeSummarizedMeasurement)(stats.map(s => s.input.numberOfCharactersNoComments)),
70
114
  numberOfNonWhitespaceCharacters: (0, process_1.summarizeSummarizedMeasurement)(stats.map(s => s.input.numberOfNonWhitespaceCharacters)),
115
+ numberOfNonWhitespaceCharactersNoComments: (0, process_1.summarizeSummarizedMeasurement)(stats.map(s => s.input.numberOfNonWhitespaceCharactersNoComments)),
71
116
  numberOfRTokens: (0, process_1.summarizeSummarizedMeasurement)(stats.map(s => s.input.numberOfRTokens)),
72
- numberOfNormalizedTokens: (0, process_1.summarizeSummarizedMeasurement)(stats.map(s => s.input.numberOfNormalizedTokens))
117
+ numberOfRTokensNoComments: (0, process_1.summarizeSummarizedMeasurement)(stats.map(s => s.input.numberOfRTokensNoComments)),
118
+ numberOfNormalizedTokens: (0, process_1.summarizeSummarizedMeasurement)(stats.map(s => s.input.numberOfNormalizedTokens)),
119
+ numberOfNormalizedTokensNoComments: (0, process_1.summarizeSummarizedMeasurement)(stats.map(s => s.input.numberOfNormalizedTokensNoComments))
73
120
  },
74
121
  dataflow: {
75
122
  numberOfNodes: (0, process_1.summarizeSummarizedMeasurement)(stats.map(s => s.dataflow.numberOfNodes)),
76
123
  numberOfFunctionDefinitions: (0, process_1.summarizeSummarizedMeasurement)(stats.map(s => s.dataflow.numberOfFunctionDefinitions)),
77
124
  numberOfCalls: (0, process_1.summarizeSummarizedMeasurement)(stats.map(s => s.dataflow.numberOfCalls)),
78
- numberOfEdges: (0, process_1.summarizeSummarizedMeasurement)(stats.map(s => s.dataflow.numberOfEdges))
125
+ numberOfEdges: (0, process_1.summarizeSummarizedMeasurement)(stats.map(s => s.dataflow.numberOfEdges)),
126
+ sizeOfObject: (0, process_1.summarizeSummarizedMeasurement)(stats.map(s => s.dataflow.sizeOfObject))
79
127
  }
80
128
  };
81
129
  }
@@ -84,22 +132,19 @@ function processNextSummary(line, allSummarized) {
84
132
  let got = JSON.parse(line.toString());
85
133
  got = {
86
134
  summarize: {
87
- input: got.summarize.input,
88
- request: got.summarize.request,
89
- dataflow: got.summarize.dataflow,
135
+ ...got.summarize,
136
+ // restore maps
137
+ memory: new Map(got.summarize.memory
138
+ .map(([k, v]) => [k, v])),
90
139
  commonMeasurements: new Map(got.summarize.commonMeasurements
91
140
  .map(([k, v]) => {
92
141
  (0, assert_1.guard)(v.endsWith('n'), 'Expected a bigint');
93
142
  return [k, BigInt(v.slice(0, -1))];
94
143
  })),
95
144
  perSliceMeasurements: {
96
- numberOfSlices: got.summarize.perSliceMeasurements.numberOfSlices,
97
- sliceCriteriaSizes: got.summarize.perSliceMeasurements.sliceCriteriaSizes,
145
+ ...got.summarize.perSliceMeasurements,
146
+ // restore maps
98
147
  measurements: new Map(got.summarize.perSliceMeasurements.measurements),
99
- reduction: got.summarize.perSliceMeasurements.reduction,
100
- timesHitThreshold: got.summarize.perSliceMeasurements.timesHitThreshold,
101
- failedToRepParse: got.summarize.perSliceMeasurements.failedToRepParse,
102
- sliceSize: got.summarize.perSliceMeasurements.sliceSize
103
148
  }
104
149
  }
105
150
  };
@@ -109,19 +154,12 @@ exports.processNextSummary = processNextSummary;
109
154
  function processNextUltimateSummary(line, allSummarized) {
110
155
  let got = JSON.parse(line.toString());
111
156
  got = {
112
- summarize: {
113
- totalRequests: got.summarize.totalRequests,
114
- totalSlices: got.summarize.totalSlices,
115
- commonMeasurements: new Map(got.summarize.commonMeasurements),
116
- perSliceMeasurements: new Map(got.summarize.perSliceMeasurements),
117
- failedToRepParse: got.summarize.failedToRepParse,
118
- timesHitThreshold: got.summarize.timesHitThreshold,
119
- reduction: got.summarize.reduction,
120
- input: got.summarize.input,
121
- dataflow: got.summarize.dataflow,
122
- }
157
+ ...got,
158
+ // restore maps
159
+ commonMeasurements: new Map(got.commonMeasurements),
160
+ perSliceMeasurements: new Map(got.perSliceMeasurements),
123
161
  };
124
- allSummarized.push(got.summarize);
162
+ allSummarized.push(got);
125
163
  }
126
164
  exports.processNextUltimateSummary = processNextUltimateSummary;
127
165
  //# sourceMappingURL=process.js.map
@@ -28,4 +28,5 @@ export declare class BenchmarkSummarizer extends Summarizer<UltimateSlicerStats,
28
28
  preparationPhase(): Promise<void>;
29
29
  summarizePhase(): Promise<UltimateSlicerStats>;
30
30
  private removeIfExists;
31
+ private summaryFile;
31
32
  }
@@ -13,31 +13,41 @@ const summarizer_1 = require("../../util/summarizer");
13
13
  const files_1 = require("../../util/files");
14
14
  const json_1 = require("../../util/json");
15
15
  const print_1 = require("../stats/print");
16
+ const defaultmap_1 = require("../../util/defaultmap");
16
17
  class BenchmarkSummarizer extends summarizer_1.Summarizer {
17
18
  constructor(config) {
18
19
  super(config);
19
20
  }
20
21
  async preparationPhase() {
21
- this.removeIfExists(`${this.config.intermediateOutputPath}.json`);
22
+ this.removeIfExists(this.summaryFile());
22
23
  this.removeIfExists(this.config.intermediateOutputPath);
23
24
  fs_1.default.mkdirSync(this.config.intermediateOutputPath);
24
- const dirContent = fs_1.default.readdirSync(this.config.inputPath);
25
- for (let i = 0; i < dirContent.length; i++) {
26
- const filePath = path_1.default.join(this.config.inputPath, dirContent[i]);
27
- const outputPath = path_1.default.join(this.config.intermediateOutputPath, dirContent[i]);
25
+ const filesToSummarize = fs_1.default.readdirSync(this.config.inputPath);
26
+ const outputPathsPerRun = new defaultmap_1.DefaultMap(() => []);
27
+ for (let i = 0; i < filesToSummarize.length; i++) {
28
+ const fileInputPath = path_1.default.join(this.config.inputPath, filesToSummarize[i]);
29
+ const outputDir = path_1.default.join(this.config.intermediateOutputPath, path_1.default.parse(filesToSummarize[i]).name);
30
+ fs_1.default.mkdirSync(outputDir);
31
+ const textOutputPath = path_1.default.join(outputDir, 'summary.log');
28
32
  // generate measurements for each run
29
- await (0, files_1.readLineByLine)(filePath, (line, lineNumber) => (0, input_1.processRunMeasurement)(line, i, lineNumber, `${outputPath}.log`, outputPath));
30
- // generate combined measurements for the file
31
- (0, input_1.processSummarizedFileMeasurement)(filePath, outputPath, `${this.config.intermediateOutputPath}.json`);
33
+ await (0, files_1.readLineByLine)(fileInputPath, (line, lineNumber) => {
34
+ const runOutputPath = path_1.default.join(outputDir, `run-${lineNumber}.json`);
35
+ outputPathsPerRun.get(lineNumber).push(runOutputPath);
36
+ return (0, input_1.processRunMeasurement)(line, i, lineNumber, textOutputPath, runOutputPath);
37
+ });
38
+ }
39
+ // generate combined measurements for each file per run
40
+ for (const [run, paths] of outputPathsPerRun.entries()) {
41
+ (0, input_1.processSummarizedRunMeasurement)(run, paths, this.summaryFile());
32
42
  }
33
43
  this.log('Done summarizing');
34
44
  }
35
45
  // eslint-disable-next-line @typescript-eslint/require-await -- just to obey the structure
36
46
  async summarizePhase() {
37
- this.log(`Summarizing all summaries from ${this.config.inputPath}...`);
47
+ this.log(`Summarizing all summaries from ${this.summaryFile()}...`);
38
48
  this.removeIfExists(this.config.outputPath);
39
49
  const summaries = [];
40
- (0, files_1.readLineByLineSync)(`${this.config.intermediateOutputPath}.json`, (l) => (0, process_1.processNextUltimateSummary)(l, summaries));
50
+ (0, files_1.readLineByLineSync)(this.summaryFile(), (l) => (0, process_1.processNextUltimateSummary)(l, summaries));
41
51
  const ultimate = (0, process_1.summarizeAllUltimateStats)(summaries);
42
52
  this.log(`Writing ultimate summary to ${this.config.outputPath}`);
43
53
  fs_1.default.writeFileSync(this.config.outputPath, JSON.stringify(ultimate, json_1.jsonReplacer));
@@ -53,6 +63,9 @@ class BenchmarkSummarizer extends summarizer_1.Summarizer {
53
63
  fs_1.default.rmSync(path, { recursive: true });
54
64
  }
55
65
  }
66
+ summaryFile() {
67
+ return `${this.config.intermediateOutputPath}.json`;
68
+ }
56
69
  }
57
70
  exports.BenchmarkSummarizer = BenchmarkSummarizer;
58
71
  //# sourceMappingURL=summarizer.js.map
@@ -61,6 +61,15 @@ const _commands = {
61
61
  'controlflow*': cfg_1.controlflowStarCommand
62
62
  };
63
63
  let commandsInitialized = false;
64
+ function hasModule(path) {
65
+ try {
66
+ require.resolve(path);
67
+ return true;
68
+ }
69
+ catch (e) {
70
+ return false;
71
+ }
72
+ }
64
73
  function commands() {
65
74
  if (commandsInitialized) {
66
75
  return _commands;
@@ -74,7 +83,16 @@ function commands() {
74
83
  script: true,
75
84
  usageExample: `:${script} --help`,
76
85
  fn: async (output, _s, remainingLine) => {
77
- await (0, execute_1.waitOnScript)(`${__dirname}/../../${target}`, (0, args_1.splitAtEscapeSensitive)(remainingLine), stdio => (0, execute_1.stdioCaptureProcessor)(stdio, msg => output.stdout(msg), msg => output.stderr(msg)));
86
+ // check if the target *module* exists in the current directory, else try two dirs up, otherwise, fail with a message
87
+ let path = `${__dirname}/${target}`;
88
+ if (!hasModule(path)) {
89
+ path = `${__dirname}/../../${target}`;
90
+ if (!hasModule(path)) {
91
+ output.stderr(`Could not find the target script ${target} in the current directory or two directories up.`);
92
+ return;
93
+ }
94
+ }
95
+ await (0, execute_1.waitOnScript)(path, (0, args_1.splitAtEscapeSensitive)(remainingLine), stdio => (0, execute_1.stdioCaptureProcessor)(stdio, msg => output.stdout(msg), msg => output.stderr(msg)));
78
96
  }
79
97
  };
80
98
  }