@memlab/core 1.1.18 → 1.1.20
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/__tests__/parser/StringNode.test.js +12 -1
- package/dist/index.d.ts +4 -2
- package/dist/index.js +6 -3
- package/dist/lib/Config.d.ts +2 -0
- package/dist/lib/Config.js +4 -0
- package/dist/lib/FileManager.d.ts +5 -0
- package/dist/lib/FileManager.js +78 -7
- package/dist/lib/HeapAnalyzer.d.ts +15 -8
- package/dist/lib/HeapAnalyzer.js +106 -98
- package/dist/lib/Types.d.ts +93 -0
- package/dist/lib/Utils.d.ts +9 -3
- package/dist/lib/Utils.js +37 -7
- package/dist/lib/charts/MemoryBarChart.d.ts +20 -0
- package/dist/lib/charts/MemoryBarChart.js +116 -0
- package/dist/lib/heap-data/HeapEdge.d.ts +3 -1
- package/dist/lib/heap-data/HeapEdge.js +13 -0
- package/dist/lib/heap-data/HeapLocation.d.ts +3 -1
- package/dist/lib/heap-data/HeapLocation.js +14 -0
- package/dist/lib/heap-data/HeapNode.d.ts +6 -1
- package/dist/lib/heap-data/HeapNode.js +46 -0
- package/dist/lib/heap-data/HeapSnapshot.d.ts +3 -1
- package/dist/lib/heap-data/HeapSnapshot.js +7 -0
- package/dist/lib/heap-data/HeapStringNode.d.ts +2 -1
- package/dist/lib/heap-data/HeapStringNode.js +5 -0
- package/dist/trace-cluster/TraceBucket.d.ts +11 -1
- package/dist/trace-cluster/TraceBucket.js +146 -1
- package/dist/trace-cluster/TraceElement.d.ts +6 -1
- package/dist/trace-cluster/TraceElement.js +33 -0
- package/dist/trace-cluster/strategies/TraceSimilarityStrategy.js +9 -0
- package/package.json +1 -1
- package/static/visit-order-single-snapshot.json +19 -0
package/dist/lib/charts/MemoryBarChart.js
@@ -0,0 +1,116 @@
+/**
+ * Copyright (c) Meta Platforms, Inc. and affiliates.
+ *
+ * This source code is licensed under the MIT license found in the
+ * LICENSE file in the root directory of this source tree.
+ *
+ * @format
+ * @oncall web_perf_infra
+ */
+'use strict';
+var __importDefault = (this && this.__importDefault) || function (mod) {
+    return (mod && mod.__esModule) ? mod : { "default": mod };
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+const babar_1 = __importDefault(require("babar"));
+const Config_1 = __importDefault(require("../Config"));
+const Console_1 = __importDefault(require("../Console"));
+const Utils_1 = __importDefault(require("../Utils"));
+const FileManager_1 = __importDefault(require("../FileManager"));
+class MemoryBarChart {
+    plotMemoryBarChart(options = {}) {
+        if (Config_1.default.useExternalSnapshot || options.snapshotDir) {
+            return;
+        }
+        let plotData;
+        try {
+            plotData = this.loadPlotData(options);
+        }
+        catch (ex) {
+            Console_1.default.warning(`plot data not load correctly: ${Utils_1.default.getError(ex).message}`);
+            return;
+        }
+        if (plotData.length === 0) {
+            if (Config_1.default.verbose) {
+                Console_1.default.warning('no memory usage data to plot');
+            }
+            return;
+        }
+        // normalize plot data
+        const minY = 1;
+        const maxY = plotData.reduce((m, v) => Math.max(m, v[1]), 0) * 1.15;
+        const yFractions = 1;
+        const yLabelWidth = 1 +
+            Math.max(minY.toFixed(yFractions).length, maxY.toFixed(yFractions).length);
+        const maxWidth = process.stdout.columns - 10;
+        const idealWidth = Math.max(2 * plotData.length + 2 * yLabelWidth, 10);
+        const plotWidth = Math.min(idealWidth, maxWidth);
+        Console_1.default.topLevel('Memory usage across all steps:');
+        Console_1.default.topLevel((0, babar_1.default)(plotData, {
+            color: 'green',
+            width: plotWidth,
+            height: 10,
+            xFractions: 0,
+            yFractions,
+            minY,
+            maxY,
+        }));
+        Console_1.default.topLevel('');
+    }
+    loadPlotDataFromTabsOrder(tabsOrder) {
+        for (const tab of tabsOrder) {
+            if (!(tab.JSHeapUsedSize > 0)) {
+                if (Config_1.default.verbose) {
+                    Console_1.default.error('Memory usage data incomplete');
+                }
+                return [];
+            }
+        }
+        const plotData = tabsOrder.map((tab, idx) => [
+            idx + 1,
+            ((tab.JSHeapUsedSize / 100000) | 0) / 10,
+        ]);
+        // the graph component cannot handle an array with a single element
+        while (plotData.length < 2) {
+            plotData.push([plotData.length + 1, 0]);
+        }
+        return plotData;
+    }
+    loadPlotDataFromWorkDir(options = {}) {
+        const tabsOrder = Utils_1.default.loadTabsOrder(FileManager_1.default.getSnapshotSequenceMetaFile(options));
+        return this.loadPlotDataFromTabsOrder(tabsOrder);
+    }
+    loadPlotData(options = {}) {
+        // plot data for a single run
+        if (!options.controlWorkDir && !options.treatmentWorkDir) {
+            return this.loadPlotDataFromWorkDir(options);
+        }
+        // plot data for control and test run
+        const controlPlotData = this.loadPlotDataFromWorkDir({
+            workDir: options.controlWorkDir,
+        });
+        const testPlotData = this.loadPlotDataFromWorkDir({
+            workDir: options.treatmentWorkDir,
+        });
+        // merge plot data
+        return this.mergePlotData([controlPlotData, testPlotData]);
+    }
+    mergePlotData(plotDataArray) {
+        const plotData = [];
+        let xIndex = 1; // starts from 1
+        for (let i = 0; i < plotDataArray.length; ++i) {
+            const data = plotDataArray[i];
+            for (const [, yValue] of data) {
+                plotData.push([xIndex++, yValue]);
+            }
+            // push blank separators
+            if (i < plotDataArray.length - 1) {
+                for (let k = 0; k < 3; ++k) {
+                    plotData.push([xIndex++, 0]);
+                }
+            }
+        }
+        return plotData;
+    }
+}
+exports.default = new MemoryBarChart();
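
The new chart module feeds babar a list of [step, usedHeapMB] pairs derived from memlab's snapshot sequence metadata. A minimal TypeScript sketch of that conversion (the TabStep shape is assumed here for illustration; the real metadata type lives in the package's Types):

type TabStep = {JSHeapUsedSize: number}; // assumed shape of one entry in the snapshot sequence metadata

// Mirrors loadPlotDataFromTabsOrder above: x is the 1-based step index,
// y is the used JS heap converted to MB and truncated to one decimal place.
function toPlotData(tabsOrder: TabStep[]): Array<[number, number]> {
  const plotData: Array<[number, number]> = tabsOrder.map((tab, idx) => [
    idx + 1,
    ((tab.JSHeapUsedSize / 100000) | 0) / 10,
  ]);
  // babar cannot chart a single data point, so pad to at least two columns
  while (plotData.length < 2) {
    plotData.push([plotData.length + 1, 0]);
  }
  return plotData;
}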

package/dist/lib/heap-data/HeapEdge.d.ts
@@ -8,7 +8,7 @@
  * @lightSyntaxTransform
  * @oncall web_perf_infra
  */
-import type { IHeapEdge } from '../Types';
+import type { AnyRecord, AnyValue, IHeapEdge } from '../Types';
 import type HeapSnapshot from './HeapSnapshot';
 import HeapNode from './HeapNode';
 export default class HeapEdge implements IHeapEdge {
@@ -23,5 +23,7 @@ export default class HeapEdge implements IHeapEdge {
     get to_node(): number;
     get toNode(): HeapNode;
     get fromNode(): HeapNode;
+    getJSONifyableObject(): AnyRecord;
+    toJSONString(...args: Array<AnyValue>): string;
 }
 //# sourceMappingURL=HeapEdge.d.ts.map

package/dist/lib/heap-data/HeapEdge.js
@@ -71,5 +71,18 @@ class HeapEdge {
         const srcNodeIdx = edgeIndex2SrcNodeIndex[this.idx];
         return new HeapNode_1.default(heapSnapshot, srcNodeIdx);
     }
+    getJSONifyableObject() {
+        return {
+            name_or_index: this.name_or_index,
+            type: this.type,
+            edgeIndex: this.edgeIndex,
+            toNode: this.toNode.getJSONifyableObject(),
+            fromNode: this.fromNode.getJSONifyableObject(),
+        };
+    }
+    toJSONString(...args) {
+        const rep = this.getJSONifyableObject();
+        return JSON.stringify(rep, ...args);
+    }
 }
 exports.default = HeapEdge;

package/dist/lib/heap-data/HeapLocation.d.ts
@@ -8,7 +8,7 @@
  * @lightSyntaxTransform
  * @oncall web_perf_infra
  */
-import type { IHeapLocation, IHeapNode, Nullable } from '../Types';
+import type { AnyRecord, AnyValue, IHeapLocation, IHeapNode, Nullable } from '../Types';
 import type HeapSnapshot from './HeapSnapshot';
 export default class HeapLocation implements IHeapLocation {
     private heapSnapshot;
@@ -19,5 +19,7 @@ export default class HeapLocation implements IHeapLocation {
     get script_id(): number;
     get line(): number;
     get column(): number;
+    getJSONifyableObject(): AnyRecord;
+    toJSONString(...args: Array<AnyValue>): string;
 }
 //# sourceMappingURL=HeapLocation.d.ts.map

package/dist/lib/heap-data/HeapLocation.js
@@ -43,5 +43,19 @@ class HeapLocation {
         const locationFieldsCount = heapSnapshot._locationFieldsCount;
         return locations[this.idx * locationFieldsCount + heapSnapshot._locationColumnOffset];
     }
+    getJSONifyableObject() {
+        const node = this.node;
+        const jsonNode = node == null ? null : node.getJSONifyableObject();
+        return {
+            node: jsonNode,
+            script_id: this.script_id,
+            line: this.line,
+            column: this.column,
+        };
+    }
+    toJSONString(...args) {
+        const rep = this.getJSONifyableObject();
+        return JSON.stringify(rep, ...args);
+    }
 }
 exports.default = HeapLocation;

package/dist/lib/heap-data/HeapNode.d.ts
@@ -8,7 +8,7 @@
  * @lightSyntaxTransform
  * @oncall web_perf_infra
  */
-import type { IHeapNode, IHeapEdge, Nullable, EdgeIterationCallback, Predicator, IHeapStringNode } from '../Types';
+import type { IHeapNode, IHeapEdge, Nullable, EdgeIterationCallback, Predicator, IHeapStringNode, AnyRecord, AnyValue } from '../Types';
 import type HeapSnapshot from './HeapSnapshot';
 import HeapEdge from './HeapEdge';
 import HeapLocation from './HeapLocation';
@@ -33,8 +33,11 @@ export default class HeapNode implements IHeapNode {
     forEachReference(callback: EdgeIterationCallback): void;
     findAnyReference(predicate: Predicator<IHeapEdge>): Nullable<IHeapEdge>;
     findAnyReferrer(predicate: Predicator<IHeapEdge>): Nullable<IHeapEdge>;
+    findAnyReferrerNode(predicate: Predicator<IHeapNode>): Nullable<IHeapNode>;
     findReferrers(predicate: Predicator<IHeapEdge>): IHeapEdge[];
+    findReferrerNodes(predicate: Predicator<IHeapNode>): IHeapNode[];
     get referrers(): HeapEdge[];
+    get numOfReferrers(): number;
     forEachReferrer(callback: EdgeIterationCallback): void;
     get hasPathEdge(): boolean;
     get pathEdge(): Nullable<HeapEdge>;
@@ -53,5 +56,7 @@ export default class HeapNode implements IHeapNode {
     getReferrerNodes(edgeName: string | number, edgeType?: string): IHeapNode[];
     get isString(): boolean;
     toStringNode(): Nullable<IHeapStringNode>;
+    getJSONifyableObject(): AnyRecord;
+    toJSONString(...args: Array<AnyValue>): string;
 }
 //# sourceMappingURL=HeapNode.d.ts.map

package/dist/lib/heap-data/HeapNode.js
@@ -151,6 +151,17 @@ class HeapNode {
         });
         return found;
     }
+    findAnyReferrerNode(predicate) {
+        let found = null;
+        this.forEachReferrer((edge) => {
+            const node = edge.fromNode;
+            if (predicate(node)) {
+                found = node;
+                return { stop: true };
+            }
+        });
+        return found;
+    }
     findReferrers(predicate) {
         const ret = [];
         this.forEachReferrer((edge) => {
@@ -161,6 +172,17 @@ class HeapNode {
         });
         return ret;
     }
+    findReferrerNodes(predicate) {
+        const ret = [];
+        this.forEachReferrer((edge) => {
+            const node = edge.fromNode;
+            if (predicate(node)) {
+                ret.push(node);
+            }
+            return null;
+        });
+        return ret;
+    }
     get referrers() {
         const heapSnapshot = this.heapSnapshot;
         const retainingEdgeIndex2EdgeIndex = heapSnapshot._retainingEdgeIndex2EdgeIndex;
@@ -174,6 +196,13 @@ class HeapNode {
         }
         return ret;
     }
+    get numOfReferrers() {
+        const heapSnapshot = this.heapSnapshot;
+        const firstRetainerIndex = heapSnapshot._firstRetainerIndex;
+        const beginIdx = firstRetainerIndex[this.idx];
+        const endIdx = firstRetainerIndex[this.idx + 1];
+        return endIdx - beginIdx;
+    }
     forEachReferrer(callback) {
         const heapSnapshot = this.heapSnapshot;
         const retainingEdgeIndex2EdgeIndex = heapSnapshot._retainingEdgeIndex2EdgeIndex;
@@ -342,6 +371,23 @@ class HeapNode {
             ? new HeapStringNode_1.default(this.heapSnapshot, this.idx)
             : null;
     }
+    getJSONifyableObject() {
+        return {
+            id: this.id,
+            name: this.name,
+            type: this.type,
+            self_size: this.self_size,
+            trace_node_id: this.trace_node_id,
+            nodeIndex: this.nodeIndex,
+            outGoingEdgeCount: this.edge_count,
+            incomingEdgeCount: this.numOfReferrers,
+            contructorName: this.constructor.name,
+        };
+    }
+    toJSONString(...args) {
+        const rep = this.getJSONifyableObject();
+        return JSON.stringify(rep, ...args);
+    }
 }
 exports.default = HeapNode;
 // HeapStringNode has to be imported after exporting HeapNode
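
With the declarations and implementations above, heap nodes gain referrer-oriented queries (findAnyReferrerNode, findReferrerNodes, numOfReferrers) and JSON serialization (getJSONifyableObject, toJSONString). A hedged usage sketch, assuming the node was obtained through the existing IHeapSnapshot traversal APIs and that the IHeapNode interface in Types.d.ts (which gains 93 lines in this diff) declares the new members:

import type {IHeapNode} from '@memlab/core';

// Sketch: report how a node is retained and dump a JSON summary of it.
// The 'Window' prefix check is only an illustrative predicate.
function describeNode(node: IHeapNode): void {
  console.log(`incoming edges: ${node.numOfReferrers}`);
  const windowReferrers = node.findReferrerNodes((referrer: IHeapNode) =>
    referrer.name.startsWith('Window'),
  );
  for (const referrer of windowReferrers) {
    console.log(`retained by ${referrer.name} (node id ${referrer.id})`);
  }
  // pretty-printed: id, name, type, self_size, incoming/outgoing edge counts, ...
  console.log(node.toJSONString(null, 2));
}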

package/dist/lib/heap-data/HeapSnapshot.d.ts
@@ -8,7 +8,7 @@
  * @lightSyntaxTransform
  * @oncall web_perf_infra
  */
-import type { IHeapNode, IHeapNodes, IHeapEdges, IHeapSnapshot, HeapNodeTypes, HeapEdgeTypes, HeapSnapshotMeta, RawHeapSnapshot, NumericDictionary, Nullable } from '../Types';
+import type { AnyRecord, AnyValue, IHeapNode, IHeapNodes, IHeapEdges, IHeapSnapshot, HeapNodeTypes, HeapEdgeTypes, HeapSnapshotMeta, RawHeapSnapshot, NumericDictionary, Nullable } from '../Types';
 import HeapNode from './HeapNode';
 export default class HeapSnapshot implements IHeapSnapshot {
     snapshot: RawHeapSnapshot;
@@ -67,6 +67,8 @@ export default class HeapSnapshot implements IHeapSnapshot {
     _firstRetainerIndex: Uint32Array;
     _edgeIndex2SrcNodeIndex: Uint32Array;
     constructor(snapshot: RawHeapSnapshot, _options?: Record<string, never>);
+    getJSONifyableObject(): AnyRecord;
+    toJSONString(...args: Array<AnyValue>): string;
     hasObjectWithClassName(className: string): boolean;
     getAnyObjectWithClassName(className: string): Nullable<IHeapNode>;
     hasObjectWithPropertyName(nameOrIndex: string | number): boolean;

package/dist/lib/heap-data/HeapSnapshot.js
@@ -129,6 +129,13 @@ class HeapSnapshot {
             },
         };
     }
+    getJSONifyableObject() {
+        return Object.assign({}, this.snapshot.snapshot);
+    }
+    toJSONString(...args) {
+        const rep = this.getJSONifyableObject();
+        return JSON.stringify(rep, ...args);
+    }
     hasObjectWithClassName(className) {
         let detected = false;
         this.nodes.forEach((node) => {
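
Note that the snapshot-level serializer copies only the raw snapshot's header section (meta plus node, edge and trace counts), not the full object graph. A small sketch of how it might be used for debugging, assuming the method is also declared on the IHeapSnapshot interface exported by the package:

import type {IHeapSnapshot} from '@memlab/core';

// Sketch: print the snapshot header (field layouts and counts) for inspection.
function logSnapshotHeader(snapshot: IHeapSnapshot): void {
  console.log(snapshot.toJSONString(null, 2));
}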

package/dist/lib/heap-data/HeapStringNode.d.ts
@@ -8,11 +8,12 @@
  * @lightSyntaxTransform
  * @oncall web_perf_infra
  */
-import type { IHeapStringNode } from '../Types';
+import type { AnyRecord, IHeapStringNode } from '../Types';
 import type HeapSnapshot from './HeapSnapshot';
 import HeapNode from './HeapNode';
 export default class HeapStringNode extends HeapNode implements IHeapStringNode {
     constructor(heapSnapshot: HeapSnapshot, idx: number);
     get stringValue(): string;
+    getJSONifyableObject(): AnyRecord;
 }
 //# sourceMappingURL=HeapStringNode.d.ts.map

package/dist/trace-cluster/TraceBucket.d.ts
@@ -7,13 +7,16 @@
  * @format
  * @oncall web_perf_infra
  */
-import type { IHeapNode, IHeapSnapshot, LeakTrace, LeakTracePathItem, Optional, TraceCluster, TraceClusterDiff, IClusterStrategy } from '../lib/Types';
+import type { IHeapNode, IHeapSnapshot, LeakTrace, LeakTracePathItem, Optional, TraceCluster, TraceClusterDiff, IClusterStrategy, ControlTreatmentClusterResult } from '../lib/Types';
 import type { NormalizedTraceElement } from './TraceElement';
 declare type AggregateNodeCb = (ids: Set<number>, snapshot: IHeapSnapshot, checkCb: (node: IHeapNode) => boolean, calculateCb: (node: IHeapNode) => number) => number;
 export default class NormalizedTrace {
     private trace;
     private traceSummary;
     constructor(p?: LeakTracePathItem | null, snapshot?: IHeapSnapshot | null);
+    static getPathLastNode(p: LeakTracePathItem, options?: {
+        untilFirstDetachedDOMElem?: boolean;
+    }): Optional<IHeapNode>;
     static pathToTrace(p: LeakTracePathItem, options?: {
         untilFirstDetachedDOMElem?: boolean;
     }): NormalizedTraceElement[];
@@ -21,6 +24,7 @@ export default class NormalizedTrace {
     getTraceSummary(): string;
     static addLeakedNodeToCluster(cluster: TraceCluster, path: LeakTracePathItem): void;
     static calculateClusterRetainedSize(cluster: TraceCluster, snapshot: IHeapSnapshot, aggregateDominatorMetrics: AggregateNodeCb): number;
+    static getSamplePathMaxLength(paths: LeakTracePathItem[]): number;
     static samplePaths(paths: LeakTracePathItem[]): LeakTracePathItem[];
     private static diffTraces;
     static diffClusters(newClusters: TraceCluster[], existingClusters: TraceCluster[]): TraceClusterDiff;
@@ -30,6 +34,12 @@ export default class NormalizedTrace {
     static clusterPaths(paths: LeakTracePathItem[], snapshot: IHeapSnapshot, aggregateDominatorMetrics: AggregateNodeCb, option?: {
         strategy?: IClusterStrategy;
     }): TraceCluster[];
+    private static buildTraceToPathMap;
+    private static pushLeakPathToCluster;
+    private static initEmptyCluster;
+    static clusterControlTreatmentPaths(controlPaths: LeakTracePathItem[], controlSnapshot: IHeapSnapshot, treatmentPaths: LeakTracePathItem[], treatmentSnapshot: IHeapSnapshot, aggregateDominatorMetrics: AggregateNodeCb, option?: {
+        strategy?: IClusterStrategy;
+    }): ControlTreatmentClusterResult;
     static generateUnClassifiedClusters(paths: LeakTracePathItem[], snapshot: IHeapSnapshot, aggregateDominatorMetrics: AggregateNodeCb): TraceCluster[];
     static loadCluster(): NormalizedTrace[];
     static saveCluster(clusters: NormalizedTrace[]): void;

package/dist/trace-cluster/TraceBucket.js
@@ -38,6 +38,27 @@ class NormalizedTrace {
                 : '';
         }
     }
+    static getPathLastNode(p, options = {}) {
+        const skipRest = !!options.untilFirstDetachedDOMElem;
+        const shouldSkip = (node) => {
+            // only consider the trace from GC root to the first detached element
+            // NOTE: do not use utils.isDetachedDOMNode, which relies on
+            // the fact that p.node is a HeapNode
+            return (skipRest &&
+                node.name.startsWith('Detached ') &&
+                node.name !== 'Detached InternalNode');
+        };
+        let curItem = p;
+        while (curItem.next) {
+            if (curItem.node) {
+                if (shouldSkip(curItem.node)) {
+                    break;
+                }
+            }
+            curItem = curItem.next;
+        }
+        return curItem === null || curItem === void 0 ? void 0 : curItem.node;
+    }
     // convert path to leak trace
     static pathToTrace(p, options = {}) {
         const skipRest = !!options.untilFirstDetachedDOMElem;
@@ -101,19 +122,40 @@ class NormalizedTrace {
         }
         return (cluster.retainedSize = aggregateDominatorMetrics(cluster.leakedNodeIds, snapshot, () => true, (node) => node.retainedSize));
     }
+    static getSamplePathMaxLength(paths) {
+        const lengthArr = paths.map(p => Utils_1.default.getLeakTracePathLength(p));
+        return Math.max(30, Utils_1.default.getNumberAtPercentile(lengthArr, 80));
+    }
     static samplePaths(paths) {
         const maxCount = 5000;
+        if (paths.length <= maxCount) {
+            return [...paths];
+        }
         const sampleRatio = Math.min(1, maxCount / paths.length);
         if (sampleRatio < 1) {
             Console_1.default.warning('Sampling trace due to a large number of traces:');
             Console_1.default.lowLevel(` Number of Traces: ${paths.length}`);
-            Console_1.default.lowLevel(` Sampling Ratio:
+            Console_1.default.lowLevel(` Sampling Ratio: ${Utils_1.default.getReadablePercent(sampleRatio)}`);
         }
         const ret = [];
+        const samplePathMaxLength = NormalizedTrace.getSamplePathMaxLength(paths);
+        if (Config_1.default.verbose) {
+            Console_1.default.lowLevel(` Sample Trace's Max Length: ${samplePathMaxLength}`);
+        }
         for (const p of paths) {
+            if (Utils_1.default.getLeakTracePathLength(p) > samplePathMaxLength) {
+                continue;
+            }
             if (Math.random() < sampleRatio) {
                 ret.push(p);
             }
+            else {
+                // force sample objects with non-trvial self size
+                const lastNode = NormalizedTrace.getPathLastNode(p);
+                if (lastNode && lastNode.self_size >= 100000) {
+                    ret.push(p);
+                }
+            }
         }
         return ret;
     }
@@ -233,6 +275,109 @@ class NormalizedTrace {
         clusters.sort((c1, c2) => { var _a, _b; return ((_a = c2.retainedSize) !== null && _a !== void 0 ? _a : 0) - ((_b = c1.retainedSize) !== null && _b !== void 0 ? _b : 0); });
         return clusters;
     }
+    static buildTraceToPathMap(paths) {
+        const traceToPathMap = new Map();
+        for (const p of paths) {
+            const trace = NormalizedTrace.pathToTrace(p, {
+                untilFirstDetachedDOMElem: true,
+            });
+            traceToPathMap.set(trace, p);
+        }
+        return traceToPathMap;
+    }
+    static pushLeakPathToCluster(traceToPathMap, trace, cluster) {
+        // if this is a control path, update control cluster
+        const curPath = traceToPathMap.get(trace);
+        if (cluster.count === 0) {
+            cluster.path = curPath;
+            // add representative object id if there is one
+            const lastNode = trace[trace.length - 1];
+            if ('id' in lastNode) {
+                cluster.id = lastNode.id;
+            }
+        }
+        cluster.count = cluster.count + 1;
+        NormalizedTrace.addLeakedNodeToCluster(cluster, curPath);
+    }
+    static initEmptyCluster(snapshot) {
+        return {
+            path: {},
+            count: 0,
+            snapshot,
+            retainedSize: 0,
+            leakedNodeIds: new Set(),
+        };
+    }
+    static clusterControlTreatmentPaths(controlPaths, controlSnapshot, treatmentPaths, treatmentSnapshot, aggregateDominatorMetrics, option = {}) {
+        const result = {
+            controlOnlyClusters: [],
+            treatmentOnlyClusters: [],
+            hybridClusters: [],
+        };
+        Console_1.default.overwrite('Clustering leak traces');
+        if (controlPaths.length === 0 && treatmentPaths.length === 0) {
+            Console_1.default.midLevel('No leaks found');
+            return result;
+        }
+        // sample paths if there are too many
+        controlPaths = this.samplePaths(controlPaths);
+        treatmentPaths = this.samplePaths(treatmentPaths);
+        // build control trace to control path map
+        const controlTraceToPathMap = NormalizedTrace.buildTraceToPathMap(controlPaths);
+        const controlTraces = Array.from(controlTraceToPathMap.keys());
+        // build treatment trace to treatment path map
+        const treatmentTraceToPathMap = NormalizedTrace.buildTraceToPathMap(treatmentPaths);
+        const treatmentTraces = Array.from(treatmentTraceToPathMap.keys());
+        // cluster traces from both the control group and the treatment group
+        const { allClusters } = NormalizedTrace.diffTraces([...controlTraces, ...treatmentTraces], [], option);
+        // construct TraceCluster from clustering result
+        allClusters.forEach((traces) => {
+            var _a, _b;
+            const controlCluster = NormalizedTrace.initEmptyCluster(controlSnapshot);
+            const treatmentCluster = NormalizedTrace.initEmptyCluster(treatmentSnapshot);
+            for (const trace of traces) {
+                const normalizedTrace = trace;
+                if (controlTraceToPathMap.has(normalizedTrace)) {
+                    NormalizedTrace.pushLeakPathToCluster(controlTraceToPathMap, normalizedTrace, controlCluster);
+                }
+                else {
+                    NormalizedTrace.pushLeakPathToCluster(treatmentTraceToPathMap, normalizedTrace, treatmentCluster);
+                }
+            }
+            const controlClusterSize = (_a = controlCluster.count) !== null && _a !== void 0 ? _a : 0;
+            const treatmentClusterSize = (_b = treatmentCluster.count) !== null && _b !== void 0 ? _b : 0;
+            // calculate aggregated cluster size for control cluster
+            if (controlClusterSize > 0) {
+                this.calculateClusterRetainedSize(controlCluster, controlSnapshot, aggregateDominatorMetrics);
+            }
+            // calculate aggregated cluster size for treatment cluster
+            if (treatmentClusterSize > 0) {
+                this.calculateClusterRetainedSize(treatmentCluster, treatmentSnapshot, aggregateDominatorMetrics);
+            }
+            if (controlClusterSize === 0) {
+                result.treatmentOnlyClusters.push(treatmentCluster);
+            }
+            else if (treatmentClusterSize === 0) {
+                result.controlOnlyClusters.push(controlCluster);
+            }
+            else {
+                result.hybridClusters.push({
+                    control: controlCluster,
+                    treatment: treatmentCluster,
+                });
+            }
+        });
+        result.treatmentOnlyClusters.sort((c1, c2) => { var _a, _b; return ((_a = c2.retainedSize) !== null && _a !== void 0 ? _a : 0) - ((_b = c1.retainedSize) !== null && _b !== void 0 ? _b : 0); });
+        result.controlOnlyClusters.sort((c1, c2) => { var _a, _b; return ((_a = c2.retainedSize) !== null && _a !== void 0 ? _a : 0) - ((_b = c1.retainedSize) !== null && _b !== void 0 ? _b : 0); });
+        result.hybridClusters.sort((g1, g2) => {
+            var _a, _b, _c, _d;
+            return ((_a = g2.control.retainedSize) !== null && _a !== void 0 ? _a : 0) +
+                ((_b = g2.treatment.retainedSize) !== null && _b !== void 0 ? _b : 0) -
+                ((_c = g1.control.retainedSize) !== null && _c !== void 0 ? _c : 0) -
+                ((_d = g1.treatment.retainedSize) !== null && _d !== void 0 ? _d : 0);
+        });
+        return result;
+    }
     static generateUnClassifiedClusters(paths, snapshot, aggregateDominatorMetrics) {
         return this.clusterPaths(paths, snapshot, aggregateDominatorMetrics, {
             strategy: new TraceAsClusterStrategy_1.default(),
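
clusterControlTreatmentPaths clusters the control and treatment leak traces together, then splits each cluster into control-only, treatment-only, and hybrid buckets sorted by retained size. The method itself is internal, but the shape of its ControlTreatmentClusterResult comes from this diff, so a consumer-side sketch might look like the following (assuming the result type is re-exported from the package entry point):

import type {ControlTreatmentClusterResult} from '@memlab/core';

// Sketch: summarize a control/treatment clustering result.
// Treatment-only clusters are the likely candidates for regressions
// introduced by the treatment run.
function summarizeClusters(result: ControlTreatmentClusterResult): void {
  console.log(`treatment-only clusters: ${result.treatmentOnlyClusters.length}`);
  console.log(`control-only clusters: ${result.controlOnlyClusters.length}`);
  console.log(`hybrid clusters: ${result.hybridClusters.length}`);
  for (const {control, treatment} of result.hybridClusters) {
    console.log(
      `shared cluster retains ${control.retainedSize} bytes (control) vs ` +
        `${treatment.retainedSize} bytes (treatment)`,
    );
  }
}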

package/dist/trace-cluster/TraceElement.d.ts
@@ -7,7 +7,7 @@
  * @format
  * @oncall web_perf_infra
  */
-import type { EdgeIterationCallback, IHeapEdge, IHeapLocation, IHeapNode, IHeapSnapshot, IHeapStringNode, Nullable } from '../lib/Types';
+import type { AnyValue, EdgeIterationCallback, IHeapEdge, IHeapLocation, IHeapNode, IHeapSnapshot, IHeapStringNode, Nullable } from '../lib/Types';
 export declare class NodeRecord implements IHeapNode {
     kind: string;
     name: string;
@@ -32,11 +32,14 @@ export declare class NodeRecord implements IHeapNode {
     forEachReference(_callback: EdgeIterationCallback): void;
     set referrers(r: IHeapEdge[]);
     get referrers(): IHeapEdge[];
+    get numOfReferrers(): number;
     toStringNode(): IHeapStringNode;
     forEachReferrer(_callback: EdgeIterationCallback): void;
     findAnyReference(): Nullable<IHeapEdge>;
     findAnyReferrer(): Nullable<IHeapEdge>;
+    findAnyReferrerNode(): Nullable<IHeapNode>;
     findReferrers(): IHeapEdge[];
+    findReferrerNodes(): IHeapNode[];
     set hasPathEdge(f: boolean);
     get hasPathEdge(): boolean;
     set pathEdge(r: IHeapEdge);
@@ -51,6 +54,7 @@ export declare class NodeRecord implements IHeapNode {
     getAnyReferrerNode(_edgeName: string | number, _edgeType?: string): Nullable<IHeapNode>;
     getReferrers(_edgeName: string | number, _edgeType?: string): IHeapEdge[];
     getReferrerNodes(_edgeName: string | number, _edgeType?: string): IHeapNode[];
+    toJSONString(...args: Array<AnyValue>): string;
     constructor(node: IHeapNode);
     private extraceNodeName;
 }
@@ -62,6 +66,7 @@ export declare class EdgeRecord implements IHeapEdge {
     is_index: boolean;
     to_node: number;
     constructor(edge: IHeapEdge);
+    toJSONString(...args: Array<AnyValue>): string;
     set snapshot(s: IHeapSnapshot);
     get snapshot(): IHeapSnapshot;
     set toNode(s: IHeapNode);

package/dist/trace-cluster/TraceElement.js
@@ -62,6 +62,9 @@ class NodeRecord {
     get referrers() {
         throw new Error('NodeRecord.referrers cannot be read');
     }
+    get numOfReferrers() {
+        throw new Error('NodeRecord.numOfReferrers cannot be read');
+    }
     toStringNode() {
         throw new Error('NodeRecord.toStringNode is not implemented');
     }
@@ -76,9 +79,15 @@ class NodeRecord {
     findAnyReferrer() {
         throw new Error('NodeRecord.findAnyReferrer is not implemented');
     }
+    findAnyReferrerNode() {
+        throw new Error('NodeRecord.findAnyReferrerNode is not implemented');
+    }
     findReferrers() {
         throw new Error('NodeRecord.findReferrers is not implemented');
     }
+    findReferrerNodes() {
+        throw new Error('NodeRecord.findReferrerNodes is not implemented');
+    }
     set hasPathEdge(f) {
         throw new Error('NodeRecord.hasPathEdge cannot be assigned');
     }
@@ -145,6 +154,20 @@ class NodeRecord {
     _edgeType) {
         throw new Error('NodeRecord.getReferrerNodes is not implemented');
     }
+    toJSONString(...args) {
+        const rep = {
+            id: this.id,
+            kind: this.kind,
+            name: this.name,
+            type: this.type,
+            self_size: this.self_size,
+            trace_node_id: this.trace_node_id,
+            nodeIndex: this.nodeIndex,
+            incomingEdgeCount: this.numOfReferrers,
+            contructorName: this.constructor.name,
+        };
+        return JSON.stringify(rep, ...args);
+    }
     extraceNodeName(node) {
         // deserialized node may not have snapshot info
         if (!node.snapshot || !Utils_1.default.isFiberNode(node)) {
@@ -163,6 +186,16 @@ class EdgeRecord {
         this.is_index = edge.is_index;
         this.to_node = edge.to_node;
     }
+    toJSONString(...args) {
+        const rep = {
+            kind: this.kind,
+            name_or_index: this.name_or_index,
+            type: this.type,
+            edgeIndex: this.edgeIndex,
+            to_node: this.to_node,
+        };
+        return JSON.stringify(rep, ...args);
+    }
     set snapshot(s) {
         throw new Error('EdgeRecord.snapshot cannot be assigned.');
     }