@kanaries/graphic-walker 0.2.11 → 0.2.12
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/App.d.ts +6 -3
- package/dist/assets/explainer.worker-8428eb12.js.map +1 -1
- package/dist/components/button/base.d.ts +6 -0
- package/dist/components/button/default.d.ts +4 -0
- package/dist/components/button/primary.d.ts +4 -0
- package/dist/dataSource/utils.d.ts +1 -1
- package/dist/graphic-walker.es.js +17671 -18577
- package/dist/graphic-walker.es.js.map +1 -1
- package/dist/graphic-walker.umd.js +134 -134
- package/dist/graphic-walker.umd.js.map +1 -1
- package/dist/index.d.ts +2 -2
- package/dist/interfaces.d.ts +8 -0
- package/dist/lib/inferMeta.d.ts +20 -0
- package/dist/store/commonStore.d.ts +1 -1
- package/dist/store/index.d.ts +0 -1
- package/dist/store/visualSpecStore.d.ts +5 -5
- package/dist/utils/dataPrep.d.ts +6 -0
- package/dist/utils/index.d.ts +2 -2
- package/dist/utils/normalization.d.ts +1 -1
- package/dist/utils/save.d.ts +3 -3
- package/dist/utils/throttle.d.ts +1 -1
- package/dist/vis/temporalFormat.d.ts +10 -0
- package/package.json +1 -1
- package/src/App.tsx +27 -10
- package/src/components/button/base.ts +7 -0
- package/src/components/button/default.tsx +17 -0
- package/src/components/button/primary.tsx +17 -0
- package/src/dataSource/dataSelection/csvData.tsx +8 -10
- package/src/dataSource/dataSelection/publicData.tsx +4 -4
- package/src/dataSource/index.tsx +10 -12
- package/src/dataSource/table.tsx +33 -20
- package/src/dataSource/utils.ts +30 -35
- package/src/fields/datasetFields/dimFields.tsx +1 -5
- package/src/fields/datasetFields/meaFields.tsx +1 -5
- package/src/fields/obComponents/obFContainer.tsx +1 -5
- package/src/index.tsx +3 -4
- package/src/interfaces.ts +9 -0
- package/src/lib/inferMeta.ts +88 -0
- package/src/locales/en-US.json +6 -0
- package/src/locales/zh-CN.json +6 -0
- package/src/main.tsx +1 -1
- package/src/store/commonStore.ts +8 -3
- package/src/store/index.tsx +0 -2
- package/src/store/visualSpecStore.ts +245 -183
- package/src/utils/autoMark.ts +14 -14
- package/src/utils/dataPrep.ts +44 -0
- package/src/utils/index.ts +140 -128
- package/src/utils/normalization.ts +59 -51
- package/src/utils/save.ts +22 -21
- package/src/utils/throttle.ts +5 -1
- package/src/vis/react-vega.tsx +6 -10
- package/src/vis/temporalFormat.ts +66 -0
- package/dist/pitch/dnd-offset.d.ts +0 -2
- package/src/pitch/dnd-offset.ts +0 -64
package/src/utils/autoMark.ts
CHANGED

@@ -1,30 +1,30 @@
import { ISemanticType } from "visual-insights";

/**
 *
 * @param semanticTypeList semanticTypeList.length <= 2; when calling, manually combine the last element of columns and the last element of rows and pass them in
 * @returns geom(mark) type
 */
export function autoMark(semanticTypeList: ISemanticType[]): string {
    if (semanticTypeList.length < 2) {
        if (semanticTypeList[0] === "temporal") return "tick";
        return "bar";
    }
    const couter: Map<ISemanticType, number> = new Map();
    (["nominal", "ordinal", "quantitative", "temporal"] as ISemanticType[]).forEach((s) => {
        couter.set(s, 0);
    });
    for (let st of semanticTypeList) {
        couter.set(st, couter.get(st)! + 1);
    }
    if (couter.get("nominal") === 1 || couter.get("ordinal") === 1) {
        return "bar";
    }
    if (couter.get("temporal") === 1 && couter.get("quantitative") === 1) {
        return "line";
    }
    if (couter.get("quantitative") === 2) {
        return "point";
    }
    return "point";
}
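
The sketch below is illustrative and not part of the published diff; it shows how autoMark maps the semantic types of the innermost column/row fields to a mark type. The relative import path is an assumption.

// illustrative usage of the function above (import path assumed)
import { autoMark } from "./autoMark";

autoMark(["temporal", "quantitative"]); // "line": one time axis plus one measure
autoMark(["quantitative", "quantitative"]); // "point": two measures suggest a scatter
autoMark(["temporal"]); // "tick": a single temporal field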

package/src/utils/dataPrep.ts
ADDED

@@ -0,0 +1,44 @@
import { IRow } from "visual-insights";
import { IMutField } from "../interfaces";

function updateRowKeys(data: IRow[], keyEncodeList: {from: string; to: string}[]): IRow[] {
    return data.map((row) => {
        const newRow: IRow = {};
        for (let k in keyEncodeList) {
            const { from, to } = keyEncodeList[k];
            newRow[to] = row[from];
        }
        return newRow;
    });
}

/**
 * parse column id(key) to a safe string
 * @param metas
 */
function parseColumnMetas (metas: IMutField[]) {
    return metas.map((meta, i) => {
        const safeKey = `gwc_${i}`;
        return {
            ...meta,
            key: safeKey,
            fid: safeKey,
        };
    });
}

export function guardDataKeys (data: IRow[], metas: IMutField[]): {
    safeData: IRow[];
    safeMetas: IMutField[];
} {
    const safeMetas = parseColumnMetas(metas)
    const keyEncodeList = safeMetas.map((f, i) => ({
        from: metas[i].fid,
        to: f.fid
    }));
    const safeData = updateRowKeys(data, keyEncodeList);
    return {
        safeData,
        safeMetas
    }
}
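
A usage sketch for the new guardDataKeys helper, illustrative only: the sample rows, the import paths, and the assumption that fid, name, analyticType, and semanticType are enough to describe an IMutField are taken from the other files in this diff, not from dataPrep.ts itself.

import { guardDataKeys } from "./dataPrep";
import { IMutField } from "../interfaces";

const rows = [{ "user name": "a", "sales($)": 10 }];
const metas: IMutField[] = [
    { fid: "user name", name: "user name", analyticType: "dimension", semanticType: "nominal" },
    { fid: "sales($)", name: "sales($)", analyticType: "measure", semanticType: "quantitative" },
];

// every column id is replaced by a generated safe key ("gwc_0", "gwc_1", ...)
// and each row is re-keyed to match
const { safeData, safeMetas } = guardDataKeys(rows, metas);
// safeMetas[0].fid === "gwc_0"
// safeData -> [{ gwc_0: "a", gwc_1: 10 }]
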
package/src/utils/index.ts
CHANGED

@@ -1,106 +1,110 @@
import i18next from "i18next";
import { COUNT_FIELD_ID } from "../constants";
import { IRow, Filters, IMutField } from "../interfaces";
interface NRReturns {
    normalizedData: IRow[];
    maxMeasures: IRow;
    minMeasures: IRow;
    totalMeasures: IRow;
}
function normalizeRecords(dataSource: IRow[], measures: string[]): NRReturns {
    const maxMeasures: IRow = {};
    const minMeasures: IRow = {};
    const totalMeasures: IRow = {};
    measures.forEach((mea) => {
        maxMeasures[mea] = -Infinity;
        minMeasures[mea] = Infinity;
        totalMeasures[mea] = 0;
    });
    dataSource.forEach((record) => {
        measures.forEach((mea) => {
            maxMeasures[mea] = Math.max(record[mea], maxMeasures[mea]);
            minMeasures[mea] = Math.min(record[mea], minMeasures[mea]);
        });
    });
    const newData: IRow[] = [];
    dataSource.forEach((record) => {
        const norRecord: IRow = { ...record };
        measures.forEach((mea) => {
            totalMeasures[mea] += Math.abs(norRecord[mea]);
        });
        newData.push(norRecord);
    });
    newData.forEach((record) => {
        measures.forEach((mea) => {
            record[mea] /= totalMeasures[mea];
        });
    });
    return {
        normalizedData: newData,
        maxMeasures,
        minMeasures,
        totalMeasures,
    };
}

function normalize2PositiveRecords(dataSource: IRow[], measures: string[]): NRReturns {
    const maxMeasures: IRow = {};
    const minMeasures: IRow = {};
    const totalMeasures: IRow = {};
    measures.forEach((mea) => {
        maxMeasures[mea] = -Infinity;
        minMeasures[mea] = Infinity;
        totalMeasures[mea] = 0;
    });
    dataSource.forEach((record) => {
        measures.forEach((mea) => {
            maxMeasures[mea] = Math.max(record[mea], maxMeasures[mea]);
            minMeasures[mea] = Math.min(record[mea], minMeasures[mea]);
        });
    });
    const newData: IRow[] = [];
    dataSource.forEach((record) => {
        const norRecord: IRow = { ...record };
        measures.forEach((mea) => {
            norRecord[mea] = norRecord[mea] - minMeasures[mea];
            totalMeasures[mea] += norRecord[mea];
        });
        newData.push(norRecord);
    });
    newData.forEach((record) => {
        measures.forEach((mea) => {
            record[mea] /= totalMeasures[mea];
        });
    });
    return {
        normalizedData: newData,
        maxMeasures,
        minMeasures,
        totalMeasures,
    };
}

export function checkMajorFactor(
    data: IRow[],
    childrenData: Map<any, IRow[]>,
    dimensions: string[],
    measures: string[]
): { majorKey: string; majorSum: number } {
    const { normalizedData, maxMeasures, minMeasures, totalMeasures } = normalizeRecords(data, measures);
    let majorSum = Infinity;
    let majorKey = "";
    for (let [key, childData] of childrenData) {
        let sum = 0;
        for (let record of normalizedData) {
            let target = childData.find((childRecord) => {
                return dimensions.every((dim) => record[dim] === childRecord[dim]);
            });
            if (target) {
                measures.forEach((mea) => {
                    let targetValue = typeof target![mea] === "number" && !isNaN(target![mea]) ? target![mea] : 0;
                    targetValue = targetValue / totalMeasures[mea];
                    sum += Math.abs(record[mea] - targetValue);
                });
            } else {
                measures.forEach((mea) => {
                    sum += Math.abs(record[mea]);
                });
            }
        }
        if (sum < majorSum) {

@@ -108,32 +112,37 @@ export function checkMajorFactor(data: IRow[], childrenData: Map<any, IRow[]>, d
            majorKey = key;
        }
    }
    majorSum /= measures.length * 2;
    return { majorKey, majorSum };
}

export function checkChildOutlier(
    data: IRow[],
    childrenData: Map<any, IRow[]>,
    dimensions: string[],
    measures: string[]
): { outlierKey: string; outlierSum: number } {
    // const { normalizedData, maxMeasures, minMeasures, totalMeasures } = normalize2PositiveRecords(data, measures);
    const { normalizedData, maxMeasures, minMeasures, totalMeasures } = normalizeRecords(data, measures);
    let outlierSum = -Infinity;
    let outlierKey = "";
    for (let [key, childData] of childrenData) {
        // const { normalizedData: normalizedChildData } = normalize2PositiveRecords(childData, measures);
        const { normalizedData: normalizedChildData } = normalizeRecords(childData, measures);
        let sum = 0;
        for (let record of normalizedData) {
            let target = normalizedChildData.find((childRecord) => {
                return dimensions.every((dim) => record[dim] === childRecord[dim]);
            });
            if (target) {
                measures.forEach((mea) => {
                    let targetValue = typeof target![mea] === "number" && !isNaN(target![mea]) ? target![mea] : 0;
                    sum += Math.abs(record[mea] - targetValue);
                });
            } else {
                measures.forEach((mea) => {
                    sum += Math.abs(record[mea]);
                });
            }
        }
        if (sum > outlierSum) {

@@ -141,55 +150,55 @@ export function checkChildOutlier(data: IRow[], childrenData: Map<any, IRow[]>,
            outlierKey = key;
        }
    }
    outlierSum /= measures.length * 2;
    return { outlierKey, outlierSum };
}
export interface IPredicate {
    key: string;
    type: "discrete" | "continuous";
    range: Set<any> | [number, number];
}
export function getPredicates(selection: IRow[], dimensions: string[], measures: string[]): IPredicate[] {
    const predicates: IPredicate[] = [];
    dimensions.forEach((dim) => {
        predicates.push({
            key: dim,
            type: "discrete",
            range: new Set(),
        });
    });
    measures.forEach((mea) => {
        predicates.push({
            key: mea,
            type: "continuous",
            range: [Infinity, -Infinity],
        });
    });
    selection.forEach((record) => {
        dimensions.forEach((dim, index) => {
            (predicates[index].range as Set<any>).add(record[dim]);
        });
        measures.forEach((mea, index) => {
            (predicates[index].range as [number, number])[0] = Math.min(
                (predicates[index].range as [number, number])[0],
                record[mea]
            );
            (predicates[index].range as [number, number])[1] = Math.max(
                (predicates[index].range as [number, number])[1],
                record[mea]
            );
        });
    });
    return predicates;
}

export function getPredicatesFromVegaSignals(signals: Filters, dimensions: string[], measures: string[]): IPredicate[] {
    const predicates: IPredicate[] = [];
    dimensions.forEach((dim) => {
        predicates.push({
            type: "discrete",
            range: new Set(signals[dim]),
            key: dim,
        });
    });
    return predicates;

@@ -197,16 +206,16 @@ export function getPredicatesFromVegaSignals(signals: Filters, dimensions: strin

export function filterByPredicates(data: IRow[], predicates: IPredicate[]): IRow[] {
    const filterData = data.filter((record) => {
        return predicates.every((pre) => {
            if (pre.type === "continuous") {
                return (
                    record[pre.key] >= (pre.range as [number, number])[0] &&
                    record[pre.key] <= (pre.range as [number, number])[1]
                );
            } else {
                return (pre.range as Set<any>).has(record[pre.key]);
            }
        });
    });
    return filterData;
}

@@ -227,22 +236,25 @@ export function applyFilters(dataSource: IRow[], filters: Filters): IRow[] {
    });
}

export function extendCountField(
    dataSource: IRow[],
    fields: IMutField[]
): {
    dataSource: IRow[];
    fields: IMutField[];
} {
    const nextData = dataSource.map((r) => ({
        ...r,
        [COUNT_FIELD_ID]: 1,
    }));
    const nextFields = fields.concat({
        fid: COUNT_FIELD_ID,
        name: i18next.t("constant.row_count"),
        analyticType: "measure",
        semanticType: "quantitative",
    });
    return {
        dataSource: nextData,
        fields: nextFields,
    };
}
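
An illustrative sketch (not taken from the package) of the predicate helpers above: a discrete predicate matches by set membership, a continuous one by an inclusive [min, max] range.

import { filterByPredicates, IPredicate } from "./index";

const rows = [
    { city: "Berlin", sales: 10 },
    { city: "Paris", sales: 25 },
    { city: "Berlin", sales: 40 },
];
const predicates: IPredicate[] = [
    { key: "city", type: "discrete", range: new Set(["Berlin"]) },
    { key: "sales", type: "continuous", range: [0, 30] },
];

filterByPredicates(rows, predicates);
// -> [{ city: "Berlin", sales: 10 }]
// Paris fails the set test; the sales value 40 falls outside the range.
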
package/src/utils/normalization.ts
CHANGED

@@ -1,4 +1,4 @@
import { IRow } from "../interfaces";

export function normalizeWithParent(
    data: IRow[],

@@ -11,69 +11,72 @@
} {
    const totalMeasuresOfParent: IRow = {};
    const totalMeasures: IRow = {};
    measures.forEach((mea) => {
        totalMeasuresOfParent[mea] = 0;
        totalMeasures[mea] = 0;
    });
    parentData.forEach((record) => {
        measures.forEach((mea) => {
            totalMeasuresOfParent[mea] += Math.abs(record[mea]);
        });
    });
    data.forEach((record) => {
        measures.forEach((mea) => {
            totalMeasures[mea] += Math.abs(record[mea]);
        });
    });
    const normalizedParentData: IRow[] = [];
    parentData.forEach((record) => {
        const newRecord = { ...record };
        measures.forEach((mea) => {
            newRecord[mea] /= totalMeasuresOfParent[mea];
        });
        normalizedParentData.push(newRecord);
    });
    const normalizedData: IRow[] = [];
    data.forEach((record) => {
        const newRecord = { ...record };
        measures.forEach((mea) => {
            if (syncScale) {
                newRecord[mea] /= totalMeasuresOfParent[mea];
            } else {
                newRecord[mea] /= totalMeasures[mea];
            }
        });
        normalizedData.push(newRecord);
    });
    return {
        normalizedData,
        normalizedParentData,
    };
}

export function compareDistribution(
    distribution1: IRow[],
    distribution2: IRow[],
    dimensions: string[],
    measures: string[]
): number {
    let score = 0;
    let count = 0;
    const tagsForD2: boolean[] = distribution2.map(() => false);
    for (let record of distribution1) {
        let targetRecordIndex = distribution2.findIndex((r, i) => {
            return !tagsForD2[i] && dimensions.every((dim) => r[dim] === record[dim]);
        });
        if (targetRecordIndex > -1) {
            tagsForD2[targetRecordIndex] = true;
            const targetRecord = distribution2[targetRecordIndex];
            for (let mea of measures) {
                score = Math.max(
                    score,
                    Math.max(targetRecord[mea], record[mea]) / Math.min(targetRecord[mea], record[mea])
                );
                count++;
            }
        } else {
            for (let mea of measures) {
                score = Math.max(score, record[mea]);
                count++;
            }
        }

@@ -90,31 +93,36 @@ export function compareDistribution (distribution1: IRow[], distribution2: IRow[
    return score;
}

export function normalizeByMeasures(dataSource: IRow[], measures: string[]) {
    let sums: Map<string, number> = new Map();

    measures.forEach((mea) => {
        sums.set(mea, 0);
    });

    dataSource.forEach((record) => {
        measures.forEach((mea) => {
            sums.set(mea, sums.get(mea)! + Math.abs(record[mea]));
        });
    });

    const ans: IRow[] = [];
    dataSource.forEach((record) => {
        const norRecord: IRow = { ...record };
        measures.forEach((mea) => {
            norRecord[mea] /= sums.get(mea)!;
        });
        ans.push(norRecord);
    });
    return ans;
}

export function getDistributionDifference(
    dataSource: IRow[],
    dimensions: string[],
    measure1: string,
    measure2: string
): number {
    let score = 0;
    for (let record of dataSource) {
        if (record[measure1] === 0 || record[measure2] === 0) continue;

@@ -123,7 +131,7 @@ export function getDistributionDifference(dataSource: IRow[], dimensions: string
    return score;
}

export function makeBinField(dataSource: IRow[], fid: string, binFid: string, binSize: number | undefined = 10) {
    let _min = Infinity;
    let _max = -Infinity;
    for (let i = 0; i < dataSource.length; i++) {

@@ -132,21 +140,21 @@ export function makeBinField (dataSource: IRow[], fid: string, binFid: string, b
        if (val < _min) _min = val;
    }
    const step = (_max - _min) / binSize;
    return dataSource.map((r) => {
        let bIndex = Math.floor((r[fid] - _min) / step);
        if (bIndex === binSize) bIndex = binSize - 1;
        return {
            ...r,
            [binFid]: bIndex * step + _min,
        };
    });
}

export function makeLogField(dataSource: IRow[], fid: string, logFid: string) {
    return dataSource.map((r) => {
        return {
            ...r,
            [logFid]: typeof r[fid] === "number" && r[fid] > 0 ? Math.log10(r[fid]) : null,
        };
    });
}