@dra2020/dra-types 1.8.133 → 1.8.136
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/alldt.d.ts +1 -0
- package/dist/cities.d.ts +22 -0
- package/dist/dra-types.js +97 -182
- package/dist/dra-types.js.map +1 -1
- package/dist/packedfields.d.ts +22 -24
- package/lib/alldt.ts +1 -0
- package/lib/cities.ts +22 -0
- package/lib/colormgr.ts +10 -34
- package/lib/packedfields.ts +84 -166
- package/lib/splittogeofeature.ts +0 -4
- package/package.json +2 -2
package/dist/packedfields.d.ts
CHANGED
@@ -25,10 +25,6 @@ export interface StatesMetaIndex {
 export interface StatesMeta {
     [key: string]: StatesMetaIndex;
 }
-export type PackedFieldsArray = Float64Array;
-export type PackedFields = {
-    [datasetid: string]: PackedFieldsArray;
-};
 export interface PackedFieldsIndex {
     [field: string]: number;
 }
@@ -37,10 +33,9 @@ export interface PackedMetaIndex {
     fields: {
         [dataset: string]: PackedFieldsIndex;
     };
-    getDatasetField: (f: any, dataset: string, field: string) => number;
 }
 export type GroupPackedMetaIndex = {
-    [
+    [did: string]: PackedMetaIndex;
 };
 export interface PrimaryDatasetKeys {
     SHAPES?: string;
@@ -48,8 +43,15 @@ export interface PrimaryDatasetKeys {
     VAP: string;
     ELECTION: string;
 }
+export type PackedFieldsArray = Float64Array;
+export interface PackedFields {
+    dsGroup: GroupPackedMetaIndex;
+    data: {
+        [did: string]: PackedFieldsArray;
+    };
+}
 export interface DatasetContext {
-
+    dsGroup: GroupPackedMetaIndex;
     dsMeta: DatasetsMeta;
     primeDDS: string;
     primeVDS: string;
@@ -69,38 +71,34 @@ export type DSLists = {
 export type PlanTypePlus = PlanType | '';
 export declare function fGet(f: any, p: string): any;
 export declare function sortedFieldList(ds: DatasetMeta): string[];
-export declare function getDatasetField(f: any,
-export declare function computeMetaIndex(
-export declare function
-export declare function
-export declare function computeOnePackedFields(f: any, index: PackedMetaIndex, did: string, datasetKey: string): PackedFields;
+export declare function getDatasetField(f: any, did: string, field: string): number;
+export declare function computeMetaIndex(did: string, meta: DatasetsMeta): PackedMetaIndex;
+export declare function computeOnePackedFields(f: any, dsGroup: GroupPackedMetaIndex, index: PackedMetaIndex, did: string, datasetKey: string): PackedFields;
+export declare function initPackedFields(f: any, dsGroup: GroupPackedMetaIndex): void;
 export declare function clearPackedFields(f: any): void;
 export declare function hasPackedFields(f: any): boolean;
-export declare function setPackedFields(f: any, pf: PackedFields
+export declare function setPackedFields(f: any, pf: PackedFields): void;
 export declare function isExtDataset(did: string): boolean;
-export declare function toDatasetID(datasetKey: string): string;
 export type ExtPackedFields = Uint32Array;
 export type ExtBlockCardinality = Map<string, number>;
-export declare function
-export declare function
+export declare function pushExtPackedFields(blocks: string[], pf: PackedFields, did: string, index: PackedMetaIndex, data: ExtPackedFields, card: ExtBlockCardinality): void;
+export declare function featurePushExtPackedFields(f: any, did: string, index: PackedMetaIndex, data: ExtPackedFields, card: ExtBlockCardinality): void;
+export declare function featurePushedExtPackedFields(f: any, did: string, card: ExtBlockCardinality): boolean;
 export declare function pushedExtPackedFields(pf: PackedFields, datasetids: string[]): boolean;
 export declare function retrievePackedFields(f: any): PackedFields;
-export declare function
-export declare function zeroPackedFields(index: GroupPackedMetaIndex): PackedFields;
+export declare function zeroPackedFields(dsGroup: GroupPackedMetaIndex): PackedFields;
 export declare function zeroPackedCopy(pf: PackedFields): PackedFields;
 export declare function packedCopy(pf: PackedFields): PackedFields;
 export declare function aggregatePackedFields(agg: PackedFields, pf: PackedFields): PackedFields;
 export declare function aggregateCount(agg: PackedFields): number;
 export declare function decrementPackedFields(agg: PackedFields, pf: PackedFields): PackedFields;
 export declare function diffPackedFields(main: any, parts: any[]): PackedFields;
-export declare function getPackedField(
-export declare function findPackedField(
+export declare function getPackedField(pf: PackedFields, did: string, field: string): number;
+export declare function findPackedField(pf: PackedFields, did: string, field: string): number;
 export type FieldGetter = (f: string) => number;
 export declare function fieldGetterNotLoaded(f: string): number;
-export declare function ToGetter(agg: PackedFields, dc: DatasetContext,
-export declare function
-export declare function ToGetterPvi20(agg: PackedFields, dc: DatasetContext): FieldGetter;
-export declare function calcShift(agg: PackedFields, dc: DatasetContext, datasetOld: string, datasetNew: string): number;
+export declare function ToGetter(agg: PackedFields, dc: DatasetContext, did: string): FieldGetter;
+export declare function calcShift(agg: PackedFields, dc: DatasetContext, didOld: string, didNew: string): number;
 export declare function calcRawPvi(getter: FieldGetter): number;
 export declare function pviStr(getter: FieldGetter): string;
 export declare function calcRaw2020Pvi(getter16: FieldGetter, getter20: FieldGetter): number;
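
The declaration changes above reshape PackedFields from a bare map of Float64Arrays into an object that carries its own GroupPackedMetaIndex (dsGroup) alongside the packed data, and the accessors (getPackedField, findPackedField, ToGetter, getDatasetField) now take a dataset id rather than a separate index and dataset-key pair. A minimal, self-contained sketch of the new shape and lookup path follows; the type aliases are copied from the declarations above, while the dataset id 'acs.ds', the field names, and the lookup helper are hypothetical placeholders, not package code.

// Sketch only: the 1.8.136 PackedFields shape and how a field lookup resolves.
// Types mirror dist/packedfields.d.ts; 'acs.ds', 'Tot', 'Wh' are invented.
type PackedFieldsArray = Float64Array;
interface PackedFieldsIndex { [field: string]: number; }
interface PackedMetaIndex { length: number; fields: { [dataset: string]: PackedFieldsIndex }; }
type GroupPackedMetaIndex = { [did: string]: PackedMetaIndex };
interface PackedFields { dsGroup: GroupPackedMetaIndex; data: { [did: string]: PackedFieldsArray }; }

const did = 'acs.ds';
const dsGroup: GroupPackedMetaIndex = {
  // slot 0 of each array is reserved for the aggregate count, so field offsets start at 1
  [did]: { length: 3, fields: { [did]: { Tot: 1, Wh: 2 } } },
};
const pf: PackedFields = { dsGroup, data: { [did]: Float64Array.from([1, 100, 60]) } };

// Resolve a field the way the new getPackedField(pf, did, field) signature implies:
// the meta index travels with the value, so no separate index argument is needed.
function lookup(pf: PackedFields, did: string, field: string): number {
  const fields = pf.dsGroup[did]?.fields[did];
  const offset = fields?.[field];
  return offset !== undefined ? pf.data[did][offset] : 0;
}

console.log(lookup(pf, did, 'Tot')); // 100
console.log(lookup(pf, did, 'Wh'));  // 60
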
package/lib/alldt.ts
CHANGED
package/lib/cities.ts
ADDED
@@ -0,0 +1,22 @@
+export interface Cities
+{
+  id?: string,
+  state?: string,
+  datasource?: string,
+  geoids?: string[],
+  name?: string,
+  description?: string,
+  labels?: string[],
+  createdBy?: string,
+  createTime?: string,
+  modifyTime?: string,
+  publishTime?: string,
+  deleted?: boolean,
+  published?: string,
+  official?: boolean,
+  supersets?: string[],
+  subsets?: string[],
+  conflicts?: string[],
+}
+
+export type CitiesIndex = { [id: string]: Cities };
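
The new lib/cities.ts module adds an all-optional Cities record and a CitiesIndex map keyed by id. A small usage sketch follows; the two types are copied from the file above, while the indexing helper and the sample values are illustrative assumptions, not part of the package.

// Sketch only: indexing Cities records by id. Types copied from lib/cities.ts;
// toCitiesIndex and the sample records are hypothetical.
interface Cities {
  id?: string; state?: string; datasource?: string; geoids?: string[];
  name?: string; description?: string; labels?: string[];
  createdBy?: string; createTime?: string; modifyTime?: string; publishTime?: string;
  deleted?: boolean; published?: string; official?: boolean;
  supersets?: string[]; subsets?: string[]; conflicts?: string[];
}
type CitiesIndex = { [id: string]: Cities };

// Build an index from a list, skipping records flagged as deleted.
function toCitiesIndex(list: Cities[]): CitiesIndex {
  const index: CitiesIndex = {};
  for (const c of list)
    if (c.id && !c.deleted)
      index[c.id] = c;
  return index;
}

const index = toCitiesIndex([
  { id: 'city-001', state: 'NC', name: 'Example City', official: false, geoids: [] },
  { id: 'city-002', state: 'NC', name: 'Removed City', deleted: true },
]);
console.log(Object.keys(index)); // ['city-001']
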
package/lib/colormgr.ts
CHANGED
@@ -172,10 +172,9 @@ export function ethnicBackgroundColor(index: number, pd: PaletteDefaults): strin
 export function ToAllEthnicColor(agg: PF.PackedFields, dc: PF.DatasetContext, pd: PaletteDefaults): number
 {
   // Use VAP/CVAP if it exists
-  const
-  const
-
-  return AggregateEthnicColor(PF.ToGetter(agg, dc, did, dataset), pd, builtin.endsWith('NH'));
+  const did = dc.primeVDS ? dc.primeVDS : dc.primeDDS
+  const builtin = dc.dsMeta[did]?.builtin || did;
+  return AggregateEthnicColor(PF.ToGetter(agg, dc, did), pd, builtin.endsWith('NH'));
 }

 export function ToPartisanColorStr(agg: PF.PackedFields, dc: PF.DatasetContext, pd: PaletteDefaults): string
@@ -190,30 +189,8 @@ export function ToPartisanDistrictColor(agg: PF.PackedFields, dc: PF.DatasetCont

 function ToPartisanColor(agg: PF.PackedFields, dc: PF.DatasetContext, stops: Util.GradientStops): string
 {
-  const
-
-  if (dc.primeEDS === PF.DS_PVI2020)
-  {
-    const getter16 = PF.ToGetter(agg, dc, did, PF.DS_PRES2016);
-    const getter20 = PF.ToGetter(agg, dc, did, PF.DS_PRES2020);
-
-    const pviRaw = PF.calcRaw2020Pvi(getter16, getter20);
-    const color: string = ColorFromRGBPcts((1 - pviRaw / 100), 0, pviRaw / 100, stops);
-    //console.log('Pvi (r, d, color): (' + (1 - pviRaw/100) + ', ' + pviRaw/100 + ', ' + color + ')');
-    return color;
-  }
-  else if (dc.primeEDS === PF.DS_PVI2016)
-  {
-    const getter = PF.ToGetter(agg, dc, did, dc.primeEDS);
-    const pviRaw = PF.calcRawPvi(getter);
-    const color: string = ColorFromRGBPcts((1 - pviRaw/100), 0, pviRaw/100, stops);
-    return color;
-  }
-  else
-  {
-    const getter = PF.ToGetter(agg, dc, did, dc.primeEDS);
-    return AggregatePartisanColorStr(getter, stops);
-  }
+  const getter = PF.ToGetter(agg, dc, dc.primeEDS);
+  return AggregatePartisanColorStr(getter, stops);
 }

 export function ToPartisanShiftColor(agg: PF.PackedFields, dc: PF.DatasetContext, datasets: string[], pd: PaletteDefaults, isDistrict?: boolean): string
@@ -240,9 +217,8 @@ export function ToEthnicColorStr(agg: PF.PackedFields, dc: PF.DatasetContext, pd
   let ethnic: string = 'Wh';
   let total: string = 'Tot';
   let bInvert: boolean = false;
-  const
-  const
-  const builtin = dc.dsMeta[did]?.builtin || dataset;
+  const did = dc.primeVDS ? dc.primeVDS : dc.primeDDS;
+  const builtin = dc.dsMeta[did]?.builtin || did;
   switch (detail)
   {
     case null: case '': case 'all':
@@ -261,7 +237,7 @@ export function ToEthnicColorStr(agg: PF.PackedFields, dc: PF.DatasetContext, pd
     default: break;
   }

-  const getter = PF.ToGetter(agg, dc, did
+  const getter = PF.ToGetter(agg, dc, did);
   let den = getter(total);
   let num = getter(ethnic);
   if (den === undefined || isNaN(den) || num === undefined || isNaN(num))
@@ -570,7 +546,7 @@ export function ToExtendedColor(agg: PF.PackedFields, dc: PF.DatasetContext, col
     colors = safeColors('');
   }
   let o: any = {};
-  let getter = PF.ToGetter(agg, dc, datasetid
+  let getter = PF.ToGetter(agg, dc, datasetid);
   Object.keys(meta.fields).forEach(f => o[f] = getter(f));
   let formatter = new Detail.FormatDetail(dscolor.expr);
   let result = formatter.format(Detail.FormatDetail.prepare(o));
@@ -579,7 +555,7 @@ export function ToExtendedColor(agg: PF.PackedFields, dc: PF.DatasetContext, col
   }
   else
   {
-    let getter = PF.ToGetter(agg, dc, datasetid
+    let getter = PF.ToGetter(agg, dc, datasetid);
     let fields = PF.sortedFieldList(meta);
     let den = 0;
     if (meta.fields['Tot'])
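
Each of the color helpers above now resolves a dataset id directly from the DatasetContext (dc.primeVDS falling back to dc.primeDDS, or dc.primeEDS on the partisan path) and passes it to the three-argument PF.ToGetter(agg, dc, did). The sketch below illustrates that call pattern with local stand-ins; ContextLike, makeGetter and the sample values are hypothetical and only mimic the slice of the API these functions touch.

// Sketch only: the did-from-context + FieldGetter pattern used by the rewritten
// color helpers. Everything here is a stand-in, not the dra-types API.
type FieldGetter = (f: string) => number;
interface ContextLike { primeDDS: string; primeVDS: string; }  // slice of DatasetContext

// Stand-in for PF.ToGetter(agg, dc, did): returns a getter over canned values.
function makeGetter(values: { [field: string]: number }): FieldGetter {
  return (f: string) => values[f] ?? 0;
}

// Same shape as ToEthnicColorStr: pick the dataset id, get a getter, read fields.
function ethnicShare(dc: ContextLike, getterFor: (did: string) => FieldGetter): number {
  const did = dc.primeVDS ? dc.primeVDS : dc.primeDDS;  // fallback used in colormgr.ts
  const getter = getterFor(did);
  const den = getter('Tot');
  const num = getter('Wh');
  return den > 0 ? num / den : 0;
}

console.log(ethnicShare({ primeDDS: 'census', primeVDS: '' },
                        () => makeGetter({ Tot: 200, Wh: 120 }))); // 0.6
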
package/lib/packedfields.ts
CHANGED
@@ -66,8 +66,6 @@ export interface StatesMeta
   [key: string]: StatesMetaIndex; // key is one of the datasource strings
 }

-export type PackedFieldsArray = Float64Array;
-export type PackedFields = { [datasetid: string]: PackedFieldsArray };
 export interface PackedFieldsIndex
 {
   [field: string]: number; // offset into PackedFields
@@ -77,10 +75,9 @@ export interface PackedMetaIndex
 {
   length: number;
   fields: { [dataset: string]: PackedFieldsIndex };
-  getDatasetField: (f: any, dataset: string, field: string) => number;
 }

-export type GroupPackedMetaIndex = { [
+export type GroupPackedMetaIndex = { [did: string]: PackedMetaIndex };

 export interface PrimaryDatasetKeys
 {
@@ -90,11 +87,17 @@ export interface PrimaryDatasetKeys
   ELECTION: string,
 }

+export type PackedFieldsArray = Float64Array;
+export interface PackedFields {
+  dsGroup: GroupPackedMetaIndex,
+  data: { [did: string]: PackedFieldsArray }
+}
+
 // This integregates the information associated with a specific state and datasource as
 // well as user selections around which datasets to view. Used to propagate through UI.
 export interface DatasetContext
 {
-
+  dsGroup: GroupPackedMetaIndex;
   dsMeta: DatasetsMeta;
   primeDDS: string; // Demographic (Census)
   primeVDS: string; // VAP/CVAP
@@ -154,19 +157,17 @@ export function sortedFieldList(ds: DatasetMeta): string[]
   return kv.map(kv => kv.k);
 }

-export function getDatasetField(f: any,
+export function getDatasetField(f: any, did: string, field: string): number
 {
   let pf = retrievePackedFields(f);
-
-  let did = toDatasetID(dataset);
-  return getPackedField(dxGroup, pf, did, dataset, field);
+  return getPackedField(pf, did, field);
 }

-export function computeMetaIndex(
+export function computeMetaIndex(did: string, meta: DatasetsMeta): PackedMetaIndex
 {
   if (meta == null) return null;
   let offset = 1; // first entry is count of aggregates
-  let index: PackedMetaIndex = { length: 0, fields: {}
+  let index: PackedMetaIndex = { length: 0, fields: {} };
   Object.keys(meta).forEach((datasetKey: string) => {
     let dataset = meta[datasetKey];
     let fieldsIndex: PackedFieldsIndex = {};
@@ -176,54 +177,10 @@ export function computeMetaIndex(datasetid: string, meta: DatasetsMeta): PackedM
     index.fields[datasetKey] = fieldsIndex;
   });
   index.length = offset;
-  index.getDatasetField = getDatasetField;
   return index;
 }

-
-function allocPackedFieldsArray(length: number): PackedFieldsArray
-{
-  let ab = new ArrayBuffer(8 * length);
-  let af = new Float64Array(ab);
-  nAlloc++;
-  //if ((nAlloc % 10000) == 0) console.log(`allocPackedFieldsArray: ${nAlloc} allocs`);
-  return af;
-}
-
-export function initPackedFields(f: any): void
-{
-  if (f.properties.packedFields !== undefined) throw 'Packed fields already set';
-
-  f.properties.packedIndex = {};
-  f.properties.packedFields = {};
-  f.properties.getDatasetField = getDatasetField;
-}
-
-export function computePackedFields(f: any, index: PackedMetaIndex): PackedFields
-{
-  if (f.properties.packedFields) return f.properties.packedFields as PackedFields;
-
-  let af = allocPackedFieldsArray(index.length);
-  af[0] = 0; // count of number of aggregates
-  Object.keys(index.fields).forEach((dataset: string) => {
-    let fields = index.fields[dataset];
-    Object.keys(fields).forEach((field: string) => {
-      let n = fGetW(f, dataset, field);
-      if (isNaN(n))
-        n = 0;
-      af[fields[field]] = n;
-    });
-  });
-  f.properties.packedIndex = { ['']: index };
-  f.properties.packedFields = { ['']: af }; // cache here
-  f.properties.getDatasetField = index.getDatasetField;
-
-  // Major memory savings to delete this after packing
-  delete f.properties.datasets;
-  return f.properties.packedFields;
-}
-
-export function computeOnePackedFields(f: any, index: PackedMetaIndex, did: string, datasetKey: string): PackedFields
+export function computeOnePackedFields(f: any, dsGroup: GroupPackedMetaIndex, index: PackedMetaIndex, did: string, datasetKey: string): PackedFields
 {
   let af = allocPackedFieldsArray(index.length);
   af[0] = 0; // count of number of aggregates
@@ -235,20 +192,33 @@ export function computeOnePackedFields(f: any, index: PackedMetaIndex, did: stri
     af[fields[field]] = n;
   });

-  if (! f.properties.
-    initPackedFields(f);
-  f.properties.
-  f.properties.packedFields[did] = af;
-  f.properties.getDatasetField = index.getDatasetField;
+  if (! f.properties.packedFields)
+    initPackedFields(f, dsGroup);
+  f.properties.packedFields.data[did] = af;

   return f.properties.packedFields;
 }

+let nAlloc = 0;
+function allocPackedFieldsArray(length: number): PackedFieldsArray
+{
+  let ab = new ArrayBuffer(8 * length);
+  let af = new Float64Array(ab);
+  nAlloc++;
+  //if ((nAlloc % 10000) == 0) console.log(`allocPackedFieldsArray: ${nAlloc} allocs`);
+  return af;
+}
+
+export function initPackedFields(f: any, dsGroup: GroupPackedMetaIndex): void
+{
+  if (f.properties.packedFields !== undefined) throw 'Packed fields already set';
+
+  f.properties.packedFields = { dsGroup, data: {} };
+}
+
 export function clearPackedFields(f: any): void
 {
-  delete f.properties.packedIndex;
   delete f.properties.packedFields;
-  delete f.properties.getDatasetField;
 }

 export function hasPackedFields(f: any): boolean
@@ -256,12 +226,10 @@ export function hasPackedFields(f: any): boolean
   return f.properties.packedFields !== undefined;
 }

-export function setPackedFields(f: any, pf: PackedFields
+export function setPackedFields(f: any, pf: PackedFields): void
 {
   if (f.properties.packedFields !== undefined) throw 'Packed fields already set';
-  f.properties.packedIndex = fIndex.properties.packedIndex;
   f.properties.packedFields = pf;
-  f.properties.getDatasetField = fIndex.properties.getDatasetField
 }

 const reExtDataset = /^.*\.ds$/;
@@ -270,24 +238,18 @@ export function isExtDataset(did: string): boolean
   return did && reExtDataset.test(did);
 }

-export function toDatasetID(datasetKey: string): string
-{
-  return isExtDataset(datasetKey) ? datasetKey : '';
-}
-
 export type ExtPackedFields = Uint32Array; // [nblocks][nfields][fields]...
 export type ExtBlockCardinality = Map<string, number>;

-export function
+export function pushExtPackedFields(blocks: string[], pf: PackedFields, did: string, index: PackedMetaIndex, data: ExtPackedFields, card: ExtBlockCardinality): void
 {
-  let blocks = f?.properties?.blocks || (card.has(f.properties.id) ? [ f.properties.id ] : null);
   if (!blocks)
     return;
-  if (!
-    throw('pushExtPackedFields:
+  if (! pf)
+    throw('pushExtPackedFields: packed fields should be initialized before push');
   if (card.size != data[0])
     throw('pushExtPackedFields: packed fields and block cardinality do not match');
-  if (
+  if (pf.data[did])
     return; // already pushed
   let nfields = data[1];
   let pfa = allocPackedFieldsArray(nfields+1); // field count
@@ -300,26 +262,31 @@ export function featurePushExtPackedFields(f: any, datasetid: string, index: Pac
     for (let i = 1; i <= nfields; i++)
       pfa[i] += (data[x++] << 0); // left shift by 0 to force unsigned to be interpreted as signed (used by prisoner-adjusted)
   });
-
-
+  pf.data[did] = pfa;
+}
+
+export function featurePushExtPackedFields(f: any, did: string, index: PackedMetaIndex, data: ExtPackedFields, card: ExtBlockCardinality): void
+{
+  let blocks = f?.properties?.blocks || (card.has(f.properties.id) ? [ f.properties.id ] : null);
+  pushExtPackedFields(blocks, f.properties.packedFields, did, index, data, card);
 }

-export function featurePushedExtPackedFields(f: any,
+export function featurePushedExtPackedFields(f: any, did: string, card: ExtBlockCardinality): boolean
 {
   if (! f) return true;
-  if (f.features) return featurePushedExtPackedFields(f.features[0],
+  if (f.features) return featurePushedExtPackedFields(f.features[0], did, card);
   if (!f?.properties?.blocks && !card.has(f.properties.id))
     return true;
   if (!f.properties.packedFields)
     return true;
-  return !!f.properties.packedFields[
+  return !!f.properties.packedFields.data[did];
 }

 export function pushedExtPackedFields(pf: PackedFields, datasetids: string[]): boolean
 {
   if (pf && datasetids)
     for (let i = 0; i < datasetids.length; i++)
-      if (! pf[datasetids[i]])
+      if (! pf.data[datasetids[i]])
         return false;
   return !!pf;
 }
@@ -330,28 +297,22 @@ export function retrievePackedFields(f: any): PackedFields
   return f.properties.packedFields as PackedFields;
 }

-
-{
-  if (f.properties.packedIndex === undefined) throw 'Feature should have pre-computed packed index';
-  return f.properties.packedIndex as GroupPackedMetaIndex;
-}
-
-// The first entry in the PackedFields aggregate is the count of items aggregated.
+// The first entry in the PackedFieldsArray aggregate is the count of items aggregated.
 // Treat a null instance as just a single entry with no aggregates.
 let abZero = new ArrayBuffer(8);
 let afZero = new Float64Array(abZero);
 afZero[0] = 0;
-let pfZero = { ['']: afZero };
+let pfZero = { dsGroup: {}, data: { ['']: afZero } };

-export function zeroPackedFields(
+export function zeroPackedFields(dsGroup: GroupPackedMetaIndex): PackedFields
 {
-  if (
-  let pf: PackedFields = {};
-  Object.keys(
-    let af = allocPackedFieldsArray(
+  if (dsGroup == null) return pfZero;
+  let pf: PackedFields = { dsGroup, data: {} };
+  Object.keys(dsGroup).forEach(did => {
+    let af = allocPackedFieldsArray(dsGroup[did].length);
     for (let i = 0; i < af.length; i++)
       af[i] = 0;
-    pf[
+    pf.data[did] = af;
   });
   return pf;
 }
@@ -359,12 +320,12 @@ export function zeroPackedFields(index: GroupPackedMetaIndex): PackedFields
 export function zeroPackedCopy(pf: PackedFields): PackedFields
 {
   if (pf == null) return pfZero;
-  let copy: PackedFields = {};
-  Object.keys(pf).forEach(
-    let cf = allocPackedFieldsArray(pf[
+  let copy: PackedFields = { dsGroup: pf.dsGroup, data: {} };
+  Object.keys(pf.data).forEach(did => {
+    let cf = allocPackedFieldsArray(pf.data[did].length);
     for (let i = 0; i < cf.length; i++)
       cf[i] = 0;
-    copy[
+    copy.data[did] = cf;
   });
   return copy;
 }
@@ -372,13 +333,13 @@ export function zeroPackedCopy(pf: PackedFields): PackedFields
 export function packedCopy(pf: PackedFields): PackedFields
 {
   if (pf == null) return null;
-  let copy: PackedFields = {};
-  Object.keys(pf).forEach(
-    let af = pf[
+  let copy: PackedFields = { dsGroup: pf.dsGroup, data: {} };
+  Object.keys(pf.data).forEach(did => {
+    let af = pf.data[did];
     let cf = allocPackedFieldsArray(af.length);
     for (let i = 0; i < af.length; i++)
       cf[i] = af[i];
-    copy[
+    copy.data[did] = cf;
   });
   return copy;
 }
@@ -386,9 +347,9 @@ export function packedCopy(pf: PackedFields): PackedFields
 export function aggregatePackedFields(agg: PackedFields, pf: PackedFields): PackedFields
 {
   if (agg == null || pf == null) return agg;
-  Object.keys(pf).forEach(
-    let af = agg[
-    let sf = pf[
+  Object.keys(pf.data).forEach(did => {
+    let af = agg.data[did];
+    let sf = pf.data[did];
     if (sf && (!af || sf.length == af.length))
     {
       if (! af)
@@ -397,7 +358,7 @@ export function aggregatePackedFields(agg: PackedFields, pf: PackedFields): Pack
         af[0] = 0;
         for (let i = 1; i < sf.length; i++)
           af[i] = sf[i];
-        agg[
+        agg.data[did] = af;
       }
       else
       {
@@ -416,16 +377,16 @@ export function aggregateCount(agg: PackedFields): number
   // If we have multiple packedfieldarrays, all of them track the aggregate in zero spot.
   // So we just pick the one that happens to be come up first.
   if (!agg) return 0;
-  let pfa = Util.nthProperty(agg) as PackedFieldsArray;
+  let pfa = Util.nthProperty(agg.data) as PackedFieldsArray;
   return pfa ? pfa[0] : 0;
 }

 export function decrementPackedFields(agg: PackedFields, pf: PackedFields): PackedFields
 {
   if (agg == null || pf == null) return agg;
-  Object.keys(agg).forEach(
-    let af = agg[
-    let sf = pf[
+  Object.keys(agg.data).forEach(did => {
+    let af = agg.data[did];
+    let sf = pf.data[did];
     if (sf && sf.length == af.length)
     {
       let n = af.length;
@@ -446,16 +407,16 @@ export function diffPackedFields(main: any, parts: any[]): PackedFields
   return main;
 }

-export function getPackedField(
+export function getPackedField(pf: PackedFields, did: string, field: string): number
 {
-  if (!
-  let fields =
-  return fields ? (fields[field] !== undefined ? pf[
+  if (!pf || !pf.dsGroup || !pf.dsGroup[did] || !pf.data[did]) return 0;
+  let fields = pf.dsGroup[did].fields[did];
+  return fields ? (fields[field] !== undefined ? pf.data[did][fields[field]] : 0) : 0;
 }

-export function findPackedField(
+export function findPackedField(pf: PackedFields, did: string, field: string): number
 {
-  let fields =
+  let fields = pf.dsGroup[did].fields[did];
   return fields ? (fields[field] !== undefined ? fields[field] : -1) : -1;
 }

@@ -463,58 +424,15 @@ export function findPackedField(index: GroupPackedMetaIndex, pf: PackedFields, d
 export type FieldGetter = (f: string) => number;
 export function fieldGetterNotLoaded(f: string): number { return undefined }

-export function ToGetter(agg: PackedFields, dc: DatasetContext,
+export function ToGetter(agg: PackedFields, dc: DatasetContext, did: string): FieldGetter
 {
-  return (field: string) => { return getPackedField(
+  return (field: string) => { return getPackedField(agg, did, field) };
 }

-export function
+export function calcShift(agg: PackedFields, dc: DatasetContext, didOld: string, didNew: string): number
 {
-
-
-    if (field === 'R')
-      return Math.round((getPackedField(dc.dsIndex, agg, '', datasetKey, 'R12') + getPackedField(dc.dsIndex, agg, '', datasetKey, 'R16')) / 2);
-    if (field === 'D')
-      return Math.round((getPackedField(dc.dsIndex, agg, '', datasetKey, 'D12') + getPackedField(dc.dsIndex, agg, '', datasetKey, 'D16')) / 2);
-    if (field === 'Tot')
-      return Math.round((
-        getPackedField(dc.dsIndex, agg, '', datasetKey, 'R12') + getPackedField(dc.dsIndex, agg, '', datasetKey, 'R16') +
-        getPackedField(dc.dsIndex, agg, '', datasetKey, 'D12') + getPackedField(dc.dsIndex, agg, '', datasetKey, 'D16')) / 2);
-    return 0;
-  };
-}
-
-export function ToGetterPvi20(agg: PackedFields, dc: DatasetContext): FieldGetter
-{
-  return (field: string) =>
-  {
-    if (field === 'R')
-      return Math.round((getPackedField(dc.dsIndex, agg, '', DS_PRES2016, 'R') + getPackedField(dc.dsIndex, agg, '', DS_PRES2020, 'R')) / 2);
-    if (field === 'D')
-      return Math.round((getPackedField(dc.dsIndex, agg, '', DS_PRES2016, 'D') + getPackedField(dc.dsIndex, agg, '', DS_PRES2020, 'D')) / 2);
-    if (field === 'Tot')
-      return Math.round((
-        getPackedField(dc.dsIndex, agg, '', DS_PRES2016, 'R') + getPackedField(dc.dsIndex, agg, '', DS_PRES2020, 'R') +
-        getPackedField(dc.dsIndex, agg, '', DS_PRES2016, 'D') + getPackedField(dc.dsIndex, agg, '', DS_PRES2020, 'D')) / 2);
-    return 0;
-  };
-
-}
-
-export function calcShift(agg: PackedFields, dc: DatasetContext, datasetOld: string, datasetNew: string): number
-{
-  const didOld = toDatasetID(datasetOld);
-  const didNew = toDatasetID(datasetNew);
-  const getterOld = datasetOld === DS_PVI2016 ?
-    ToGetterPvi16(agg, dc, datasetOld) :
-    datasetOld === DS_PVI2020 ?
-      ToGetterPvi20(agg, dc) :
-      ToGetter(agg, dc, didOld, datasetOld);
-  const getterNew = datasetNew === DS_PVI2016 ?
-    ToGetterPvi16(agg, dc, datasetNew) :
-    datasetNew === DS_PVI2020 ?
-      ToGetterPvi20(agg, dc) :
-      ToGetter(agg, dc, didNew, datasetNew);
+  const getterOld = ToGetter(agg, dc, didOld);
+  const getterNew = ToGetter(agg, dc, didNew);

   // Calc two-party Swing
   const repOld = getterOld('R');
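
The rewritten lib/packedfields.ts keeps the same aggregate layout (slot 0 of each PackedFieldsArray counts the items aggregated) but hangs every array off pf.data[did], with the matching GroupPackedMetaIndex carried in pf.dsGroup. The simplified sketch below, which is not the package implementation, shows that per-did layout being zeroed and accumulated in the spirit of zeroPackedFields and aggregatePackedFields; the dataset id and field offsets are invented.

// Simplified sketch (not package code) of the per-did aggregate layout in 1.8.136.
// Types mirror lib/packedfields.ts; 'election.ds' and the offsets are invented.
type PackedFieldsArray = Float64Array;
interface PackedFieldsIndex { [field: string]: number; }
interface PackedMetaIndex { length: number; fields: { [dataset: string]: PackedFieldsIndex }; }
type GroupPackedMetaIndex = { [did: string]: PackedMetaIndex };
interface PackedFields { dsGroup: GroupPackedMetaIndex; data: { [did: string]: PackedFieldsArray }; }

// Zeroed aggregate with one array per dataset id (cf. zeroPackedFields(dsGroup)).
function zeroed(dsGroup: GroupPackedMetaIndex): PackedFields {
  const data: { [did: string]: PackedFieldsArray } = {};
  for (const did of Object.keys(dsGroup))
    data[did] = new Float64Array(dsGroup[did].length); // slot 0 = count of items aggregated
  return { dsGroup, data };
}

// Field-wise accumulation over pf.data, bumping the count slot once per contribution.
function addInto(agg: PackedFields, pf: PackedFields): void {
  for (const did of Object.keys(pf.data)) {
    const af = agg.data[did], sf = pf.data[did];
    if (!af || !sf || af.length !== sf.length) continue;
    af[0] += 1;
    for (let i = 1; i < sf.length; i++) af[i] += sf[i];
  }
}

const did = 'election.ds';
const dsGroup: GroupPackedMetaIndex = { [did]: { length: 3, fields: { [did]: { D: 1, R: 2 } } } };
const agg = zeroed(dsGroup);
addInto(agg, { dsGroup, data: { [did]: Float64Array.from([0, 40, 60]) } });
addInto(agg, { dsGroup, data: { [did]: Float64Array.from([0, 55, 45]) } });
console.log(agg.data[did]); // Float64Array [ 2, 95, 105 ]
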
package/lib/splittogeofeature.ts
CHANGED
@@ -24,11 +24,7 @@ export function splitToGeoFeature(split: DT.SplitBlock, topoPrecinct: Poly.Topo,
   if (b.properties.packedFields)
   {
     if (! f.properties.packedFields)
-    {
       f.properties.packedFields = PF.packedCopy(b.properties.packedFields);
-      f.properties.packedIndex = b.properties.packedIndex;
-      f.properties.getDatasetField = b.properties.getDatasetField;
-    }
     else
       PF.aggregatePackedFields(f.properties.packedFields, b.properties.packedFields);
   }