@dra2020/dra-types 1.4.9 → 1.5.11
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/all.d.ts +3 -0
- package/dist/csv.d.ts +40 -0
- package/dist/dra-types.d.ts +0 -58
- package/dist/dra-types.js +289 -80
- package/dist/dra-types.js.map +1 -1
- package/dist/stats.d.ts +45 -0
- package/dist/vfeature.d.ts +20 -0
- package/lib/all.ts +3 -0
- package/lib/bucketmap.ts +3 -0
- package/lib/csv.ts +515 -0
- package/lib/dra-types.ts +0 -592
- package/lib/schemas.ts +3 -0
- package/lib/stats.ts +203 -0
- package/lib/vfeature.ts +105 -0
- package/package.json +4 -4
package/dist/all.d.ts
CHANGED
package/dist/csv.d.ts
ADDED
@@ -0,0 +1,40 @@
+import * as VF from './vfeature';
+export declare type BlockMap = {
+    [id: string]: number;
+};
+export declare type BlockMapping = {
+    [id: string]: string;
+};
+export declare function canonicalDistrictID(districtID: string): string;
+export declare function canonicalSortingDistrictID(districtID: string): string;
+export declare function canonicalNumericFromDistrictID(districtID: string): number;
+export declare function canonicalDistrictIDFromNumber(districtID: string, n: number): string;
+export declare type DistrictOrder = {
+    [districtID: string]: number;
+};
+export declare function canonicalDistrictIDGapFill(keys: string[]): string[];
+export declare function canonicalDistrictIDOrdering(order: DistrictOrder): DistrictOrder;
+export interface OneCSVLine {
+    geoid: string;
+    districtID: string;
+}
+export declare function parseCSVLine(line: string): OneCSVLine;
+export interface ConvertResult {
+    inBlockMap: BlockMapping;
+    inStateMap: BlockMapping;
+    outValid: boolean;
+    outState: string;
+    outMap: BlockMapping;
+    outOrder: DistrictOrder;
+    outDistrictToSplit: VF.DistrictToSplitBlock;
+}
+export declare function blockmapToState(blockMap: BlockMapping): string;
+export declare function blockmapToVTDmap(blockMap: BlockMapping, stateMap: BlockMapping): ConvertResult;
+export declare const GEOIDToState: any;
+export declare const StateToGEOID: any;
+export declare function geoidToState(geoid: string): string;
+export declare type StateUrls = ('alabama' | 'alaska' | 'arizona' | 'arkansas' | 'california' | 'colorado' | 'connecticut' | 'delaware' | 'florida' | 'georgia' | 'hawaii' | 'idaho' | 'illinois' | 'indiana' | 'iowa' | 'kansas' | 'kentucky' | 'louisiana' | 'maine' | 'maryland' | 'massachusetts' | 'michigan' | 'minnesota' | 'mississippi' | 'missouri' | 'montana' | 'nebraska' | 'nevada' | 'new-hampshire' | 'new-jersey' | 'new-mexico' | 'new-york' | 'north-carolina' | 'north-dakota' | 'ohio' | 'oklahoma' | 'oregon' | 'pennsylvania' | 'rhode-island' | 'south-carolina' | 'south-dakota' | 'tennessee' | 'texas' | 'utah' | 'vermont' | 'virginia' | 'washington' | 'west-virginia' | 'wisconsin' | 'wyoming');
+export declare type ValidStateUrlsType = {
+    readonly [stateUrl in StateUrls]: boolean;
+};
+export declare function isStateUrl(s: any): s is StateUrls;
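The new csv module carries the block-assignment CSV helpers that previously lived in dra-types. A minimal usage sketch of the declarations above, with hypothetical CSV rows; it assumes these exports are reachable from the package root, which the updated dist/dra-types.js index re-exports:

```ts
import * as DT from '@dra2020/dra-types';

// Hypothetical block-assignment rows of the form "GEOID,DISTRICT".
const lines = ['010010201001000,1', '010010201001001,2'];

const blockMap: { [id: string]: string } = {};
for (const line of lines) {
  const one = DT.parseCSVLine(line);            // -> { geoid, districtID }
  if (one) blockMap[one.geoid] = DT.canonicalDistrictID(one.districtID);
}

console.log(DT.blockmapToState(blockMap));      // state inferred from the GEOID prefixes
console.log(DT.isStateUrl('north-carolina'));   // true: narrows to the StateUrls union
```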
package/dist/dra-types.d.ts
CHANGED
@@ -1,9 +1,3 @@
-export declare type BlockMap = {
-    [id: string]: number;
-};
-export declare type BlockMapping = {
-    [id: string]: string;
-};
 export interface Comment {
     userid: string;
     text: string;
@@ -27,55 +21,3 @@ export interface UserLikes {
     id?: string;
     [aid: string]: Like | string;
 }
-export interface SplitBlock {
-    id?: string;
-    chunkKey?: string;
-    chunk?: string;
-    state: string;
-    datasource: string;
-    geoid: string;
-    blocks: string[];
-}
-export declare type DistrictToSplitBlock = {
-    [districtID: string]: SplitBlock[];
-};
-export declare function vgeoidToGeoid(vgeoid: string): string;
-export declare function vgeoidToChunk(vgeoid: string): string;
-export declare function vgeoidToHash(vgeoid: string): string;
-export declare function isVfeature(geoid: string): boolean;
-export declare function splitToCacheKey(s: SplitBlock): string;
-export declare function splitToChunkKey(s: SplitBlock): string;
-export declare function splitToPrefix(s: SplitBlock): string;
-export declare function cacheKeysToChunkHash(keys: string[]): string;
-export declare function canonicalDistrictID(districtID: string): string;
-export declare function canonicalSortingDistrictID(districtID: string): string;
-export declare function canonicalNumericFromDistrictID(districtID: string): number;
-export declare function canonicalDistrictIDFromNumber(districtID: string, n: number): string;
-export declare type DistrictOrder = {
-    [districtID: string]: number;
-};
-export declare function canonicalDistrictIDOrdering(order: DistrictOrder): DistrictOrder;
-export interface OneCSVLine {
-    geoid: string;
-    districtID: string;
-}
-export declare function parseCSVLine(line: string): OneCSVLine;
-export interface ConvertResult {
-    inBlockMap: BlockMapping;
-    inStateMap: BlockMapping;
-    outValid: boolean;
-    outState: string;
-    outMap: BlockMapping;
-    outOrder: DistrictOrder;
-    outDistrictToSplit: DistrictToSplitBlock;
-}
-export declare function blockmapToState(blockMap: BlockMapping): string;
-export declare function blockmapToVTDmap(blockMap: BlockMapping, stateMap: BlockMapping): ConvertResult;
-export declare const GEOIDToState: any;
-export declare const StateToGEOID: any;
-export declare function geoidToState(geoid: string): string;
-export declare type StateUrls = ('alabama' | 'alaska' | 'arizona' | 'arkansas' | 'california' | 'colorado' | 'connecticut' | 'delaware' | 'florida' | 'georgia' | 'hawaii' | 'idaho' | 'illinois' | 'indiana' | 'iowa' | 'kansas' | 'kentucky' | 'louisiana' | 'maine' | 'maryland' | 'massachusetts' | 'michigan' | 'minnesota' | 'mississippi' | 'missouri' | 'montana' | 'nebraska' | 'nevada' | 'new-hampshire' | 'new-jersey' | 'new-mexico' | 'new-york' | 'north-carolina' | 'north-dakota' | 'ohio' | 'oklahoma' | 'oregon' | 'pennsylvania' | 'rhode-island' | 'south-carolina' | 'south-dakota' | 'tennessee' | 'texas' | 'utah' | 'vermont' | 'virginia' | 'washington' | 'west-virginia' | 'wisconsin' | 'wyoming');
-export declare type ValidStateUrlsType = {
-    readonly [stateUrl in StateUrls]: boolean;
-};
-export declare function isStateUrl(s: any): s is StateUrls;
package/dist/dra-types.js
CHANGED
@@ -109,11 +109,13 @@ function __export(m) {
     for (var p in m) if (!exports.hasOwnProperty(p)) exports[p] = m[p];
 }
 Object.defineProperty(exports, "__esModule", { value: true });
-__export(__webpack_require__(/*! ./dra-types */ "./lib/dra-types.ts"));
 __export(__webpack_require__(/*! ./schemas */ "./lib/schemas.ts"));
 __export(__webpack_require__(/*! ./bucketmap */ "./lib/bucketmap.ts"));
 __export(__webpack_require__(/*! ./colordata */ "./lib/colordata.ts"));
 __export(__webpack_require__(/*! ./gencolor */ "./lib/gencolor.ts"));
+__export(__webpack_require__(/*! ./vfeature */ "./lib/vfeature.ts"));
+__export(__webpack_require__(/*! ./csv */ "./lib/csv.ts"));
+__export(__webpack_require__(/*! ./stats */ "./lib/stats.ts"));
 
 
 /***/ }),
@@ -135,8 +137,11 @@ exports.BucketMap = {
     'state': 'dra-us-west-2-723398989493',
     'state-dev': 'dev-dra-us-west-2-723398989493',
     'logs': 'dra-uswest-logs',
+    'logs-dev': 'dra-uswest-logs',
     'memsqs': 'dra-uswest-memsqs',
+    'memsqs-dev': 'dra-uswest-memsqs',
     'images': 'dra-uswest-images',
+    'images-dev': 'dra-uswest-images',
     'data': 'dra-us-west-datafiles',
     'data-dev': 'dra-us-west-datafiles-dev',
     'splits': 'dra-block-cache',
@@ -475,10 +480,10 @@ lum.forEach((n) => { exports.IntensityBackgroundColor.push(`rgba(${n}, ${n}, ${n
 
 /***/ }),
 
-/***/ "./lib/dra-types.ts":
-/*!**************************!*\
-  !*** ./lib/dra-types.ts ***!
-  \**************************/
+/***/ "./lib/csv.ts":
+/*!********************!*\
+  !*** ./lib/csv.ts ***!
+  \********************/
 /*! no static exports found */
 /***/ (function(module, exports, __webpack_require__) {
 
@@ -486,82 +491,7 @@ lum.forEach((n) => { exports.IntensityBackgroundColor.push(`rgba(${n}, ${n}, ${n
 
 Object.defineProperty(exports, "__esModule", { value: true });
 // Public libraries
-const Hash = __webpack_require__(/*! object-hash */ "object-hash");
 const Util = __webpack_require__(/*! @dra2020/util */ "@dra2020/util");
-// Canonical hashing of splitblock data
-function hash(o) {
-    return Hash(o, { respectType: false,
-        unorderedArrays: true,
-        unorderedObjects: true,
-        excludeKeys: (k) => (k === 'id' || k === 'chunk')
-    });
-}
-function vgeoidToGeoid(vgeoid) {
-    let re = /vfeature_([^_]*)_.*/;
-    let a = re.exec(vgeoid);
-    if (a == null || a.length != 2)
-        return '';
-    else
-        return a[1];
-}
-exports.vgeoidToGeoid = vgeoidToGeoid;
-function vgeoidToChunk(vgeoid) {
-    // vgeoid is string of form: "vfeature_[geoid]_[chunkid]_[hash]"
-    // the contents are chunked into a file of form "vfeature_chunk_[chunkid]"
-    // So extract the chunk ID and download that.
-    let re = /vfeature_([^_]*)_([^_*])_(.*)/;
-    let a = re.exec(vgeoid);
-    if (a && a.length == 4)
-        vgeoid = `vfeature_chunk_${a[2]}`;
-    else
-        vgeoid = null;
-    return vgeoid;
-}
-exports.vgeoidToChunk = vgeoidToChunk;
-function vgeoidToHash(vgeoid) {
-    // vgeoid is string of form: "vfeature_[geoid]_[chunkid]_[hash]"
-    let re = /vfeature_([^_]*)_([^_*])_(.*)/;
-    let a = re.exec(vgeoid);
-    if (a && a.length == 4)
-        vgeoid = a[3];
-    else
-        vgeoid = null;
-    return vgeoid;
-}
-exports.vgeoidToHash = vgeoidToHash;
-function isVfeature(geoid) {
-    return geoid.indexOf('vfeature') === 0;
-}
-exports.isVfeature = isVfeature;
-function splitToCacheKey(s) {
-    if (s.id === undefined)
-        s.id = hash(s);
-    if (s.chunk === undefined)
-        s.chunk = "0";
-    return `_${s.state}_${s.datasource}_vfeature_${s.geoid}_${s.chunk}_${s.id}.geojson`;
-}
-exports.splitToCacheKey = splitToCacheKey;
-function splitToChunkKey(s) {
-    if (s.chunk === undefined)
-        s.chunk = "0";
-    return `_${s.state}_${s.datasource}_vfeature_chunk_${s.chunk}.geojson`;
-}
-exports.splitToChunkKey = splitToChunkKey;
-function splitToPrefix(s) {
-    if (s.blocks === undefined) {
-        let re = /_([^_]*)_(.*)_vfeature.*\.geojson$/;
-        let a = re.exec(s.id);
-        if (a && a.length == 3)
-            return `_${a[1]}_${a[2]}`;
-        return s.id;
-    }
-    return `_${s.state}_${s.datasource}`;
-}
-exports.splitToPrefix = splitToPrefix;
-function cacheKeysToChunkHash(keys) {
-    return hash(keys);
-}
-exports.cacheKeysToChunkHash = cacheKeysToChunkHash;
 let reNumeric = /^(\D*)(\d*)(\D*)$/;
 let reDistrictNumber = /^\d+$/;
 let reDistrictNumeric = /^\d/;
@@ -577,6 +507,8 @@ function canonicalDistrictID(districtID) {
 }
 exports.canonicalDistrictID = canonicalDistrictID;
 // Normalize any numeric part to have four digits with padded leading zeros
+// so alphabetic sorting will result in correct numeric sort for mixed alphanumber
+// district labels.
 function canonicalSortingDistrictID(districtID) {
     let a = reNumeric.exec(districtID);
     if (a && a.length == 4) {
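The padding matters because a plain string sort puts '10' before '2'. A small illustration of the behavior the added comment describes (the zero-padded output shown is assumed from that comment, not taken from the package's tests):

```ts
import * as DT from '@dra2020/dra-types';

// Lexicographic sort misorders bare numeric labels:
['1', '2', '10'].sort();                                      // ['1', '10', '2']

// Padding the numeric part to four digits makes string order match numeric order:
['1', '2', '10'].map(DT.canonicalSortingDistrictID).sort();   // ['0001', '0002', '0010']
```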
@@ -622,12 +554,34 @@ function canonicalDistrictIDFromNumber(districtID, n) {
     return districtID;
 }
 exports.canonicalDistrictIDFromNumber = canonicalDistrictIDFromNumber;
+// If purely numeric districtIDs and we are missing some number of IDs less than
+function canonicalDistrictIDGapFill(keys) {
+    if (keys == null || keys.length == 0)
+        return keys;
+    let nonNumeric = keys.find((s) => !reDistrictNumber.test(s)) !== undefined;
+    if (nonNumeric)
+        return keys;
+    let max = Number(keys[keys.length - 1]);
+    if (max == keys.length || (max - keys.length) > keys.length)
+        return keys; // no gaps or too many gaps
+    // OK, finally going to fill some gaps
+    for (let i = 0; i < keys.length; i++) {
+        let here = Number(keys[i]);
+        while (here > i + 1) {
+            keys.splice(i, 0, canonicalSortingDistrictID(String(i + 1)));
+            i++;
+        }
+    }
+    return keys;
+}
+exports.canonicalDistrictIDGapFill = canonicalDistrictIDGapFill;
 function canonicalDistrictIDOrdering(order) {
     let keys = Object.keys(order);
     let i;
     let a = [];
     let template = undefined;
     keys = keys.map((s) => canonicalSortingDistrictID(s));
+    keys = canonicalDistrictIDGapFill(keys);
     keys.sort();
     order = {};
     for (i = 0; i < keys.length; i++)
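canonicalDistrictIDGapFill only fires on purely numeric, already-normalized keys and fills small gaps in place, so canonicalDistrictIDOrdering now yields a contiguous ordering even when a plan skips a district number. A worked example with hypothetical keys, assuming the root bundle re-exports the csv helpers as the new index suggests:

```ts
import * as DT from '@dra2020/dra-types';

// Keys as produced by canonicalSortingDistrictID: numeric and zero-padded.
// '0002' is missing and there is only one gap, so it gets filled in place.
DT.canonicalDistrictIDGapFill(['0001', '0003', '0004']);
// -> ['0001', '0002', '0003', '0004']

// Any non-numeric label disables gap filling and the keys come back unchanged.
DT.canonicalDistrictIDGapFill(['A', 'B']);
// -> ['A', 'B']
```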
@@ -986,6 +940,7 @@ exports.Schemas = {
     resetGUID: 'S',
     resetTime: 'S',
     lastActive: 'S',
+    modifyTime: 'S',
     resetCount: 'N',
     accessed: 'M',
     likeID: 'S',
@@ -1072,9 +1027,263 @@ exports.Schemas = {
     'likes': {},
     'comments': {},
     'stats': {},
+    'livestats': {},
 };
 
 
+/***/ }),
+
+/***/ "./lib/stats.ts":
+/*!**********************!*\
+  !*** ./lib/stats.ts ***!
+  \**********************/
+/*! no static exports found */
+/***/ (function(module, exports, __webpack_require__) {
+
+"use strict";
+
+// In constrast to logging, the stats manager is responsible for tracking live server status.
+// The results are stored in a compact database record which can be easily queried for current
+// status. Each server instance maintains its own field in the overall "values" record in the
+// stats database. The reporting tool aggregates all instances to report overall server activity.
+//
+// There are basically three kinds of values:
+//  Sum - we want to sum the value across all server instances (e.g. number of connnected clients)
+//  Avg - we just take average across all server instances (e.g. memory heap size)
+//  Rate - we want to track the rate of some activity over some time period
+//
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.ValTypeSum = 0;
+exports.ValTypeAvg = 1;
+exports.ValTypeRate = 2;
+const ExpiryAge = 1000 * 60 * 60 * 24; // throw away instance record after this time period
+function statExpiryTime() {
+    let time = new Date();
+    time.setTime(time.getTime() + ExpiryAge);
+    return time.toJSON();
+}
+exports.statExpiryTime = statExpiryTime;
+// Aggregate running value. This applies both in memory (logging multiple times before
+// saving to database) as well as aggregating into an instance structure in the DB.
+function statValueRecord(sv, cur, valType) {
+    if (sv === undefined)
+        sv = {};
+    if (sv.min === undefined || cur < sv.min)
+        sv.min = cur;
+    if (sv.max === undefined || cur > sv.max)
+        sv.max = cur;
+    sv.cur = cur;
+    sv.valType = valType;
+    return sv;
+}
+exports.statValueRecord = statValueRecord;
+function statRateIncr(sr, incr = 1) {
+    if (sr == null)
+        sr = {};
+    sr.valType = exports.ValTypeRate;
+    sr.thisMin = sr.thisMin === undefined ? incr : sr.thisMin + incr;
+    sr.thisHour = sr.thisHour === undefined ? incr : sr.thisHour + incr;
+    sr.thisDay = sr.thisDay === undefined ? incr : sr.thisDay + incr;
+    return sr;
+}
+exports.statRateIncr = statRateIncr;
+exports.OneMinute = 1000 * 60;
+exports.OneHour = exports.OneMinute * 60;
+exports.OneDay = exports.OneHour * 24;
+function statRateRollover(sr, period) {
+    if ((period % exports.OneMinute) == 0) {
+        sr.lastMin = sr.thisMin;
+        sr.thisMin = 0;
+    }
+    if ((period % exports.OneHour) == 0) {
+        sr.lastHour = sr.thisHour;
+        sr.thisHour = 0;
+    }
+    if ((period % exports.OneDay) == 0) {
+        sr.lastDay = sr.thisDay;
+        sr.thisDay = 0;
+    }
+}
+exports.statRateRollover = statRateRollover;
+function statEntryIndexMerge(accum, si) {
+    if (accum == null)
+        accum = {};
+    // We just discard and replace old rate information
+    Object.keys(accum).forEach((p) => { if (accum[p].valType == exports.ValTypeRate)
+        delete accum[p]; });
+    // But merge in all the other values
+    Object.keys(si).forEach((p) => {
+        let se = si[p];
+        if (se.valType == exports.ValTypeRate)
+            accum[p] = se;
+        else {
+            let sv = se;
+            accum[p] = statValueRecord(se, sv.cur, sv.valType);
+        }
+    });
+    return accum;
+}
+exports.statEntryIndexMerge = statEntryIndexMerge;
+function statEntryIndexRollover(si, period) {
+    Object.keys(si).forEach((p) => {
+        let se = si[p];
+        if (se.valType == exports.ValTypeRate)
+            statRateRollover(se, period);
+    });
+}
+exports.statEntryIndexRollover = statEntryIndexRollover;
+function statRecordMerge(accum, si) {
+    if (accum == null)
+        accum = { id: si.id, production: si.production, time: si.time, expires: si.expires, index: {} };
+    accum.expires = si.expires;
+    accum.time = si.time;
+    statEntryIndexMerge(accum.index, si.index);
+    return accum;
+}
+exports.statRecordMerge = statRecordMerge;
+// Accumulate a set of values together (e.g. from multiple running instances)
+// for reporting current state.
+//
+function statValueAccum(accum, sv) {
+    if (accum == null) {
+        accum = {};
+        accum.valType = sv.valType;
+    }
+    if (accum.min === undefined || sv.min < accum.min)
+        accum.min = sv.min;
+    if (accum.max === undefined || sv.max > accum.max)
+        accum.max = sv.max;
+    if (accum.valType === undefined)
+        accum.valType = sv.valType;
+    if (accum.valType == exports.ValTypeSum)
+        accum.cur = (accum.cur === undefined ? sv.cur : accum.cur + sv.cur);
+    else if (accum.valType == exports.ValTypeAvg) {
+        accum.tot = (accum.tot === undefined ? 0 : accum.tot) + sv.cur;
+        accum.cnt = (accum.cnt === undefined ? 0 : accum.cnt) + 1;
+        accum.cur = accum.tot / accum.cnt;
+    }
+    return accum;
+}
+exports.statValueAccum = statValueAccum;
+function statRateAccum(accum, sr) {
+    if (accum == null) {
+        accum = {};
+        accum.valType = sr.valType;
+    }
+    accum.thisMin = accum.thisMin === undefined ? sr.thisMin : (accum.thisMin + (sr.thisMin === undefined ? 0 : sr.thisMin));
+    accum.lastMin = accum.lastMin === undefined ? sr.lastMin : (accum.lastMin + (sr.lastMin === undefined ? 0 : sr.lastMin));
+    accum.thisHour = accum.thisHour === undefined ? sr.thisHour : (accum.thisHour + (sr.thisHour === undefined ? 0 : sr.thisHour));
+    accum.lastHour = accum.lastHour === undefined ? sr.lastHour : (accum.lastHour + (sr.lastHour === undefined ? 0 : sr.lastHour));
+    accum.thisDay = accum.thisDay === undefined ? sr.thisDay : (accum.thisDay + (sr.thisDay === undefined ? 0 : sr.thisDay));
+    accum.lastDay = accum.lastDay === undefined ? sr.lastDay : (accum.lastDay + (sr.lastDay === undefined ? 0 : sr.lastDay));
+    return accum;
+}
+exports.statRateAccum = statRateAccum;
+function statIndexAccum(accum, si) {
+    if (accum == null)
+        accum = {};
+    Object.keys(si).forEach((p) => {
+        let se = si[p];
+        if (se.valType == exports.ValTypeRate)
+            accum[p] = statRateAccum(accum[p], se);
+        else
+            accum[p] = statValueAccum(accum[p], se);
+    });
+    return accum;
+}
+exports.statIndexAccum = statIndexAccum;
+
+
+/***/ }),
+
+/***/ "./lib/vfeature.ts":
+/*!*************************!*\
+  !*** ./lib/vfeature.ts ***!
+  \*************************/
+/*! no static exports found */
+/***/ (function(module, exports, __webpack_require__) {
+
+"use strict";
+
+Object.defineProperty(exports, "__esModule", { value: true });
+// Public libraries
+const Hash = __webpack_require__(/*! object-hash */ "object-hash");
+// Canonical hashing of splitblock data
+function hash(o) {
+    return Hash(o, { respectType: false,
+        unorderedArrays: true,
+        unorderedObjects: true,
+        excludeKeys: (k) => (k === 'id' || k === 'chunk')
+    });
+}
+function vgeoidToGeoid(vgeoid) {
+    let re = /vfeature_([^_]*)_.*/;
+    let a = re.exec(vgeoid);
+    if (a == null || a.length != 2)
+        return '';
+    else
+        return a[1];
+}
+exports.vgeoidToGeoid = vgeoidToGeoid;
+function vgeoidToChunk(vgeoid) {
+    // vgeoid is string of form: "vfeature_[geoid]_[chunkid]_[hash]"
+    // the contents are chunked into a file of form "vfeature_chunk_[chunkid]"
+    // So extract the chunk ID and download that.
+    let re = /vfeature_([^_]*)_([^_*])_(.*)/;
+    let a = re.exec(vgeoid);
+    if (a && a.length == 4)
+        vgeoid = `vfeature_chunk_${a[2]}`;
+    else
+        vgeoid = null;
+    return vgeoid;
+}
+exports.vgeoidToChunk = vgeoidToChunk;
+function vgeoidToHash(vgeoid) {
+    // vgeoid is string of form: "vfeature_[geoid]_[chunkid]_[hash]"
+    let re = /vfeature_([^_]*)_([^_*])_(.*)/;
+    let a = re.exec(vgeoid);
+    if (a && a.length == 4)
+        vgeoid = a[3];
+    else
+        vgeoid = null;
+    return vgeoid;
+}
+exports.vgeoidToHash = vgeoidToHash;
+function isVfeature(geoid) {
+    return geoid.indexOf('vfeature') === 0;
+}
+exports.isVfeature = isVfeature;
+function splitToCacheKey(s) {
+    if (s.id === undefined)
+        s.id = hash(s);
+    if (s.chunk === undefined)
+        s.chunk = "0";
+    return `_${s.state}_${s.datasource}_vfeature_${s.geoid}_${s.chunk}_${s.id}.geojson`;
+}
+exports.splitToCacheKey = splitToCacheKey;
+function splitToChunkKey(s) {
+    if (s.chunk === undefined)
+        s.chunk = "0";
+    return `_${s.state}_${s.datasource}_vfeature_chunk_${s.chunk}.geojson`;
+}
+exports.splitToChunkKey = splitToChunkKey;
+function splitToPrefix(s) {
+    if (s.blocks === undefined) {
+        let re = /_([^_]*)_(.*)_vfeature.*\.geojson$/;
+        let a = re.exec(s.id);
+        if (a && a.length == 3)
+            return `_${a[1]}_${a[2]}`;
+        return s.id;
+    }
+    return `_${s.state}_${s.datasource}`;
+}
+exports.splitToPrefix = splitToPrefix;
+function cacheKeysToChunkHash(keys) {
+    return hash(keys);
+}
+exports.cacheKeysToChunkHash = cacheKeysToChunkHash;
+
+
 /***/ }),
 
 /***/ "@dra2020/util":