@saber-usa/node-common 1.7.8 → 1.7.9-alpha.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +8 -38
- package/package.json +1 -1
- package/src/astro.js +3 -3
- package/src/checkNetwork.cjs +20 -20
- package/src/index.js +1 -1
- package/src/loggerFactory.cjs +98 -98
- package/src/udl.js +117 -2
package/README.md
CHANGED
@@ -1,43 +1,13 @@
-#
+# Node Common
 
-
+Common nodejs functions used across a number of saber's applications.
 
-##
-
-The logger component is a standard logger based on [winston](https://github.com/winstonjs/winston). The logger factory
-creates a default console logger for errors along with exception/rejection catching. By setting the `CONSOLE_LOG`
-variable to `true` will turn on the console logger for local development. The log level is configured by the `LOG_LEVEL`
-environment variable to one of the following levels: `emerg`, `alert`, `crit`, `error`, `warning`, `warn`, `notice`,
-`info`, `debug` (default level is `error`). The reason using an environment variable for the level is useful when
-deployed to the server. Changing the environment variable is easier than setting a new flag in the script execution.
-
-### Usage
-
-To create a new logger, just pass in options to the factory:
-
-```javascript
-const logger = loggerFactory(opts);
-
-logger.debug("This is a debug message");
-```
-
-For further usage, consult [winston](https://github.com/winstonjs/winston)
-
-### Options
-
-- `nameSpace` help distinguish where the logger message comes from.
-- `additionalData` Data to include with each message
-- `level` overwrite the `LOG_LEVEL` environment level
-
-## Transformer
-
-A transformer for object keys is provided. This will take an object and transform the keys using a function.
+## License
+AGPLv3
 
 ## Publishing changes
 
-1.
-2. Check you are logged in to npm `npm whoami`
-
-
-4. Run `gpg --list-keys` to get your <id>
-5. Tell the GIT to use your key `git config user.signingkey <id>`
+1. Increment the version in `package.json`
+2. Check you are logged in to npm using `npm whoami`
+3. You may need to login using `npm login` note your npm user must have 2FA enabled (in your profile)
+4. Run `npm install` then `npm publish`
package/package.json
CHANGED
package/src/astro.js
CHANGED
@@ -1703,7 +1703,7 @@ const getLeoRpoData = (line1, line2, sats, startTime, endTime) => {
 Line2: line2,
 };
 const pEphem = prop(pElset, start, end, 10000);
-if(pEphem.length === 0) return results; // Primary may have re-entered the atmosphere
+if (pEphem.length === 0) return results; // Primary may have re-entered the atmosphere
 
 sats.forEach( (s) => {
 const sEphem = prop(s, start, end, 10000);

@@ -1788,8 +1788,8 @@ const getGeoRpoData = (line1, line2, sats, startTime, endTime, lonTime) => {
 Line1: line1,
 Line2: line2,
 }, start, end, 60000);
-if(pEphem.length === 0) return results; // Primary may have re-entered the atmosphere
-
+if (pEphem.length === 0) return results; // Primary may have re-entered the atmosphere
+
 const lonEvalTime = lonTime ? new Date(lonTime) : new Date(end);
 
 const pLonAndDrift = getLonAndDrift(line1, line2, lonEvalTime);
package/src/checkNetwork.cjs
CHANGED
@@ -1,20 +1,20 @@
All 20 lines are removed and re-added; the extracted text of the old and new sides is identical, so the specific change (presumably whitespace or formatting only) is not visible in this rendering. The resulting file content:

const _ = require("lodash");
const {resolve4} = require("dns").promises;


const checkRecord = (hostName) => resolve4(hostName).
    then((addresss) => !_.isEmpty(addresss)).
    catch(_.stubFalse);


module.exports.checkRecord = checkRecord;
module.exports.checkNetwork = (domains) => Promise.all(
    _.map(
        domains,
        (hostName) => checkRecord(hostName).
            then((found) => found
                ? hostName
                : null,
            ),
    ),
).then((resolved) => _.flow(_.compact, _.first)(resolved) || null);
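For context, a minimal usage sketch of the two exports above. The `require` path is an assumption (it presumes the package entry point re-exports these helpers; otherwise require `src/checkNetwork.cjs` directly):

```javascript
// Minimal sketch; the require path below is an assumption.
const {checkRecord, checkNetwork} = require("@saber-usa/node-common");

// checkRecord resolves A records for a host and settles to true/false (it never rejects).
checkRecord("example.com").then((hasRecord) => console.log(hasRecord));

// checkNetwork looks up every domain in parallel and resolves to the first one
// (in input order) that has an A record, or null if none resolve.
checkNetwork(["internal.example.com", "example.com"])
    .then((reachable) => console.log("reachable domain:", reachable));
```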
package/src/index.js
CHANGED
package/src/loggerFactory.cjs
CHANGED
@@ -1,98 +1,98 @@
All 98 lines are removed and re-added; the extracted text of the old and new sides is identical, so the specific change (presumably whitespace or formatting only) is not visible in this rendering. The resulting file content:

const winston = require("winston");
const {
    createLogger,
    format,
    transports,
} = winston;
const cj = (data) => Object.keys(data).length > 0
    ? JSON.stringify(data, null, 2)
    : null;

const isTest = process.env.NODE_ENV === "test";

const defaultLogger = new transports.Console({
    silent: isTest,
    level: "warn",
    format: format.errors({stack: true}),
    timestamp: true,
});

const errorLogger = new transports.Console({
    silent: isTest,
    format: format.errors({stack: true}),
});

/**
 * Creates a logger
 *
 * @param {String} nameSpace Namespace for the logger
 * @param {Object} additionalData additional data to include with logging
 * @param {string} level Default logging level to set (overwrites environment setting)
 * @return {winston.Logger}
 */
module.exports.loggerFactory = ({
    nameSpace = "saber",
    additionalData = {},
    level,
} = {}) => {
    const logLevel = process.env.LOG_LEVEL || "warn";
    const isConsole = !!(process.env.CONSOLE_LOG);

    const loggerTransport = isConsole
        ? new transports.Console({
            defaultMeta: additionalData,
            level: level || logLevel,
            format: format.combine(
                format.errors({stack: true}),
                format.cli(),
                // eslint-disable-next-line max-len
                format.printf(({
                    level,
                    message,
                    nameSpace,
                    stack,
                    ...rest
                }) =>
                    [
                        level,
                        `[${nameSpace}]`,
                        ":",
                        message,
                        stack,
                        cj(rest),
                    ].filter((value) => !!value)
                        .join(" "),
                ),
            ),
        })
        : defaultLogger;

    const config = {
        levels: {
            // RFC5424
            emerg: 0,
            alert: 1,
            crit: 2,
            error: 3,
            warning: 4,
            notice: 5,
            info: 6,
            debug: 7,
            // npm levels
            warn: 4,
            verbose: 6,
            silly: 8,
        },
        exitOnError: false,
        transports: [loggerTransport],
        // Console logger from above will log errors
        exceptionHandlers: !isConsole ? [errorLogger] : [],
        rejectionHandlers: !isConsole ? [errorLogger] : [],
    };

    return createLogger(config)
        .child({
            ...additionalData,
            nameSpace: nameSpace,
        });
}
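A minimal usage sketch for `loggerFactory` as defined above; the option names come from the code, while the `require` path is an assumption about the package entry point:

```javascript
// Minimal sketch; the require path below is an assumption.
const {loggerFactory} = require("@saber-usa/node-common");

// CONSOLE_LOG=true enables the console transport; LOG_LEVEL (default "warn")
// or the `level` option controls verbosity.
const logger = loggerFactory({
    nameSpace: "my-service", // hypothetical namespace
    additionalData: {component: "ingest"}, // attached to every log message
    level: "debug", // overrides LOG_LEVEL
});

logger.debug("This is a debug message");
logger.error(new Error("boom")); // stack is captured via format.errors({stack: true})
```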
package/src/udl.js
CHANGED
@@ -1,5 +1,14 @@
 import {fixDate} from "./fixDate.js";
-import {
+import {
+    calcRegime,
+    getElsetUdlFromTle,
+    getLonAndDrift,
+    getRaanPrecession,
+    raDecToAzEl,
+    azElToRaDec,
+    raDecToGeodetic,
+    estimateSlantRange,
+} from "./astro.js";
 import {lowerCaseObjectKeys} from "./transform.js";
 import {isDefined} from "./utils.js";
 import _ from "lodash";

@@ -113,4 +122,110 @@ export function formatUdlData(topic, udlData) {
 }
 }
 
-
+/**
+ * Enriches the Azimuth and Elevation fields of the given object, if needed.
+ *
+ * @param {object} npsOb - The object to be enriched.
+ * @param {array} enrichedFields - The array of already enriched fields.
+ * @return {array} The updated array of enriched fields.
+ */
+const enrichAzimuthAndElevation = (npsOb, enrichedFields) => {
+    const azel = raDecToAzEl(npsOb.ObTime, npsOb.Ra, npsOb.Dec, npsOb.SenLat, npsOb.SenLon);
+    const {Az, El} = azel;
+    if (Az && !npsOb.Azimuth) {
+        npsOb.Azimuth = Az;
+        enrichedFields.push("Azimuth");
+    }
+    if (El && !npsOb.Elevation) {
+        npsOb.Elevation = El;
+        enrichedFields.push("Elevation");
+    }
+    return enrichedFields;
+};
+
+/**
+ * Enriches the Ra and Dec fields of the given object, if needed.
+ *
+ * @param {object} npsOb - The object to be enriched.
+ * @param {array} enrichedFields - The array of already enriched fields.
+ * @return {array} The updated array of enriched fields.
+ */
+const enrichRaAndDec = (npsOb, enrichedFields) => {
+    const radec = azElToRaDec(
+        npsOb.ObTime,
+        npsOb.Azimuth,
+        npsOb.Elevation,
+        npsOb.SenLat,
+        npsOb.SenLon,
+    );
+    const {ra, dec} = radec;
+    if (ra && !npsOb.Ra) {
+        npsOb.Ra = ra;
+        enrichedFields.push("Ra");
+    }
+    if (dec && !npsOb.Dec) {
+        npsOb.Dec = dec;
+        enrichedFields.push("Dec");
+    }
+    return enrichedFields;
+};
+
+/**
+ * Enriches the GeoLon field of the given object, if needed.
+ *
+ * @param {object} npsOb - The object to be enriched.
+ * @param {array} enrichedFields - The array of already enriched fields.
+ * @return {array} The updated array of enriched fields.
+ */
+const enrichGeoLon = (npsOb, enrichedFields) => {
+    // In the absence of range, we estimate the slant range from the sensor to the sat
+    const slantRange = npsOb.Range ?? npsOb.GeoRange ?? estimateSlantRange(
+        npsOb.ObTime,
+        npsOb.Ra, npsOb.Dec,
+        npsOb.SenLat, npsOb.SenLon, npsOb.SenAlt,
+    );
+    const geodetic = raDecToGeodetic(
+        npsOb.ObTime,
+        npsOb.Ra,
+        npsOb.Dec,
+        npsOb.SenLat,
+        npsOb.SenLon,
+        npsOb.SenAlt,
+        slantRange,
+    );
+    npsOb.GeoLon = geodetic.Longitude;
+    npsOb.GeoLat = geodetic.Latitude;
+    npsOb.GeoRange = slantRange;
+    if (!npsOb.Range && !npsOb.GeoRange) enrichedFields.push("GeoRange");
+    if (npsOb.GeoLon) enrichedFields.push("GeoLon");
+    if (npsOb.GeoLat) enrichedFields.push("GeoLat");
+    return enrichedFields;
+};
+
+/**
+ * Enriches the given object by adding missing fields.
+ * This is the main function that uses helper functions to enrich different fields of the object.
+ *
+ * @param {object} npsOb - The object to be enriched.
+ * @return {object} The enriched object.
+ */
+const enrichUdlFields = (npsOb) => {
+    let enrichedFields = [];
+
+    if (!npsOb.Azimuth || !npsOb.Elevation) {
+        enrichedFields = enrichAzimuthAndElevation(npsOb, enrichedFields);
+    }
+
+    if (!npsOb.Ra || !npsOb.Dec) {
+        enrichedFields = enrichRaAndDec(npsOb, enrichedFields);
+    }
+
+    if (!npsOb.GeoLon) {
+        enrichedFields = enrichGeoLon(npsOb, enrichedFields);
+    }
+
+    npsOb.EnrichedFields = enrichedFields.join(",");
+    return npsOb;
+};
+
+export {udlToNpsElset, udlToNpsGroundSite, enrichUdlFields};
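A usage sketch for the new `enrichUdlFields` export. Field names follow the code above; the observation values and the import path are illustrative assumptions:

```javascript
// Minimal sketch; import path and observation values are assumptions.
import {enrichUdlFields} from "@saber-usa/node-common";

// A hypothetical optical observation with Ra/Dec but no Azimuth/Elevation or GeoLon.
const npsOb = {
    ObTime: "2024-01-01T00:00:00.000Z",
    Ra: 250.1,
    Dec: -5.3,
    SenLat: 35.0,
    SenLon: -106.5,
    SenAlt: 1.6,
};

const enriched = enrichUdlFields(npsOb);
// Missing fields are derived from the available ones, and the derived field names
// are recorded in EnrichedFields, e.g. "Azimuth,Elevation,GeoLon,GeoLat".
console.log(enriched.EnrichedFields);
```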