@hestia-earth/pipeline-utils 0.13.0 → 0.13.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/api.js.map +1 -1
- package/dist/ec2.js +4 -2
- package/dist/ec2.js.map +1 -1
- package/dist/find-nodes.js +37 -41
- package/dist/find-nodes.js.map +1 -1
- package/dist/gitlab.js +1 -4
- package/dist/gitlab.js.map +1 -1
- package/dist/lambda.d.ts +1 -1
- package/dist/lambda.js +4 -2
- package/dist/lambda.js.map +1 -1
- package/dist/local.js +3 -6
- package/dist/local.js.map +1 -1
- package/dist/progress.js.map +1 -1
- package/dist/s3.js +77 -42
- package/dist/s3.js.map +1 -1
- package/dist/sns.js +6 -4
- package/dist/sns.js.map +1 -1
- package/dist/sqs.js +6 -4
- package/dist/sqs.js.map +1 -1
- package/dist/utils.js +22 -19
- package/dist/utils.js.map +1 -1
- package/package.json +21 -19
- package/src/api.ts +1 -2
- package/src/ec2.ts +16 -13
- package/src/find-nodes.ts +85 -80
- package/src/gitlab.ts +1 -4
- package/src/lambda.ts +19 -16
- package/src/local.ts +20 -24
- package/src/progress.ts +39 -22
- package/src/s3.ts +150 -89
- package/src/sns.ts +36 -28
- package/src/sqs.ts +36 -28
- package/src/utils.ts +45 -38
package/dist/sqs.js.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"file":"sqs.js","sourceRoot":"","sources":["../src/sqs.ts"],"names":[],"mappings":";;;AAAA,+BAA+B;AAC/B,+BAAgC;AAChC,8CAA2C;AAE3C,+BAA8B;AAC9B,6BAAwD;AAMjD,MAAM,WAAW,GAAG,CACzB,YAAoB,
+
{"version":3,"file":"sqs.js","sourceRoot":"","sources":["../src/sqs.ts"],"names":[],"mappings":";;;AAAA,+BAA+B;AAC/B,+BAAgC;AAChC,8CAA2C;AAE3C,+BAA8B;AAC9B,6BAAwD;AAMjD,MAAM,WAAW,GAAG,CACzB,YAAoB,EACpB,MAAsB,EACtB,QAAQ,GAAG,OAAO,CAAC,GAAG,CAAC,OAAO,EAC9B,SAAqB,EAAE,EACvB,EAAE,CACF,aAAQ;IACN,CAAC,CAAC,IAAA,WAAI,EACF,IAAI,GAAG,CAAC,GAAG,CAAC,EAAE,MAAM,EAAN,WAAM,EAAE,CAAC;SACpB,WAAW,CAAC;QACX,QAAQ,EAAE,QAAQ;QAClB,WAAW,EAAE,IAAI,CAAC,SAAS,CAAC,MAAM,CAAC;QACnC,iBAAiB,EAAE;YACjB,YAAY,EAAE;gBACZ,QAAQ,EAAE,QAAQ;gBAClB,WAAW,EAAE,YAAY;aAC1B;YACD,GAAG,MAAM,CAAC,WAAW,CACnB,MAAM,CAAC,OAAO,CAAC,MAAM,IAAI,EAAE,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,GAAG,EAAE,KAAK,CAAC,EAAE,EAAE,CAAC;gBACjD,GAAG;gBACH;oBACE,QAAQ,EAAE,QAAQ;oBAClB,WAAW,EAAE,KAAK;iBACnB;aACF,CAAC,CACH;SACF;KACF,CAAC;SACD,OAAO,EAAE,CACb,CAAC,IAAI,CACJ,IAAA,gBAAI,EAAC,CAAC,CAAC,EACP,IAAA,eAAG,EAAC,GAAG,CAAC,EAAE;QACR,IAAA,WAAK,EAAC,oBAAoB,YAAY,EAAE,EAAE,GAAG,CAAC,CAAC;QAC/C,OAAO,GAAG,CAAC;IACb,CAAC,CAAC,CACH;IACH,CAAC,CAAC,IAAA,SAAE,EAAC,IAAI,CAAC,CAAC;AApCF,QAAA,WAAW,eAoCT"}
package/dist/utils.js
CHANGED
@@ -3,15 +3,17 @@ Object.defineProperty(exports, "__esModule", { value: true });
 exports.stripBOM = exports.prependWithKey = exports.refToSchemaType = exports.isEmpty = exports.spliceAndProcess = exports.returnError = exports.returnResponse = exports.functionParamToSNSRecord = exports.functionParamToS3Record = exports.recordToFunctionParam = void 0;
 const utils_1 = require("@hestia-earth/utils");
 const log_1 = require("./log");
-const recordToFunctionParam = (event) => (0, utils_1.reduceUndefinedValues)('Sns' in event
-
-
-
-
-
-
-
-
+const recordToFunctionParam = (event) => (0, utils_1.reduceUndefinedValues)('Sns' in event
+    ? {
+        ...JSON.parse(event.Sns.Message),
+        timestamp: event.Sns.Timestamp ? new Date(event.Sns.Timestamp) : undefined
+    }
+    : {
+        bucket: event.s3.bucket.name,
+        key: event.s3.object.key,
+        filepath: event.s3.object.key,
+        timestamp: event.eventTime ? new Date(event.eventTime) : undefined
+    });
 exports.recordToFunctionParam = recordToFunctionParam;
 const functionParamToS3Record = ({ bucket, key, timestamp }) => ({
     s3: {
@@ -40,7 +42,7 @@ const returnResponse = (data, statusCode = 200) => {
 };
 exports.returnResponse = returnResponse;
 const defaultErrorStatus = 500;
-const returnError =
+const returnError = err => {
     (0, log_1.error)('handleError', err, err === null || err === void 0 ? void 0 : err.stack, err === null || err === void 0 ? void 0 : err.meta);
     const { error: errorContent, status = defaultErrorStatus } = typeof err === 'string'
         ? { error: err }
@@ -55,20 +57,21 @@ const spliceAndProcess = (docs, func, count = 1) => new Promise(resolve => {
         return resolve();
     }
     const remaining = docs.splice(docs.length - count);
-    void func(remaining)
+    void func(remaining)
+        .then(() => (0, exports.spliceAndProcess)(docs, func, count))
+        .then(resolve);
 });
 exports.spliceAndProcess = spliceAndProcess;
-const isEmpty = (value, minKeys = 1) => typeof value === 'object'
-
-    !value.length
-    Object.keys(value).filter(key => key !== 'type').length < minKeys
-    typeof value === 'undefined' || value === null || value === '';
+const isEmpty = (value, minKeys = 1) => typeof value === 'object'
+    ? Array.isArray(value)
+        ? !value.length
+        : Object.keys(value).filter(key => key !== 'type').length < minKeys
+    : typeof value === 'undefined' || value === null || value === '';
 exports.isEmpty = isEmpty;
 const refToSchemaType = ref => ref.substring(2).replace('.json#', '');
 exports.refToSchemaType = refToSchemaType;
-const prependWithKey = (data, key) => Object.keys(data)
-    .reduce((prev, curr) => ({ ...prev, [`${key}.${curr}`]: data[curr] }), {});
+const prependWithKey = (data, key) => Object.keys(data).reduce((prev, curr) => ({ ...prev, [`${key}.${curr}`]: data[curr] }), {});
 exports.prependWithKey = prependWithKey;
-const stripBOM = (content) => content.charCodeAt(0) ===
+const stripBOM = (content) => (content.charCodeAt(0) === 0xfeff ? content.slice(1) : content);
 exports.stripBOM = stripBOM;
 //# sourceMappingURL=utils.js.map
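The reworked recordToFunctionParam above accepts either an SNS-wrapped record or a raw S3 event record and normalises both to the same parameter object (undefined values are stripped by reduceUndefinedValues). A usage sketch in TypeScript; the import path is an assumption, since the helper is compiled into dist/utils.js and may not be re-exported from the package entry point:

import { recordToFunctionParam } from '@hestia-earth/pipeline-utils';

// SNS-wrapped record: the JSON message body is spread into the params and
// Sns.Timestamp (when present) becomes a Date.
const fromSns = recordToFunctionParam({
  Sns: {
    Message: JSON.stringify({ bucket: 'my-bucket', key: 'cycle.jsonld' }),
    Timestamp: '2023-12-01T10:00:00.000Z'
  }
} as any);

// Raw S3 event record: bucket, key and filepath come from the record and
// eventTime (when present) becomes a Date.
const fromS3 = recordToFunctionParam({
  eventTime: '2023-12-01T10:00:00.000Z',
  s3: { bucket: { name: 'my-bucket' }, object: { key: 'cycle.jsonld' } }
} as any);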
package/dist/utils.js.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"file":"utils.js","sourceRoot":"","sources":["../src/utils.ts"],"names":[],"mappings":";;;AACA,+CAA4D;AAG5D,+BAA8B;AAEvB,MAAM,qBAAqB,GAAG,CAAC,KAAqC,EAAkB,EAAE,CAC7F,IAAA,6BAAqB,EACnB,KAAK,IAAI,KAAK,CAAC,CAAC
+
{"version":3,"file":"utils.js","sourceRoot":"","sources":["../src/utils.ts"],"names":[],"mappings":";;;AACA,+CAA4D;AAG5D,+BAA8B;AAEvB,MAAM,qBAAqB,GAAG,CAAC,KAAqC,EAAkB,EAAE,CAC7F,IAAA,6BAAqB,EACnB,KAAK,IAAI,KAAK;IACZ,CAAC,CAAC;QACE,GAAG,IAAI,CAAC,KAAK,CAAC,KAAK,CAAC,GAAG,CAAC,OAAO,CAAC;QAChC,SAAS,EAAE,KAAK,CAAC,GAAG,CAAC,SAAS,CAAC,CAAC,CAAC,IAAI,IAAI,CAAC,KAAK,CAAC,GAAG,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC,SAAS;KAC3E;IACH,CAAC,CAAC;QACE,MAAM,EAAE,KAAK,CAAC,EAAE,CAAC,MAAM,CAAC,IAAI;QAC5B,GAAG,EAAE,KAAK,CAAC,EAAE,CAAC,MAAM,CAAC,GAAG;QACxB,QAAQ,EAAE,KAAK,CAAC,EAAE,CAAC,MAAM,CAAC,GAAG;QAC7B,SAAS,EAAE,KAAK,CAAC,SAAS,CAAC,CAAC,CAAC,IAAI,IAAI,CAAC,KAAK,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC,SAAS;KACnE,CACY,CAAC;AAbT,QAAA,qBAAqB,yBAaZ;AAEf,MAAM,uBAAuB,GAAG,CAAC,EAAE,MAAM,EAAE,GAAG,EAAE,SAAS,EAAkB,EAAE,EAAE,CACpF,CAAC;IACC,EAAE,EAAE;QACF,MAAM,EAAE,EAAE,IAAI,EAAE,MAAM,EAAE;QACxB,MAAM,EAAE,EAAE,GAAG,EAAE;KAChB;IACD,GAAG,CAAC,SAAS,CAAC,CAAC,CAAC,EAAE,SAAS,EAAE,SAAS,CAAC,MAAM,EAAE,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC;CACxD,CAA2B,CAAC;AAPlB,QAAA,uBAAuB,2BAOL;AAExB,MAAM,wBAAwB,GAAG,CAAC,EAAE,MAAM,EAAE,GAAG,EAAE,SAAS,EAAkB,EAAE,EAAE,CACrF,CAAC;IACC,GAAG,EAAE;QACH,OAAO,EAAE,IAAI,CAAC,SAAS,CAAC,EAAE,MAAM,EAAE,GAAG,EAAE,CAAC;KACzC;IACD,GAAG,CAAC,SAAS,CAAC,CAAC,CAAC,EAAE,SAAS,EAAE,SAAS,CAAC,MAAM,EAAE,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC;CACxD,CAA4B,CAAC;AANnB,QAAA,wBAAwB,4BAML;AAEzB,MAAM,cAAc,GAAG,CAAC,IAAS,EAAE,UAAU,GAAG,GAAG,EAAE,EAAE;IAC5D,OAAO;QACL,UAAU;QACV,OAAO,EAAE;YACP,6BAA6B,EAAE,GAAG;YAClC,kCAAkC,EAAE,IAAI;SACzC;QACD,IAAI,EAAE,IAAI,CAAC,SAAS,CAAC,IAAI,EAAE,IAAI,EAAE,CAAC,CAAC;KACpC,CAAC;AACJ,CAAC,CAAC;AATW,QAAA,cAAc,kBASzB;AAEF,MAAM,kBAAkB,GAAG,GAAG,CAAC;AAExB,MAAM,WAAW,GAAG,GAAG,CAAC,EAAE;IAC/B,IAAA,WAAK,EAAC,aAAa,EAAE,GAAG,EAAE,GAAG,aAAH,GAAG,uBAAH,GAAG,CAAE,KAAK,EAAE,GAAG,aAAH,GAAG,uBAAH,GAAG,CAAE,IAAI,CAAC,CAAC;IACjD,MAAM,EAAE,KAAK,EAAE,YAAY,EAAE,MAAM,GAAG,kBAAkB,EAAE,GACxD,OAAO,GAAG,KAAK,QAAQ;QACrB,CAAC,CAAC,EAAE,KAAK,EAAE,GAAG,EAAE;QAChB,CAAC,CAAC,GAAG,CAAC,IAAI;YACV,CAAC,CAAC,GAAG,CAAC,IAAI,CAAC,IAAI;YACf,CAAC,CAAC,EAAE,KAAK,EAAE,GAAG,CAAC,OAAO,EAAE,MAAM,EAAE,kBAAkB,EAAE,CAAC;IACzD,OAAO,IAAA,sBAAc,EAAC,EAAE,KAAK,EAAE,YAAY,EAAE,EAAE,MAAM,IAAI,kBAAkB,CAAC,CAAC;AAC/E,CAAC,CAAC;AATW,QAAA,WAAW,eAStB;AAEK,MAAM,gBAAgB,GAAG,CAAO,IAAS,EAAE,IAA4B,EAAE,KAAK,GAAG,CAAC,EAAE,EAAE,CAC3F,IAAI,OAAO,CAAW,OAAO,CAAC,EAAE;IAC9B,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,CAAC;QACjB,OAAO,OAAO,EAAE,CAAC;IACnB,CAAC;IACD,MAAM,SAAS,GAAG,IAAI,CAAC,MAAM,CAAC,IAAI,CAAC,MAAM,GAAG,KAAK,CAAC,CAAC;IAEnD,KAAK,IAAI,CAAC,SAAS,CAAC;SACjB,IAAI,CAAC,GAAG,EAAE,CAAC,IAAA,wBAAgB,EAAC,IAAI,EAAE,IAAI,EAAE,KAAK,CAAC,CAAC;SAC/C,IAAI,CAAC,OAAO,CAAC,CAAC;AACnB,CAAC,CAAC,CAAC;AAVQ,QAAA,gBAAgB,oBAUxB;AAEE,MAAM,OAAO,GAAG,CAAC,KAAU,EAAE,OAAO,GAAG,CAAC,EAAE,EAAE,CACjD,OAAO,KAAK,KAAK,QAAQ;IACvB,CAAC,CAAC,KAAK,CAAC,OAAO,CAAC,KAAK,CAAC;QACpB,CAAC,CAAC,CAAC,KAAK,CAAC,MAAM;QACf,CAAC,CAAC,MAAM,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC,MAAM,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,KAAK,MAAM,CAAC,CAAC,MAAM,GAAG,OAAO;IACrE,CAAC,CAAC,OAAO,KAAK,KAAK,WAAW,IAAI,KAAK,KAAK,IAAI,IAAI,KAAK,KAAK,EAAE,CAAC;AALxD,QAAA,OAAO,WAKiD;AAE9D,MAAM,eAAe,GAAG,GAAG,CAAC,EAAE,CAAC,GAAG,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC,OAAO,CAAC,QAAQ,EAAE,EAAE,CAAC,CAAC;AAAhE,QAAA,eAAe,mBAAiD;AAEtE,MAAM,cAAc,GAAG,CAAC,IAAS,EAAE,GAAW,EAAE,EAAE,CACvD,MAAM,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC,MAAM,CAAC,CAAC,IAAI,EAAE,IAAI,EAAE,EAAE,CAAC,CAAC,EAAE,GAAG,IAAI,EAAE,CAAC,GAAG,GAAG,IAAI,IAAI,EAAE,CAAC,EAAE,IAAI,CAAC,IAAI,CAAC,EAAE,CAAC,EAAE,EAAE,CAAC,CAAC;AADjF,QAAA,cAAc,kBACmE;AAEvF,MAAM,QAAQ,GAAG,CAAC,OAAe,EAAE,EAAE,CAAC,CAAC,OAAO,CAAC,UAAU,CAAC,CAAC,CAAC,KAAK,MAAM,CAAC,CAAC,C
AAC,OAAO,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,OAAO,CAAC,CAAC;AAAhG,QAAA,QAAQ,YAAwF"}
package/package.json
CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "@hestia-earth/pipeline-utils",
-  "version": "0.13.
+  "version": "0.13.1",
   "description": "Utilities for running pipelines",
   "main": "dist/index.js",
   "typings": "dist/index.d.ts",
@@ -43,38 +43,40 @@
     "@hestia-earth/utils": ">=0.11.2"
   },
   "devDependencies": {
-    "@commitlint/cli": "^17.
-    "@commitlint/config-conventional": "^17.
-    "@hestia-earth/api": "^0.21.
-    "@hestia-earth/eslint-config": "^0.0
-    "@hestia-earth/json-schema": "^22.
-    "@hestia-earth/schema": "^22.
-    "@hestia-earth/utils": "^0.11.
-    "@types/aws-lambda": "^8.10.
-    "@types/chai": "^4.3.
-    "@types/lodash.get": "^4.4.
-    "@types/lodash.pick": "^4.4.
-    "@types/mocha": "^10.0.
-    "@types/node": "^16.18.
-    "@typescript-eslint/eslint-plugin": "^5.
-    "@typescript-eslint/parser": "^5.
-    "chai": "^4.3.
+    "@commitlint/cli": "^17.8.1",
+    "@commitlint/config-conventional": "^17.8.1",
+    "@hestia-earth/api": "^0.21.13",
+    "@hestia-earth/eslint-config": "^0.1.0",
+    "@hestia-earth/json-schema": "^22.4.0",
+    "@hestia-earth/schema": "^22.4.0",
+    "@hestia-earth/utils": "^0.11.13",
+    "@types/aws-lambda": "^8.10.130",
+    "@types/chai": "^4.3.11",
+    "@types/lodash.get": "^4.4.9",
+    "@types/lodash.pick": "^4.4.9",
+    "@types/mocha": "^10.0.6",
+    "@types/node": "^16.18.68",
+    "@typescript-eslint/eslint-plugin": "^5.62.0",
+    "@typescript-eslint/parser": "^5.62.0",
+    "chai": "^4.3.10",
     "eslint": "^7.32.0",
     "eslint-plugin-jsdoc": "^30.7.13",
     "husky": "^4.3.8",
     "mocha": "^10.2.0",
     "nyc": "^15.1.0",
+    "prettier": "^3.1.1",
     "sinon": "^15.2.0",
     "source-map-support": "^0.5.21",
     "standard-version": "^9.5.0",
-    "ts-node": "^10.9.
-    "typescript": "^5.
+    "ts-node": "^10.9.2",
+    "typescript": "^5.3.3"
   },
   "husky": {
     "hooks": {
       "commit-msg": "commitlint -E HUSKY_GIT_PARAMS"
     }
   },
+  "prettier": "@hestia-earth/eslint-config/prettier.json",
   "engines": {
     "npm": ">=8.0.0",
     "node": ">=16.0.0"
package/src/api.ts
CHANGED
@@ -20,8 +20,7 @@ export const getNode = async (id: string, type: NodeType) => {
     const token = apiAccessToken();
     const headers = token ? { 'X-Access-Token': token } : {};
     return (await axios.get<any>(url, { headers })).data;
-  }
-  catch (_err) {
+  } catch (_err) {
     return null;
   }
 };
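For context, getNode (only the catch formatting changed here) fetches a node from the Hestia API and resolves to null on any failure. A usage sketch; the import paths and the NodeType member are assumptions, not confirmed by this diff:

import { NodeType } from '@hestia-earth/schema';
import { getNode } from '@hestia-earth/pipeline-utils';

const run = async () => {
  // Resolves to the node's data, or null when the request throws (see catch (_err) above).
  const cycle = await getNode('example-cycle-id', NodeType.Cycle);
  console.log(cycle);
};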
package/src/ec2.ts
CHANGED
@@ -7,16 +7,19 @@ import { isS3Mode } from './s3';
 
 const defaultInstanceIds = (process.env.START_INSTANCE_ID || '').split(',');
 
-export const startInstances = (functionName: string, instanceIds = defaultInstanceIds) =>
-
-
-
-
-
-
-
-
-
-
-
+export const startInstances = (functionName: string, instanceIds = defaultInstanceIds) =>
+  isS3Mode
+    ? from(
+        new AWS.EC2()
+          .startInstances({
+            InstanceIds: instanceIds
+          })
+          .promise()
+      ).pipe(
+        take(1),
+        map(res => {
+          debug(`EC2 start instances ${functionName}`, res);
+          return res;
+        })
+      )
+    : of(null);
package/src/find-nodes.ts
CHANGED
@@ -19,16 +19,19 @@ const filteredKeys = (keys: string[]) => ['@id', '@type', ...keys];
 
 export const search = async (query: any, limit = 2, keys: string[] = []) => {
   debug('search', JSON.stringify(query));
-  const hits =
-
-
-
-
-
-
-
-
-
+  const hits =
+    (
+      await client.search({
+        index: indexName,
+        size: limit,
+        body: {
+          _source: {
+            includes: filteredKeys(keys)
+          },
+          query
+        }
+      })
+    ).body.hits.hits || [];
   return hits.map(({ _source }) => _source);
 };
 
@@ -43,19 +46,18 @@ interface IQueries {
   should: ITypedQuery[];
 }
 
-const prependWithKey = (data: any, key: string) =>
-  .reduce((prev, [curr, value]) => ({ ...prev, [`${key}.${curr}`]: value }), {});
+const prependWithKey = (data: any, key: string) =>
+  Object.entries(data).reduce((prev, [curr, value]) => ({ ...prev, [`${key}.${curr}`]: value }), {});
 
 const buildFindNodeQuery = (type: SchemaType, must?: ITypedQuery[], should?: ITypedQuery[]) => ({
   bool: {
-    must: [
-
-
-
-
-
-
-    } : {})
+    must: [{ match: { '@type': type } }, ...must],
+    ...(should.length
+      ? {
+          should,
+          minimum_should_match: must.length === 0 ? 1 : 0
+        }
+      : {})
   }
 });
 
@@ -65,16 +67,19 @@ const canUseCache = (value: any) => {
   return totalSize + valueSize <= cacheMaxSize;
 };
 
-const setCache = (key: string, value: any) => canUseCache(value) ? cache.put(key, value) : null;
+const setCache = (key: string, value: any) => (canUseCache(value) ? cache.put(key, value) : null);
 
 export const cacheSearch = (query: any, ...args) => {
   const cacheKey = JSON.stringify(query);
   const cacheEntry = cache.get(cacheKey);
-  return
-
-
-
-
+  return (
+    cacheEntry ||
+    (async () => {
+      const results = await search(query, ...args);
+      setCache(cacheKey, results);
+      return results;
+    })()
+  );
 };
 
 export const execFindNode = async (query: any, keys: string[] = []) => {
@@ -91,31 +96,24 @@ const throwNotFound = () => {
 };
 
 // errors coming from ES, like server is overloaded
-const retryErrors = [
-  'ResponseError',
-  'DeserializationError'
-];
+const retryErrors = ['ResponseError', 'DeserializationError'];
 
 const retryFindNodeOnFailure = async (func: (retry: number) => Promise<any>, retries = 0) => {
   try {
     return await func(retries);
-  }
-  catch (err) {
+  } catch (err) {
     const queryError = retryErrors.includes(err.name) || err.message === 'search_phase_execution_exception';
-    return queryError && retries < MAX_RETRIES ?
-      retryFindNodeOnFailure(func, retries + 1) :
-      null;
+    return queryError && retries < MAX_RETRIES ? retryFindNodeOnFailure(func, retries + 1) : null;
   }
 };
 
 /* eslint-disable complexity */
 export const findNodeByQueries = (type: SchemaType, { must, should }: IQueries, keys?: string[]) =>
-  retryFindNodeOnFailure(async
+  retryFindNodeOnFailure(async retries => {
     const query = buildFindNodeQuery(type, must, should);
     try {
-      return must.length || should.length ? (await execFindNode(query, keys) || throwNotFound()
-    }
-    catch (err) {
+      return must.length || should.length ? (await execFindNode(query, keys)) || throwNotFound() : null;
+    } catch (err) {
       const logFunction = retries < MAX_RETRIES ? debug : error;
       logFunction('Node not found after', retries, 'retries', JSON.stringify(query), err);
       throw err;
@@ -139,22 +137,23 @@ export const findNodeById = async (type: NodeType | SchemaType, id: string, keys
   return node ? pick(node, filteredKeys(keys)) : null;
 };
 
-const findChildNodes = (schemas: definitions, data: any) =>
-
-  .
-
-
-
-
-
-
-
-  )
+const findChildNodes = (schemas: definitions, data: any) =>
+  Promise.all(
+    Object.entries(data)
+      .filter(([_key, value]) => isExpandable(value))
+      .map(([key, value]) => ({ key, value: value as any, isArray: Array.isArray(value) }))
+      .map(async ({ isArray, key, value }) => {
+        const res = isArray
+          ? await Promise.all(value.map(val => (isExpandable(val) ? findNodes(schemas, val) : val)))
+          : await findNodes(schemas, value);
+        return { [key]: res };
+      })
+  );
 
 const findNodeField = (type: SchemaType, field: string) =>
-  searchNormalizedEnabled && [SchemaType.Term].includes(type) && ['name'].includes(field)
-    `${field}Normalized`
-    `${field}.keyword`;
+  searchNormalizedEnabled && [SchemaType.Term].includes(type) && ['name'].includes(field)
+    ? `${field}Normalized`
+    : `${field}.keyword`;
 
 const findNodeQuery = (type: SchemaType, data: any, uniqueFields: string[], requiredFields: string[]): IQueries => ({
   must: uniqueFields
@@ -166,19 +165,24 @@ const findNodeQuery = (type: SchemaType, data: any, uniqueFields: string[], requ
 });
 
 const findChildNodesQuery = (schemas: definitions, data: any, uniqueFields: string[]) =>
-  uniqueFields.reduce(
-
-
-
-
-
-
-
-
-
-
-
+  uniqueFields.reduce(
+    (queries, key) => {
+      const {
+        queries: { must, should }
+      } = findNodeData(schemas, data[key]);
+      return {
+        must: [
+          ...queries.must,
+          ...must.map(({ match_phrase }) => ({ match_phrase: prependWithKey(match_phrase, key) }))
+        ],
+        should: [
+          ...queries.should,
+          ...should.map(({ match_phrase }) => ({ match_phrase: prependWithKey(match_phrase, key) }))
+        ]
+      };
+    },
+    { must: [], should: [] } as IQueries
+  );
 
 const compileQueries = (schemas: definitions, type: SchemaType, data: any): IQueries => {
   const schema = schemas[type];
@@ -201,13 +205,12 @@ const findByIdQueries = (id: string): IQueries => ({ must: [{ match_phrase: { '@
  *
  * @param param0 The node
  */
-const nodeExistingId = (type: SchemaType, { '@id': _id, id }: any) =>
-  type === SchemaType.Term ? _id || id : _id;
+const nodeExistingId = (type: SchemaType, { '@id': _id, id }: any) => (type === SchemaType.Term ? _id || id : _id);
 
 const nodeExistingType = ({ '@type': _type, type }: any) =>
-  isTypeValid({ type: _type || type }) ? (_type || type) as SchemaType : undefined;
+  isTypeValid({ type: _type || type }) ? ((_type || type) as SchemaType) : undefined;
 
-const schemaByType = (schemas: definitions, type?: SchemaType) => type && type in schemas ? schemas[type] : null;
+const schemaByType = (schemas: definitions, type?: SchemaType) => (type && type in schemas ? schemas[type] : null);
 
 export const findNodeData = (schemas: definitions, { '@context': context, ...node }: any) => {
   const type = nodeExistingType(node);
@@ -215,19 +218,21 @@ export const findNodeData = (schemas: definitions, { '@context': context, ...nod
   // remove keys from data
   const { '@id': _id, '@type': _2, id, type: _4, ...data } = node;
   const schema = schemaByType(schemas, type);
-  return schema
-
-
-
-
-
-
-
-
+  return schema
+    ? {
+        id: (_id || id) as string,
+        type,
+        existingId,
+        queries: existingId ? findByIdQueries(existingId) : compileQueries(schemas, type, data),
+        // can only fetch searchable properties
+        queryKeys: extendableProperties(schema),
+        data
+      }
+    : { data: node };
 };
 
 const findNode = async (id: string, type: SchemaType, queries: IQueries, queryKeys: string[]) =>
-  await findNodeById(type, id, queryKeys) || await findNodeByQueries(type, queries, queryKeys);
+  (await findNodeById(type, id, queryKeys)) || (await findNodeByQueries(type, queries, queryKeys));
 
 export const findNodes = async (
   schemas: definitions,
package/src/gitlab.ts
CHANGED
@@ -24,10 +24,7 @@ export const listFiles = async (path: string, page = 1, limit = 100): Promise<IG
     headers,
     params: { page, per_page: limit, ref: branch, path }
   });
-  return [
-    ...data,
-    ...(data.length === limit ? await listFiles(path, page + 1) : [])
-  ];
+  return [...data, ...(data.length === limit ? await listFiles(path, page + 1) : [])];
 };
 
 export const readFile = async ({ path }: Partial<IGitlabTreeFile>) => {
package/src/lambda.ts
CHANGED
@@ -5,19 +5,22 @@ import { take, map } from 'rxjs/operators';
 import { debug } from './log';
 import { IFunctionParam, isS3Mode } from './s3';
 
-export const invoke = (functionName: string, params: IFunctionParam) =>
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+export const invoke = (functionName: string, params: IFunctionParam | Record<string, any>) =>
+  isS3Mode
+    ? from(
+        new AWS.Lambda()
+          .invoke({
+            FunctionName: functionName,
+            InvocationType: 'Event',
+            LogType: 'None',
+            Payload: JSON.stringify(params)
+          })
+          .promise()
+      ).pipe(
+        take(1),
+        map(res => {
+          debug(`invoke ${functionName}`, res);
+          return res;
+        })
+      )
+    : of(null);
package/src/local.ts
CHANGED
@@ -17,25 +17,22 @@ const currentDir = (key: string) => {
 };
 
 export const loadTextFile = (folder: string) => (file: string) =>
-  mkdirAsObservable(currentDir(join(folder, file)), { recursive: true })
-
-
-
-
-  );
+  mkdirAsObservable(currentDir(join(folder, file)), { recursive: true }).pipe(
+    mergeMap(() => readFileAsObservable(join(folder, file), 'utf8')),
+    take(1),
+    map((res: any) => res as string)
+  );
 
-export const loadJSONFile =
-
-
-
-  );
+export const loadJSONFile =
+  (folder: string) =>
+  <T>(file: string) =>
+    loadTextFile(folder)(file).pipe(map(res => JSON.parse(stripBOM(res)) as T));
 
 export const uploadText = (folder: string) => (file: string, data: string) =>
-  mkdirAsObservable(currentDir(join(folder, file)), { recursive: true })
-
-
-
-  );
+  mkdirAsObservable(currentDir(join(folder, file)), { recursive: true }).pipe(
+    mergeMap(() => writeFileAsObservable(join(folder, file), data, 'utf8')),
+    map(() => data)
+  );
 
 export const uploadJSON = (folder: string) => (file: string, data) =>
   uploadText(folder)(file, JSON.stringify(data, null, 2));
@@ -43,8 +40,7 @@ export const uploadJSON = (folder: string) => (file: string, data) =>
 export const lastModified = (folder: string) => (file: string) => {
   try {
     return of(statSync(join(folder, file)).mtime);
-  }
-  catch (err) {
+  } catch (err) {
     return of(new Date());
   }
 };
@@ -53,8 +49,7 @@ export const fileExists = (folder: string) => (file: string) => {
   try {
     const success = statSync(join(folder, file)).isFile();
     return of(success);
-  }
-  catch (err) {
+  } catch (err) {
     return of(false);
   }
 };
@@ -62,11 +57,12 @@ export const fileExists = (folder: string) => (file: string) => {
 export const fileSize = (folder: string) => (file: string) => {
   try {
     return of(statSync(join(folder, file)).size);
-  }
-  catch (err) {
+  } catch (err) {
     return of(undefined as number);
   }
 };
 
-export const deleteFile =
-
+export const deleteFile =
+  (folder: string) =>
+  (file: string): Observable<any> =>
+    unlinkAsObservable(join(folder, file));