@knocklabs/cli 0.1.0-rc.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +21 -0
- package/README.md +377 -0
- package/bin/dev +17 -0
- package/bin/dev.cmd +3 -0
- package/bin/run +5 -0
- package/bin/run.cmd +3 -0
- package/dist/commands/ping.js +24 -0
- package/dist/commands/workflow/get.js +161 -0
- package/dist/commands/workflow/list.js +128 -0
- package/dist/commands/workflow/pull.js +139 -0
- package/dist/commands/workflow/push.js +128 -0
- package/dist/index.js +9 -0
- package/dist/lib/api-v1.js +76 -0
- package/dist/lib/base-command.js +91 -0
- package/dist/lib/helpers/date.js +21 -0
- package/dist/lib/helpers/dir-context.js +33 -0
- package/dist/lib/helpers/env.js +63 -0
- package/dist/lib/helpers/error.js +60 -0
- package/dist/lib/helpers/json.js +72 -0
- package/dist/lib/helpers/object.js +111 -0
- package/dist/lib/helpers/page.js +92 -0
- package/dist/lib/helpers/request.js +38 -0
- package/dist/lib/helpers/string.js +34 -0
- package/dist/lib/marshal/conditions/helpers.js +26 -0
- package/dist/lib/marshal/conditions/index.js +17 -0
- package/dist/lib/marshal/conditions/types.js +4 -0
- package/dist/lib/marshal/shared/types.js +4 -0
- package/dist/lib/marshal/workflow/helpers.js +139 -0
- package/dist/lib/marshal/workflow/index.js +19 -0
- package/dist/lib/marshal/workflow/reader.js +233 -0
- package/dist/lib/marshal/workflow/types.js +15 -0
- package/dist/lib/marshal/workflow/writer.js +214 -0
- package/dist/lib/run-context.js +79 -0
- package/dist/lib/user-config.js +83 -0
- package/oclif.manifest.json +220 -0
- package/package.json +94 -0
|
@@ -0,0 +1,233 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
Object.defineProperty(exports, "__esModule", {
|
|
3
|
+
value: true
|
|
4
|
+
});
|
|
5
|
+
// Defines every entry of `all` on `target` as an enumerable, getter-backed
// property — the compiled equivalent of this module's named ES exports.
function _export(target, all) {
    for (const name in all) {
        const getter = all[name];
        Object.defineProperty(target, name, {
            enumerable: true,
            get: getter
        });
    }
}
|
|
11
|
+
_export(exports, {
|
|
12
|
+
validateTemplateFilePathFormat: ()=>validateTemplateFilePathFormat,
|
|
13
|
+
readWorkflowDir: ()=>readWorkflowDir
|
|
14
|
+
});
|
|
15
|
+
const _nodePath = /*#__PURE__*/ _interopRequireWildcard(require("node:path"));
|
|
16
|
+
const _fsExtra = /*#__PURE__*/ _interopRequireWildcard(require("fs-extra"));
|
|
17
|
+
const _lodash = require("lodash");
|
|
18
|
+
const _error = require("../../helpers/error");
|
|
19
|
+
const _json = require("../../helpers/json");
|
|
20
|
+
const _object = require("../../helpers/object");
|
|
21
|
+
const _helpers = require("./helpers");
|
|
22
|
+
const _types = require("./types");
|
|
23
|
+
// Compiler-generated (SWC) CommonJS interop helper.
// Lazily creates a pair of WeakMap caches (Babel-style vs Node-style interop)
// on first call, then replaces itself with a closure that simply picks the
// right cache — so the WeakMaps are only allocated if interop is used.
function _getRequireWildcardCache(nodeInterop) {
    // Environments without WeakMap get no caching at all.
    if (typeof WeakMap !== "function") return null;
    var cacheBabelInterop = new WeakMap();
    var cacheNodeInterop = new WeakMap();
    // Self-replacement: subsequent calls skip straight to the cache lookup.
    return (_getRequireWildcardCache = function(nodeInterop) {
        return nodeInterop ? cacheNodeInterop : cacheBabelInterop;
    })(nodeInterop);
}
// Compiler-generated (SWC) helper emulating `import * as ns` over a
// CommonJS `require()` result.
function _interopRequireWildcard(obj, nodeInterop) {
    // Already an ES module namespace (Babel-style interop): use as-is.
    if (!nodeInterop && obj && obj.__esModule) {
        return obj;
    }
    // Primitives (and null) can only be exposed via a `default` export.
    if (obj === null || typeof obj !== "object" && typeof obj !== "function") {
        return {
            default: obj
        };
    }
    // Reuse a previously-built namespace object for this module, if cached.
    var cache = _getRequireWildcardCache(nodeInterop);
    if (cache && cache.has(obj)) {
        return cache.get(obj);
    }
    // Build a fresh namespace object, copying own properties across and
    // preserving getters/setters where the runtime supports descriptors.
    var newObj = {};
    var hasPropertyDescriptor = Object.defineProperty && Object.getOwnPropertyDescriptor;
    for(var key in obj){
        if (key !== "default" && Object.prototype.hasOwnProperty.call(obj, key)) {
            var desc = hasPropertyDescriptor ? Object.getOwnPropertyDescriptor(obj, key) : null;
            if (desc && (desc.get || desc.set)) {
                Object.defineProperty(newObj, key, desc);
            } else {
                newObj[key] = obj[key];
            }
        }
    }
    // The original module object doubles as the namespace's `default`.
    newObj.default = obj;
    if (cache) {
        cache.set(obj, newObj);
    }
    return newObj;
}
|
|
62
|
+
/*
 * Returns true only when the given value is a relative path string that stays
 * inside the workflow directory (i.e. it is not absolute and does not escape
 * the directory via "..").
 */ const validateTemplateFilePathFormat = (relpath, workflowDirCtx)=>{
    const isRelativeString =
        typeof relpath === "string" && !_nodePath.isAbsolute(relpath);
    if (!isRelativeString) return false;
    // Resolve against the workflow dir, then check the resolved location did
    // not end up outside of it.
    const resolved = _nodePath.resolve(workflowDirCtx.abspath, relpath);
    const diff = _nodePath.relative(workflowDirCtx.abspath, resolved);
    return !diff.startsWith("..");
};
|
|
69
|
+
/*
 * Validate that a file exists at the given relative path in the directory.
 */ const validateTemplateFileExists = async (relpath, workflowDirCtx)=>
    _fsExtra.pathExists(_nodePath.resolve(workflowDirCtx.abspath, relpath));
|
|
75
|
+
/*
 * Reads a template file and returns a [content, errors] pair.
 *
 * JSON files are parsed first (so parse errors can be surfaced) and then
 * re-serialized into a string; any other file is read verbatim as utf8.
 */ const readTemplateFile = async (relpath, workflowDirCtx)=>{
    const abspath = _nodePath.resolve(workflowDirCtx.abspath, relpath);
    const isJsonFile = abspath.toLowerCase().endsWith(".json");
    if (isJsonFile) {
        const [obj, errors] = await (0, _json.readJson)(abspath);
        return [obj && JSON.stringify(obj), errors];
    }
    const content = await _fsExtra.readFile(abspath, "utf8");
    return [content, []];
};
|
|
97
|
+
/*
 * Validates that a given value is a valid template file path and the file
 * actually exists, before reading the file content.
 *
 * Returns a [content, error] pair where exactly one side is defined. Mutates
 * `extractedFilePaths` by recording `val` as seen, so repeated references to
 * the same file within one workflow are rejected as duplicates.
 */ const maybeReadTemplateFile = async (val, workflowDirCtx, extractedFilePaths, pathToFieldStr)=>{
    // Validate the file path format, and that it is unique per workflow.
    // Fix: use an own-property check instead of the `in` operator, which also
    // matched inherited Object.prototype keys (e.g. a template file named
    // "toString" was wrongly rejected as a duplicate). The separate
    // `typeof val !== "string"` check was redundant:
    // validateTemplateFilePathFormat already rejects non-strings.
    const isDuplicate = Object.prototype.hasOwnProperty.call(extractedFilePaths, val);
    if (!validateTemplateFilePathFormat(val, workflowDirCtx) || isDuplicate) {
        const error = new _error.JsonError("must be a relative path string to a unique file within the directory", pathToFieldStr);
        return [
            undefined,
            error
        ];
    }
    // Keep track of all the extracted file paths that have been seen, so we
    // can validate each file path's uniqueness as we traverse.
    extractedFilePaths[val] = true;
    // Check a file actually exists at the given file path.
    const exists = await validateTemplateFileExists(val, workflowDirCtx);
    if (!exists) {
        const error = new _error.JsonError("must be a relative path string to a file that exists", pathToFieldStr);
        return [
            undefined,
            error
        ];
    }
    // Read the template file and inline the content into the workflow json
    // under the same field name but without the @ filepath marker.
    const [content, contentErrors] = await readTemplateFile(val, workflowDirCtx);
    if (contentErrors.length > 0) {
        const error = new _error.JsonError(`points to a file with invalid content (${val})\n\n` + (0, _error.formatErrors)(contentErrors), pathToFieldStr);
        return [
            undefined,
            error
        ];
    }
    return [
        content,
        undefined
    ];
};
|
|
136
|
+
/*
 * Walks every channel step's template in the workflow json, reads any
 * extracted template files referenced via filepath-marked fields, and inlines
 * their content back into the workflow json (mutating `workflowJson` in place
 * via lodash `set`).
 *
 * Returns a [workflowJson, errors] pair; validation errors are accumulated
 * rather than thrown, so callers can report them all at once.
 *
 * NOTE(review): `FILEPATH_MARKED_RE.test` is called repeatedly; if that regex
 * carries a /g or /y flag, `.test()` is stateful via lastIndex — confirm in
 * ./helpers that it does not.
 */ const compileTemplateFiles = async (workflowDirCtx, workflowJson)=>{
    const errors = [];
    // Map of seen extracted file paths, used for per-workflow uniqueness.
    const extractedFilePaths = {};
    // Mutable cursor tracking the json path of whatever we are validating;
    // mutated via push/reset/checkout as we descend — order is significant.
    const objPath = new _object.ObjPath();
    // 1. Make sure we have a list of steps to look through.
    if (workflowJson.steps === undefined) {
        return [
            workflowJson,
            errors
        ];
    }
    if (!Array.isArray(workflowJson.steps)) {
        errors.push(new _error.JsonError("must be an array of workflow steps", objPath.to("steps").str));
        return [
            workflowJson,
            errors
        ];
    }
    // 2. Make sure we can reach `steps[i].template` for channel steps.
    const steps = workflowJson.steps || [];
    const pathToSteps = objPath.push("steps").checkout();
    for (const [stepIdx, step] of steps.entries()){
        // Rewind the path cursor to "steps" then point at this step's index.
        objPath.reset(pathToSteps).push(stepIdx);
        if (!(0, _lodash.isPlainObject)(step)) {
            errors.push(new _error.JsonError("must be a workflow step object", objPath.str));
            continue;
        }
        if (step.type === undefined) {
            errors.push(new _error.JsonError("must have a `type` field", objPath.str));
            continue;
        }
        // Not a channel step, nothing more to do.
        if (step.type !== _types.StepType.Channel) {
            continue;
        }
        if (step.template === undefined) {
            errors.push(new _error.JsonError("must have a `template` field containing a template object", objPath.str));
            continue;
        }
        if (!(0, _lodash.isPlainObject)(step.template)) {
            errors.push(new _error.JsonError("must be a template object", objPath.to("template").str));
            continue;
        }
        // 3. For a given template, look for any extracted template content, read
        // the extracted template files, then inline the content.
        objPath.push("template");
        for (const [field, val] of Object.entries(step.template)){
            // Settings fields are handled separately by the loop below.
            if (field.startsWith("settings")) continue;
            // Only fields carrying the @ filepath marker reference a file.
            if (!_helpers.FILEPATH_MARKED_RE.test(field)) continue;
            const pathToFieldStr = objPath.to(field).str;
            // eslint-disable-next-line no-await-in-loop
            const [content, error] = await maybeReadTemplateFile(val, workflowDirCtx, extractedFilePaths, pathToFieldStr);
            if (error) {
                errors.push(error);
                continue;
            }
            // Write the file content back under the field name with the
            // marker stripped (mutates workflowJson in place).
            const inlinePathStr = pathToFieldStr.replace(_helpers.FILEPATH_MARKED_RE, "");
            (0, _lodash.set)(workflowJson, inlinePathStr, content);
        }
        if (!step.template.settings) continue;
        objPath.push("settings");
        // Same inlining pass for the nested template settings object.
        for (const [field, val] of Object.entries(step.template.settings)){
            if (!_helpers.FILEPATH_MARKED_RE.test(field)) continue;
            const pathToFieldStr = objPath.to(field).str;
            // eslint-disable-next-line no-await-in-loop
            const [content, error] = await maybeReadTemplateFile(val, workflowDirCtx, extractedFilePaths, pathToFieldStr);
            if (error) {
                errors.push(error);
                continue;
            }
            const inlinePathStr = pathToFieldStr.replace(_helpers.FILEPATH_MARKED_RE, "");
            (0, _lodash.set)(workflowJson, inlinePathStr, content);
        }
    }
    // TODO: Consider validating content for liquid syntax too maybe?
    return [
        workflowJson,
        errors
    ];
};
|
|
216
|
+
/*
 * Reads a workflow directory and returns a [workflowJson, errors] pair.
 *
 * Options:
 * - withTemplateFiles: also read extracted template files and inline their
 *   content into the returned workflow json.
 * - withReadonlyField: keep the "__readonly" field instead of stripping it.
 *
 * Throws when the directory is missing or contains no workflow json file.
 */ const readWorkflowDir = async (workflowDirCtx, opts = {})=>{
    const { abspath } = workflowDirCtx;
    const { withTemplateFiles = false, withReadonlyField = false } = opts;
    const dirExists = await _fsExtra.pathExists(abspath);
    if (!dirExists) throw new Error(`${abspath} does not exist`);
    const workflowJsonPath = await (0, _helpers.lsWorkflowJson)(abspath);
    if (!workflowJsonPath) throw new Error(`${abspath} is not a workflow directory`);
    const result = await (0, _json.readJson)(workflowJsonPath);
    // If parsing produced no object, surface the [undefined, errors] pair as-is.
    if (!result[0]) return result;
    let [workflowJson] = result;
    if (!withReadonlyField) {
        workflowJson = (0, _object.omitDeep)(workflowJson, [
            "__readonly"
        ]);
    }
    if (withTemplateFiles) {
        return compileTemplateFiles(workflowDirCtx, workflowJson);
    }
    return [
        workflowJson,
        []
    ];
};
|
|
@@ -0,0 +1,15 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
Object.defineProperty(exports, "__esModule", {
|
|
3
|
+
value: true
|
|
4
|
+
});
|
|
5
|
+
Object.defineProperty(exports, "StepType", {
|
|
6
|
+
enumerable: true,
|
|
7
|
+
get: ()=>StepType
|
|
8
|
+
});
|
|
9
|
+
// Enumeration of the workflow step kinds understood by the CLI.
var StepType = {};
StepType.Channel = "channel";
StepType.Batch = "batch";
StepType.Delay = "delay";
StepType.HttpFetch = "http_fetch";
|
|
@@ -0,0 +1,214 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
Object.defineProperty(exports, "__esModule", {
|
|
3
|
+
value: true
|
|
4
|
+
});
|
|
5
|
+
// Defines every entry of `all` on `target` as an enumerable, getter-backed
// property — the compiled equivalent of this module's named ES exports.
function _export(target, all) {
    for (const name in all) {
        const getter = all[name];
        Object.defineProperty(target, name, {
            enumerable: true,
            get: getter
        });
    }
}
|
|
11
|
+
_export(exports, {
|
|
12
|
+
writeWorkflowDir: ()=>writeWorkflowDir,
|
|
13
|
+
buildWorkflowDirBundle: ()=>buildWorkflowDirBundle,
|
|
14
|
+
toWorkflowJson: ()=>toWorkflowJson
|
|
15
|
+
});
|
|
16
|
+
const _nodePath = /*#__PURE__*/ _interopRequireWildcard(require("node:path"));
|
|
17
|
+
const _fsExtra = /*#__PURE__*/ _interopRequireWildcard(require("fs-extra"));
|
|
18
|
+
const _lodash = require("lodash");
|
|
19
|
+
const _env = require("../../helpers/env");
|
|
20
|
+
const _json = require("../../helpers/json");
|
|
21
|
+
const _object = require("../../helpers/object");
|
|
22
|
+
const _helpers = require("./helpers");
|
|
23
|
+
const _reader = require("./reader");
|
|
24
|
+
const _types = require("./types");
|
|
25
|
+
// Compiler-generated (SWC) CommonJS interop helper.
// Lazily creates a pair of WeakMap caches (Babel-style vs Node-style interop)
// on first call, then replaces itself with a closure that simply picks the
// right cache — so the WeakMaps are only allocated if interop is used.
function _getRequireWildcardCache(nodeInterop) {
    // Environments without WeakMap get no caching at all.
    if (typeof WeakMap !== "function") return null;
    var cacheBabelInterop = new WeakMap();
    var cacheNodeInterop = new WeakMap();
    // Self-replacement: subsequent calls skip straight to the cache lookup.
    return (_getRequireWildcardCache = function(nodeInterop) {
        return nodeInterop ? cacheNodeInterop : cacheBabelInterop;
    })(nodeInterop);
}
// Compiler-generated (SWC) helper emulating `import * as ns` over a
// CommonJS `require()` result.
function _interopRequireWildcard(obj, nodeInterop) {
    // Already an ES module namespace (Babel-style interop): use as-is.
    if (!nodeInterop && obj && obj.__esModule) {
        return obj;
    }
    // Primitives (and null) can only be exposed via a `default` export.
    if (obj === null || typeof obj !== "object" && typeof obj !== "function") {
        return {
            default: obj
        };
    }
    // Reuse a previously-built namespace object for this module, if cached.
    var cache = _getRequireWildcardCache(nodeInterop);
    if (cache && cache.has(obj)) {
        return cache.get(obj);
    }
    // Build a fresh namespace object, copying own properties across and
    // preserving getters/setters where the runtime supports descriptors.
    var newObj = {};
    var hasPropertyDescriptor = Object.defineProperty && Object.getOwnPropertyDescriptor;
    for(var key in obj){
        if (key !== "default" && Object.prototype.hasOwnProperty.call(obj, key)) {
            var desc = hasPropertyDescriptor ? Object.getOwnPropertyDescriptor(obj, key) : null;
            if (desc && (desc.get || desc.set)) {
                Object.defineProperty(newObj, key, desc);
            } else {
                newObj[key] = obj[key];
            }
        }
    }
    // The original module object doubles as the namespace's `default`.
    newObj.default = obj;
    if (cache) {
        cache.set(obj, newObj);
    }
    return newObj;
}
|
|
64
|
+
/*
 * For a given workflow step and a template field, return the template file path
 * we can extract out the content to.
 *
 * Note, this is a default "recommended" convention but the template file can
 * be located at any arbitrary path (as long as it is a relative path that is
 * inside the workflow directory and unique to the field)
 */ const newTemplateFilePath = (stepRef, fileName, fileExt)=>{
    const basename = `${fileName}.${fileExt}`;
    return _nodePath.join(stepRef, basename).toLowerCase();
};
|
|
72
|
+
/*
 * For a given workflow step and a template field, return the path of object
 * which we can use to check whether the field has been extracted (hence, with
 * the filepath marker).
 */ const objPathToExtractableField = (stepRef, pathToFieldInTemplate)=>{
    const fieldWithMarker = pathToFieldInTemplate + _helpers.FILEPATH_MARKER;
    return [stepRef, "template", fieldWithMarker].join(".");
};
|
|
77
|
+
/*
 * Sanitize the workflow content into a format that's appropriate for reading
 * and writing, by stripping out any annotation fields and handling readonly
 * fields.
 *
 * Readonly fields (listed in the workflow's __annotation) are moved under the
 * dedicated "__readonly" key; all "__annotation" keys are then removed.
 */ const toWorkflowJson = (workflow)=>{
    // Move read only fields of a workflow under the dedicated field "__readonly".
    const readonlyFields = (workflow.__annotation && workflow.__annotation.readonly_fields) || [];
    const [readonly, remainder] = (0, _object.split)(workflow, readonlyFields);
    // Fix: this local was previously misspelled as "worklfowJson".
    const workflowJson = {
        ...remainder,
        __readonly: readonly
    };
    // Strip out all schema annotations, so not to expose them to end users.
    return (0, _object.omitDeep)(workflowJson, [
        "__annotation"
    ]);
};
|
|
95
|
+
/*
 * Compile and return extractable fields settings from the template and its
 * template settings if present.
 *
 * Template-level extractable fields come from `template.__annotation`;
 * settings-level ones come from `template.settings.__annotation` and are
 * merged in with each key prefixed by "settings." — producing a flat map like:
 *
 *   {
 *     subject: { default: false, file_ext: "txt" },
 *     json_body: { default: true, file_ext: "json" },
 *     html_body: { default: true, file_ext: "html" },
 *     text_body: { default: true, file_ext: "txt" },
 *     "settings.pre_content": { default: true, file_ext: "txt" },
 *   }
 */ const collateExtractableFields = (template)=>{
    const annotation = template.__annotation;
    const extractableFields = (annotation && annotation.extractable_fields) || {};
    if (!template.settings) return extractableFields;
    // Merge in the settings-level extractable fields, prefixing each key with
    // "settings." so the paths resolve from the template root.
    const settingsAnnotation = template.settings.__annotation;
    const rawSettingsFields = (settingsAnnotation && settingsAnnotation.extractable_fields) || {};
    const prefixedSettingsFields = {};
    for (const [key, val] of Object.entries(rawSettingsFields)) {
        prefixedSettingsFields[`settings.${key}`] = val;
    }
    return {
        ...extractableFields,
        ...prefixedSettingsFields
    };
};
|
|
154
|
+
/*
 * Builds a "bundle" mapping relative file paths to file contents for a
 * workflow directory: one entry per extracted template file, plus one entry
 * (under WORKFLOW_JSON) for the sanitized workflow json itself.
 *
 * Works on a deep clone of `remoteWorkflow`; `localWorkflow` (if previously
 * pulled) is consulted so fields the user already extracted stay extracted at
 * their existing file paths.
 */ const buildWorkflowDirBundle = (workflowDirCtx, remoteWorkflow, localWorkflow = {})=>{
    const bundle = {};
    // Clone so the extraction below never mutates the caller's workflow.
    const mutWorkflow = (0, _lodash.cloneDeep)(remoteWorkflow);
    // Index local steps by ref for cheap lookups of extracted file paths.
    const localWorkflowStepsByRef = (0, _lodash.keyBy)(localWorkflow.steps || [], "ref");
    // For each channel step, extract out any template content into seperate
    // template files where appropriate.
    for (const step of mutWorkflow.steps){
        if (step.type !== _types.StepType.Channel) continue;
        if (!step.template) continue;
        const template = step.template;
        const extractableFields = collateExtractableFields(template);
        for (const [pathToField, { default: extractByDefault , file_ext: fileExt }] of Object.entries(extractableFields)){
            // If this template doesn't have this path, then it's not relevant so
            // nothing more to do here.
            if (!(0, _lodash.has)(template, pathToField)) continue;
            // If the field at this path is extracted in the local workflow, then
            // always extract; otherwise extract based on the field settings default.
            const extractedTemplateFilePath = (0, _lodash.get)(localWorkflowStepsByRef, objPathToExtractableField(step.ref, pathToField));
            const isValidTemplateFilePath = Boolean(extractedTemplateFilePath) && (0, _reader.validateTemplateFilePathFormat)(extractedTemplateFilePath, workflowDirCtx);
            if (!isValidTemplateFilePath && !extractByDefault) continue;
            // Add the template content being extracted and its relative file path
            // within the workflow directory to the bundle.
            // NOTE(review): when the local path exists but failed validation and
            // extractByDefault is true, the invalid local path is still used
            // here (`extractedTemplateFilePath ||` checks truthiness, not
            // validity) — confirm this is intentional.
            const relpath = extractedTemplateFilePath || newTemplateFilePath(step.ref, pathToField, fileExt);
            // `set` with a single-element array key treats the whole relpath as
            // one bundle key (dots in filenames are not path separators here).
            (0, _lodash.set)(bundle, [
                relpath
            ], (0, _lodash.get)(template, pathToField));
            // Replace the extracted field content with the file path, and
            // append the @ suffix to the field name to mark it as such.
            (0, _lodash.set)(template, [
                `${pathToField}${_helpers.FILEPATH_MARKER}`
            ], relpath);
            (0, _lodash.unset)(template, pathToField);
        }
    }
    // Finally, prepare the workflow data to be written into a workflow json file.
    return (0, _lodash.set)(bundle, [
        _helpers.WORKFLOW_JSON
    ], toWorkflowJson(mutWorkflow));
};
|
|
193
|
+
/*
 * Writes the given remote workflow out as a workflow directory: a workflow
 * json file plus any extracted template files. The target directory is wiped
 * and rebuilt; on any write failure it is removed again before rethrowing.
 */ const writeWorkflowDir = async (remoteWorkflow, workflowDirCtx)=>{
    // If the workflow directory exists on the file system (i.e. previously
    // pulled before), then read the workflow file to use as a reference.
    let localWorkflow;
    if (workflowDirCtx.exists) {
        [localWorkflow] = await (0, _reader.readWorkflowDir)(workflowDirCtx);
    }
    const bundle = buildWorkflowDirBundle(workflowDirCtx, remoteWorkflow, localWorkflow);
    // Tests write into a sandbox dir instead of the real target location.
    const workflowDirPath = _env.isTestEnv ? _nodePath.join(_env.sandboxDir, remoteWorkflow.key) : workflowDirCtx.abspath;
    // Writes a single bundle entry; the workflow json entry is pretty-printed.
    const writeBundleEntry = ([relpath, fileContent])=>{
        const filePath = _nodePath.join(workflowDirPath, relpath);
        if (relpath === _helpers.WORKFLOW_JSON) {
            return _fsExtra.outputJson(filePath, fileContent, {
                spaces: _json.DOUBLE_SPACES
            });
        }
        return _fsExtra.outputFile(filePath, fileContent);
    };
    try {
        // TODO(KNO-2794): Should rather clean up any orphaned template files
        // individually after successfully writing the workflow directory.
        await _fsExtra.remove(workflowDirPath);
        await Promise.all(Object.entries(bundle).map(writeBundleEntry));
    } catch (error) {
        // Leave no partially-written directory behind.
        await _fsExtra.remove(workflowDirPath);
        throw error;
    }
};
|
|
@@ -0,0 +1,79 @@
|
|
|
1
|
+
/*
|
|
2
|
+
* Module for surveying the cwd location of the command run and its parent dirs
|
|
3
|
+
* to gather context about a knock resource or the project that the command may
|
|
4
|
+
* be refering to.
|
|
5
|
+
*/ "use strict";
|
|
6
|
+
Object.defineProperty(exports, "__esModule", {
|
|
7
|
+
value: true
|
|
8
|
+
});
|
|
9
|
+
Object.defineProperty(exports, "load", {
|
|
10
|
+
enumerable: true,
|
|
11
|
+
get: ()=>load
|
|
12
|
+
});
|
|
13
|
+
const _nodePath = /*#__PURE__*/ _interopRequireWildcard(require("node:path"));
|
|
14
|
+
const _workflow = /*#__PURE__*/ _interopRequireWildcard(require("./marshal/workflow"));
|
|
15
|
+
// Compiler-generated (SWC) CommonJS interop helper.
// Lazily creates a pair of WeakMap caches (Babel-style vs Node-style interop)
// on first call, then replaces itself with a closure that simply picks the
// right cache — so the WeakMaps are only allocated if interop is used.
function _getRequireWildcardCache(nodeInterop) {
    // Environments without WeakMap get no caching at all.
    if (typeof WeakMap !== "function") return null;
    var cacheBabelInterop = new WeakMap();
    var cacheNodeInterop = new WeakMap();
    // Self-replacement: subsequent calls skip straight to the cache lookup.
    return (_getRequireWildcardCache = function(nodeInterop) {
        return nodeInterop ? cacheNodeInterop : cacheBabelInterop;
    })(nodeInterop);
}
// Compiler-generated (SWC) helper emulating `import * as ns` over a
// CommonJS `require()` result.
function _interopRequireWildcard(obj, nodeInterop) {
    // Already an ES module namespace (Babel-style interop): use as-is.
    if (!nodeInterop && obj && obj.__esModule) {
        return obj;
    }
    // Primitives (and null) can only be exposed via a `default` export.
    if (obj === null || typeof obj !== "object" && typeof obj !== "function") {
        return {
            default: obj
        };
    }
    // Reuse a previously-built namespace object for this module, if cached.
    var cache = _getRequireWildcardCache(nodeInterop);
    if (cache && cache.has(obj)) {
        return cache.get(obj);
    }
    // Build a fresh namespace object, copying own properties across and
    // preserving getters/setters where the runtime supports descriptors.
    var newObj = {};
    var hasPropertyDescriptor = Object.defineProperty && Object.getOwnPropertyDescriptor;
    for(var key in obj){
        if (key !== "default" && Object.prototype.hasOwnProperty.call(obj, key)) {
            var desc = hasPropertyDescriptor ? Object.getOwnPropertyDescriptor(obj, key) : null;
            if (desc && (desc.get || desc.set)) {
                Object.defineProperty(newObj, key, desc);
            } else {
                newObj[key] = obj[key];
            }
        }
    }
    // The original module object doubles as the namespace's `default`.
    newObj.default = obj;
    if (cache) {
        cache.set(obj, newObj);
    }
    return newObj;
}
|
|
54
|
+
/*
 * Walks from `currDir` up toward the filesystem root, filling in
 * `ctx.resourceDir` when a workflow directory is found along the way. Stops as
 * soon as the resource context is identified, or upon reaching the root.
 *
 * Note: mutates and returns the given `ctx`.
 */ const evaluateRecursively = async (ctx, currDir)=>{
    let dir = currDir;
    for (;;) {
        // If we reached the root of the filesystem, nothing more to do.
        const { root } = _nodePath.parse(dir);
        if (dir === root) return ctx;
        // Check if we are inside a workflow directory, and if so update the context.
        const foundWorkflowDir = await _workflow.isWorkflowDir(dir);
        if (!ctx.resourceDir && foundWorkflowDir) {
            ctx.resourceDir = {
                type: "workflow",
                key: _nodePath.basename(dir),
                abspath: dir,
                exists: true
            };
        }
        // If we've identified the resource context, no need to go further.
        // TODO: In the future also check for knock project dir context.
        if (ctx.resourceDir) return ctx;
        dir = _nodePath.resolve(dir, "..");
    }
};
|
|
74
|
+
/*
 * Loads the run context for the current command invocation by surveying the
 * cwd and its ancestor directories for a knock resource directory.
 */ const load = async ()=>{
    const cwd = process.cwd();
    return evaluateRecursively({ cwd }, cwd);
};
|
|
@@ -0,0 +1,83 @@
|
|
|
1
|
+
/*
|
|
2
|
+
* Module for loading and retrieving user configs from a knock config file.
|
|
3
|
+
*/ "use strict";
|
|
4
|
+
Object.defineProperty(exports, "__esModule", {
|
|
5
|
+
value: true
|
|
6
|
+
});
|
|
7
|
+
Object.defineProperty(exports, "default", {
|
|
8
|
+
enumerable: true,
|
|
9
|
+
get: ()=>_default
|
|
10
|
+
});
|
|
11
|
+
const _nodePath = /*#__PURE__*/ _interopRequireWildcard(require("node:path"));
|
|
12
|
+
const _fsExtra = /*#__PURE__*/ _interopRequireWildcard(require("fs-extra"));
|
|
13
|
+
const _yup = /*#__PURE__*/ _interopRequireWildcard(require("yup"));
|
|
14
|
+
const _env = require("./helpers/env");
|
|
15
|
+
// Compiler-generated (SWC) CommonJS interop helper.
// Lazily creates a pair of WeakMap caches (Babel-style vs Node-style interop)
// on first call, then replaces itself with a closure that simply picks the
// right cache — so the WeakMaps are only allocated if interop is used.
function _getRequireWildcardCache(nodeInterop) {
    // Environments without WeakMap get no caching at all.
    if (typeof WeakMap !== "function") return null;
    var cacheBabelInterop = new WeakMap();
    var cacheNodeInterop = new WeakMap();
    // Self-replacement: subsequent calls skip straight to the cache lookup.
    return (_getRequireWildcardCache = function(nodeInterop) {
        return nodeInterop ? cacheNodeInterop : cacheBabelInterop;
    })(nodeInterop);
}
// Compiler-generated (SWC) helper emulating `import * as ns` over a
// CommonJS `require()` result.
function _interopRequireWildcard(obj, nodeInterop) {
    // Already an ES module namespace (Babel-style interop): use as-is.
    if (!nodeInterop && obj && obj.__esModule) {
        return obj;
    }
    // Primitives (and null) can only be exposed via a `default` export.
    if (obj === null || typeof obj !== "object" && typeof obj !== "function") {
        return {
            default: obj
        };
    }
    // Reuse a previously-built namespace object for this module, if cached.
    var cache = _getRequireWildcardCache(nodeInterop);
    if (cache && cache.has(obj)) {
        return cache.get(obj);
    }
    // Build a fresh namespace object, copying own properties across and
    // preserving getters/setters where the runtime supports descriptors.
    var newObj = {};
    var hasPropertyDescriptor = Object.defineProperty && Object.getOwnPropertyDescriptor;
    for(var key in obj){
        if (key !== "default" && Object.prototype.hasOwnProperty.call(obj, key)) {
            var desc = hasPropertyDescriptor ? Object.getOwnPropertyDescriptor(obj, key) : null;
            if (desc && (desc.get || desc.set)) {
                Object.defineProperty(newObj, key, desc);
            } else {
                newObj[key] = obj[key];
            }
        }
    }
    // The original module object doubles as the namespace's `default`.
    newObj.default = obj;
    if (cache) {
        cache.set(obj, newObj);
    }
    return newObj;
}
|
|
54
|
+
// Yup schema describing the supported user config file fields; both fields
// are optional strings.
const userConfigSchema = _yup.object({
    serviceToken: _yup.string(),
    apiOrigin: _yup.string()
});
// Module-level cache of the loaded user config. Set once by `load` below and
// read by `get`, which throws if `load` has not run first.
let USER_CONFIG;
|
|
59
|
+
/*
 * Reads `config.json` from the given config directory, returning the parsed
 * object, or null when the file is absent or we are running under tests.
 */ const maybeReadJsonConfig = async (configDir)=>{
    // Don't use a user config file in tests.
    if (_env.isTestEnv) return null;
    const jsonConfigPath = _nodePath.join(configDir, "config.json");
    const hasConfigFile = await _fsExtra.pathExists(jsonConfigPath);
    return hasConfigFile ? _fsExtra.readJSON(jsonConfigPath) : null;
};
|
|
67
|
+
/*
 * Loads, validates, and caches the user config from the given config
 * directory. A missing or empty config file yields an empty config object.
 */ const load = async (configDir)=>{
    const rawConfig = (await maybeReadJsonConfig(configDir)) || {};
    const validConfig = await userConfigSchema.validate(rawConfig);
    // If no valid user config was available, give it an empty map.
    USER_CONFIG = validConfig || {};
    return USER_CONFIG;
};
|
|
74
|
+
/*
 * Returns the cached user config; `load` must have been called first.
 */ const get = ()=>{
    if (USER_CONFIG) return USER_CONFIG;
    throw new Error("User config must be loaded first.");
};
|
|
80
|
+
// Default export: the user config module's public surface.
const _default = {
    load,
    get
};
|