@knocklabs/cli 0.1.3 → 0.1.5
This diff shows the changes between publicly released versions of this package, as they appear in their respective public registries. It is provided for informational purposes only.
- package/README.md +416 -9
- package/dist/commands/layout/get.js +98 -0
- package/dist/commands/layout/list.js +86 -0
- package/dist/commands/layout/pull.js +199 -0
- package/dist/commands/layout/push.js +159 -0
- package/dist/commands/layout/validate.js +134 -0
- package/dist/commands/workflow/get.js +39 -7
- package/dist/commands/workflow/list.js +4 -1
- package/dist/commands/workflow/run.js +6 -8
- package/dist/lib/api-v1.js +51 -2
- package/dist/lib/helpers/flag.js +19 -1
- package/dist/lib/helpers/json.js +9 -0
- package/dist/lib/marshal/email-layout/helpers.js +124 -0
- package/dist/lib/marshal/email-layout/index.js +19 -0
- package/dist/lib/marshal/email-layout/reader.js +193 -0
- package/dist/lib/marshal/email-layout/types.js +4 -0
- package/dist/lib/marshal/email-layout/writer.js +240 -0
- package/dist/lib/marshal/shared/helpers.js +135 -0
- package/dist/lib/marshal/workflow/generator.js +4 -2
- package/dist/lib/marshal/workflow/helpers.js +41 -6
- package/dist/lib/marshal/workflow/reader.js +8 -106
- package/dist/lib/marshal/workflow/types.js +2 -0
- package/dist/lib/marshal/workflow/writer.js +76 -40
- package/dist/lib/run-context/loader.js +11 -0
- package/oclif.manifest.json +338 -7
- package/package.json +13 -10
package/dist/lib/marshal/email-layout/writer.js
@@ -0,0 +1,240 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", {
+    value: true
+});
+function _export(target, all) {
+    for(var name in all)Object.defineProperty(target, name, {
+        enumerable: true,
+        get: all[name]
+    });
+}
+_export(exports, {
+    writeEmailLayoutDirFromData: ()=>writeEmailLayoutDirFromData,
+    writeEmailLayoutIndexDir: ()=>writeEmailLayoutIndexDir,
+    buildEmailLayoutDirBundle: ()=>buildEmailLayoutDirBundle,
+    pruneLayoutsIndexDir: ()=>pruneLayoutsIndexDir,
+    toEmailLayoutJson: ()=>toEmailLayoutJson
+});
+const _nodePath = /*#__PURE__*/ _interopRequireDefault(require("node:path"));
+const _fsExtra = /*#__PURE__*/ _interopRequireWildcard(require("fs-extra"));
+const _lodash = require("lodash");
+const _const = require("../../helpers/const");
+const _json = require("../../helpers/json");
+const _object = require("../../helpers/object");
+const _helpers = require("../shared/helpers");
+const _helpers1 = require("./helpers");
+const _reader = require("./reader");
+function _interopRequireDefault(obj) {
+    return obj && obj.__esModule ? obj : {
+        default: obj
+    };
+}
+function _getRequireWildcardCache(nodeInterop) {
+    if (typeof WeakMap !== "function") return null;
+    var cacheBabelInterop = new WeakMap();
+    var cacheNodeInterop = new WeakMap();
+    return (_getRequireWildcardCache = function(nodeInterop) {
+        return nodeInterop ? cacheNodeInterop : cacheBabelInterop;
+    })(nodeInterop);
+}
+function _interopRequireWildcard(obj, nodeInterop) {
+    if (!nodeInterop && obj && obj.__esModule) {
+        return obj;
+    }
+    if (obj === null || typeof obj !== "object" && typeof obj !== "function") {
+        return {
+            default: obj
+        };
+    }
+    var cache = _getRequireWildcardCache(nodeInterop);
+    if (cache && cache.has(obj)) {
+        return cache.get(obj);
+    }
+    var newObj = {};
+    var hasPropertyDescriptor = Object.defineProperty && Object.getOwnPropertyDescriptor;
+    for(var key in obj){
+        if (key !== "default" && Object.prototype.hasOwnProperty.call(obj, key)) {
+            var desc = hasPropertyDescriptor ? Object.getOwnPropertyDescriptor(obj, key) : null;
+            if (desc && (desc.get || desc.set)) {
+                Object.defineProperty(newObj, key, desc);
+            } else {
+                newObj[key] = obj[key];
+            }
+        }
+    }
+    newObj.default = obj;
+    if (cache) {
+        cache.set(obj, newObj);
+    }
+    return newObj;
+}
+/* Traverse a given email layout data and compile extraction settings of every extractable
+ * field into a sorted map.
+ *
+ * NOTE: Currently we do NOT support content extraction at nested levels for email layouts.
+ */ const compileExtractionSettings = (emailLayout)=>{
+    const extractableFields = (0, _lodash.get)(emailLayout, [
+        "__annotation",
+        "extractable_fields"
+    ], {});
+    const map = new Map();
+    for (const [key] of Object.entries(emailLayout)){
+        // If the field we are on is extractable, then add its extraction
+        // settings to the map with the current object path.
+        if (key in extractableFields) {
+            map.set([
+                key
+            ], extractableFields[key]);
+        }
+    }
+    return map;
+};
+/* Sanitize the email layout content into a format that's appropriate for reading
+ * and writing, by stripping out any annotation fields and handling readonly
+ * fields.
+ */ const toEmailLayoutJson = (emailLayout)=>{
+    var _emailLayout___annotation;
+    // Move read-only fields under the dedicated field "__readonly".
+    const readonlyFields = ((_emailLayout___annotation = emailLayout.__annotation) === null || _emailLayout___annotation === void 0 ? void 0 : _emailLayout___annotation.readonly_fields) || [];
+    const [readonly, remainder] = (0, _object.split)(emailLayout, readonlyFields);
+    const emailLayoutjson = {
+        ...remainder,
+        __readonly: readonly
+    };
+    // Strip out all schema annotations, so as not to expose them to end users.
+    return (0, _object.omitDeep)(emailLayoutjson, [
+        "__annotation"
+    ]);
+};
+const writeEmailLayoutDirFromData = async (emailLayoutDirCtx, remoteEmailLayout)=>{
+    // If the layout directory exists on the file system (i.e. previously
+    // pulled), then read the layout file to use as a reference.
+    const [localEmailLayout] = emailLayoutDirCtx.exists ? await (0, _reader.readEmailLayoutDir)(emailLayoutDirCtx, {
+        withExtractedFiles: true
+    }) : [];
+    const bundle = buildEmailLayoutDirBundle(remoteEmailLayout, localEmailLayout);
+    const backupDirPath = _nodePath.default.resolve(_const.sandboxDir, (0, _lodash.uniqueId)("backup"));
+    try {
+        // We store a backup in case there's an error.
+        if (emailLayoutDirCtx.exists) {
+            await _fsExtra.copy(emailLayoutDirCtx.abspath, backupDirPath);
+            await _fsExtra.emptyDir(emailLayoutDirCtx.abspath);
+        }
+        const promises = Object.entries(bundle).map(([relpath, fileContent])=>{
+            const filePath = _nodePath.default.resolve(emailLayoutDirCtx.abspath, relpath);
+            return relpath === _helpers1.LAYOUT_JSON ? _fsExtra.outputJson(filePath, fileContent, {
+                spaces: _json.DOUBLE_SPACES
+            }) : _fsExtra.outputFile(filePath, fileContent);
+        });
+        await Promise.all(promises);
+    } catch (error) {
+        // In case of any error, wipe the target directory that is likely in a bad
+        // state, then restore the backup if one existed before.
+        if (emailLayoutDirCtx.exists) {
+            await _fsExtra.emptyDir(emailLayoutDirCtx.abspath);
+            await _fsExtra.copy(backupDirPath, emailLayoutDirCtx.abspath);
+        } else {
+            await _fsExtra.remove(emailLayoutDirCtx.abspath);
+        }
+        throw error;
+    } finally{
+        // Always clean up the backup directory in the temp sandbox.
+        await _fsExtra.remove(backupDirPath);
+    }
+};
+/* For a given email layout payload, this function builds an "email layout directory bundle".
+ * This is an object which contains all the relative paths and their file content.
+ * It includes the extractable fields, which are extracted out and added to the bundle as separate files.
+ */ const buildEmailLayoutDirBundle = (remoteEmailLayout, localEmailLayout = {})=>{
+    const bundle = {};
+    const mutRemoteEmailLayout = (0, _lodash.cloneDeep)(remoteEmailLayout);
+    // A map of extraction settings of every field in the email layout
+    const compiledExtractionSettings = compileExtractionSettings(mutRemoteEmailLayout);
+    // Iterate through each extractable field, determine whether we need to
+    // extract the field content, and if so, perform the
+    // extraction.
+    for (const [objPathParts, extractionSettings] of compiledExtractionSettings){
+        // If this layout doesn't have this field path, then we don't extract.
+        if (!(0, _lodash.has)(mutRemoteEmailLayout, objPathParts)) continue;
+        // If the field at this path is extracted in the local layout, then
+        // always extract; otherwise extract based on the field settings default.
+        const objPathStr = _object.ObjPath.stringify(objPathParts);
+        const extractedFilePath = (0, _lodash.get)(localEmailLayout, `${objPathStr}${_helpers.FILEPATH_MARKER}`);
+        const { default: extractByDefault , file_ext: fileExt } = extractionSettings;
+        if (!extractedFilePath && !extractByDefault) continue;
+        // By this point, we have a field where we need to extract its content.
+        const data = (0, _lodash.get)(mutRemoteEmailLayout, objPathParts);
+        const fileName = objPathParts.pop();
+        // If we have an extracted file path from the local layout, we use that. In the other
+        // case we use the default path.
+        const relpath = typeof extractedFilePath === "string" ? extractedFilePath : `${fileName}.${fileExt}`;
+        // Perform the extraction by adding the content and its file path to the
+        // bundle for writing to the file system later. Then replace the field
+        // content with the extracted file path and mark the field as extracted
+        // with @ suffix.
+        (0, _lodash.set)(bundle, [
+            relpath
+        ], data);
+        (0, _lodash.set)(mutRemoteEmailLayout, `${objPathStr}${_helpers.FILEPATH_MARKER}`, relpath);
+        (0, _lodash.unset)(mutRemoteEmailLayout, objPathStr);
+    }
+    // At this point the bundle contains all extractable files, so we finally add the layout
+    // JSON relative path + the file content.
+    return (0, _lodash.set)(bundle, [
+        _helpers1.LAYOUT_JSON
+    ], toEmailLayoutJson(mutRemoteEmailLayout));
+};
+const writeEmailLayoutIndexDir = async (indexDirCtx, remoteEmailLayouts)=>{
+    const backupDirPath = _nodePath.default.resolve(_const.sandboxDir, (0, _lodash.uniqueId)("backup"));
+    try {
+        if (indexDirCtx.exists) {
+            await _fsExtra.copy(indexDirCtx.abspath, backupDirPath);
+            await pruneLayoutsIndexDir(indexDirCtx, remoteEmailLayouts);
+        }
+        const writeEmailLayoutDirPromises = remoteEmailLayouts.map(async (remoteEmailLayout)=>{
+            const emailLayoutDirPath = _nodePath.default.resolve(indexDirCtx.abspath, remoteEmailLayout.key);
+            const emailLayoutDirCtx = {
+                type: "email_layout",
+                key: remoteEmailLayout.key,
+                abspath: emailLayoutDirPath,
+                exists: indexDirCtx.exists ? await (0, _helpers1.isEmailLayoutDir)(emailLayoutDirPath) : false
+            };
+            return writeEmailLayoutDirFromData(emailLayoutDirCtx, remoteEmailLayout);
+        });
+        await Promise.all(writeEmailLayoutDirPromises);
+    } catch (error) {
+        if (indexDirCtx.exists) {
+            await _fsExtra.emptyDir(indexDirCtx.abspath);
+            await _fsExtra.copy(backupDirPath, indexDirCtx.abspath);
+        } else {
+            await _fsExtra.remove(indexDirCtx.abspath);
+        }
+        throw error;
+    } finally{
+        // Always clean up the backup directory in the temp sandbox.
+        await _fsExtra.remove(backupDirPath);
+    }
+};
+/*
+ * Prunes the index directory by removing any files or directories that aren't
+ * layout dirs found in fetched layouts. We want to preserve any layout
+ * dirs that are going to be updated with remote layouts, so extracted links
+ * can be respected.
+ */ const pruneLayoutsIndexDir = async (indexDirCtx, remoteEmailLayouts)=>{
+    const emailLayoutsByKey = Object.fromEntries(remoteEmailLayouts.map((e)=>[
+        e.key.toLowerCase(),
+        e
+    ]));
+    const dirents = await _fsExtra.readdir(indexDirCtx.abspath, {
+        withFileTypes: true
+    });
+    const promises = dirents.map(async (dirent)=>{
+        const direntName = dirent.name.toLowerCase();
+        const direntPath = _nodePath.default.resolve(indexDirCtx.abspath, direntName);
+        if (await (0, _helpers1.isEmailLayoutDir)(direntPath) && emailLayoutsByKey[direntName]) {
+            return;
+        }
+        await _fsExtra.remove(direntPath);
+    });
+    await Promise.all(promises);
+};
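The writer above wraps every directory write in a backup-and-restore sequence: snapshot the existing directory into a temp sandbox, empty it, write the new bundle, restore the snapshot on failure, and always discard the backup. A minimal sketch of that pattern with fs-extra (the `writeFiles` callback is a hypothetical stand-in for writing the dir bundle):

```js
const path = require("node:path");
const fse = require("fs-extra");

// Minimal sketch of the backup-and-restore write used by
// writeEmailLayoutDirFromData. `targetDir` may or may not exist yet;
// `writeFiles` stands in for writing the dir bundle (hypothetical helper).
async function safeOverwrite(targetDir, sandboxDir, writeFiles) {
  const existed = await fse.pathExists(targetDir);
  const backupDir = path.resolve(sandboxDir, `backup-${Date.now()}`);
  try {
    if (existed) {
      await fse.copy(targetDir, backupDir); // snapshot the current state
      await fse.emptyDir(targetDir); // start from a clean directory
    }
    await writeFiles(targetDir);
  } catch (error) {
    // Wipe the half-written directory, then restore the snapshot
    // (or remove the directory entirely if it never existed before).
    if (existed) {
      await fse.emptyDir(targetDir);
      await fse.copy(backupDir, targetDir);
    } else {
      await fse.remove(targetDir);
    }
    throw error;
  } finally {
    await fse.remove(backupDir); // the backup is always temporary
  }
}
```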
package/dist/lib/marshal/shared/helpers.js
@@ -0,0 +1,135 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", {
+    value: true
+});
+function _export(target, all) {
+    for(var name in all)Object.defineProperty(target, name, {
+        enumerable: true,
+        get: all[name]
+    });
+}
+_export(exports, {
+    FILEPATH_MARKER: ()=>FILEPATH_MARKER,
+    FILEPATH_MARKED_RE: ()=>FILEPATH_MARKED_RE,
+    readExtractedFileSync: ()=>readExtractedFileSync,
+    validateExtractedFilePath: ()=>validateExtractedFilePath,
+    checkIfValidExtractedFilePathFormat: ()=>checkIfValidExtractedFilePathFormat
+});
+const _nodePath = /*#__PURE__*/ _interopRequireWildcard(require("node:path"));
+const _fsExtra = /*#__PURE__*/ _interopRequireWildcard(require("fs-extra"));
+const _error = require("../../helpers/error");
+const _json = require("../../helpers/json");
+const _liquid = require("../../helpers/liquid");
+const _workflow = require("../workflow");
+function _getRequireWildcardCache(nodeInterop) {
+    if (typeof WeakMap !== "function") return null;
+    var cacheBabelInterop = new WeakMap();
+    var cacheNodeInterop = new WeakMap();
+    return (_getRequireWildcardCache = function(nodeInterop) {
+        return nodeInterop ? cacheNodeInterop : cacheBabelInterop;
+    })(nodeInterop);
+}
+function _interopRequireWildcard(obj, nodeInterop) {
+    if (!nodeInterop && obj && obj.__esModule) {
+        return obj;
+    }
+    if (obj === null || typeof obj !== "object" && typeof obj !== "function") {
+        return {
+            default: obj
+        };
+    }
+    var cache = _getRequireWildcardCache(nodeInterop);
+    if (cache && cache.has(obj)) {
+        return cache.get(obj);
+    }
+    var newObj = {};
+    var hasPropertyDescriptor = Object.defineProperty && Object.getOwnPropertyDescriptor;
+    for(var key in obj){
+        if (key !== "default" && Object.prototype.hasOwnProperty.call(obj, key)) {
+            var desc = hasPropertyDescriptor ? Object.getOwnPropertyDescriptor(obj, key) : null;
+            if (desc && (desc.get || desc.set)) {
+                Object.defineProperty(newObj, key, desc);
+            } else {
+                newObj[key] = obj[key];
+            }
+        }
+    }
+    newObj.default = obj;
+    if (cache) {
+        cache.set(obj, newObj);
+    }
+    return newObj;
+}
+const FILEPATH_MARKER = "@";
+const FILEPATH_MARKED_RE = new RegExp(`${FILEPATH_MARKER}$`);
+// The following files are expected to have valid json content, and should be
+// decoded and joined into the main JSON file.
+const DECODABLE_JSON_FILES = new Set([
+    _workflow.VISUAL_BLOCKS_JSON
+]);
+const readExtractedFileSync = (relpath, dirCtx, objPathToFieldStr = "")=>{
+    // Check if the file actually exists at the given file path.
+    const abspath = _nodePath.resolve(dirCtx.abspath, relpath);
+    const exists = _fsExtra.pathExistsSync(abspath);
+    if (!exists) {
+        const error = new _error.JsonDataError("must be a relative path string to a file that exists", objPathToFieldStr);
+        return [
+            undefined,
+            error
+        ];
+    }
+    // Read the file and check for valid liquid syntax given it is supported
+    // across all message templates and file extensions.
+    const contentStr = _fsExtra.readFileSync(abspath, "utf8");
+    const liquidParseError = (0, _liquid.validateLiquidSyntax)(contentStr);
+    if (liquidParseError) {
+        const error = new _error.JsonDataError(`points to a file that contains invalid liquid syntax (${relpath})\n\n` + (0, _error.formatErrors)([
+            liquidParseError
+        ], {
+            indentBy: 2
+        }), objPathToFieldStr);
+        return [
+            undefined,
+            error
+        ];
+    }
+    // If the file is expected to contain decodable json, then parse the contentStr
+    // as such.
+    const fileName = _nodePath.basename(abspath.toLowerCase());
+    const decodable = DECODABLE_JSON_FILES.has(fileName);
+    const [content, jsonParseErrors] = decodable ? (0, _json.parseJson)(contentStr) : [
+        contentStr,
+        []
+    ];
+    if (jsonParseErrors.length > 0) {
+        const error = new _error.JsonDataError(`points to a file with invalid content (${relpath})\n\n` + (0, _error.formatErrors)(jsonParseErrors, {
+            indentBy: 2
+        }), objPathToFieldStr);
+        return [
+            undefined,
+            error
+        ];
+    }
+    return [
+        content,
+        undefined
+    ];
+};
+const validateExtractedFilePath = (val, sourceFileAbspath, uniqueFilePaths, objPathToFieldStr)=>{
+    // Validate the file path format, and that it is unique per entity.
+    if (!checkIfValidExtractedFilePathFormat(val, sourceFileAbspath) || typeof val !== "string" || val in uniqueFilePaths) {
+        const error = new _error.JsonDataError("must be a relative path string to a unique file within the directory", objPathToFieldStr);
+        return error;
+    }
+    // Keep track of all the valid extracted file paths that have been seen, so
+    // we can validate each file path's uniqueness as we traverse.
+    uniqueFilePaths[val] = true;
+    return undefined;
+};
+const checkIfValidExtractedFilePathFormat = (relpath, sourceFileAbspath)=>{
+    if (typeof relpath !== "string") return false;
+    if (_nodePath.isAbsolute(relpath)) return false;
+    const extractedFileAbspath = _nodePath.resolve(sourceFileAbspath, relpath);
+    const pathDiff = _nodePath.relative(sourceFileAbspath, extractedFileAbspath);
+    return !pathDiff.startsWith("..");
+};
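Two invariants drive `checkIfValidExtractedFilePathFormat`: the extracted path must be relative, and once resolved against the source file it must not escape that file's directory tree. A small self-contained demonstration of the same check (the example paths are illustrative):

```js
const path = require("node:path");

// Same check as checkIfValidExtractedFilePathFormat: the extracted file
// must be a relative path that stays inside the source file's directory.
const isValidExtractedPath = (relpath, sourceFileAbspath) => {
  if (typeof relpath !== "string") return false;
  if (path.isAbsolute(relpath)) return false;
  const abspath = path.resolve(sourceFileAbspath, relpath);
  return !path.relative(sourceFileAbspath, abspath).startsWith("..");
};

// Illustrative inputs, mirroring the GOOD/BAD examples in the old
// workflow reader comments (removed further below):
const src = "/proj/workflow-x/workflow.json";
console.log(isValidExtractedPath("email_1/subject.html", src));        // true
console.log(isValidExtractedPath("./email_1/subject.html", src));      // true
console.log(isValidExtractedPath("/abs/email_1/subject.html", src));   // false
console.log(isValidExtractedPath("../workflow-y/subject.html", src));  // false
```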
package/dist/lib/marshal/workflow/generator.js
@@ -15,7 +15,8 @@ _export(exports, {
 });
 const _nodePath = /*#__PURE__*/ _interopRequireWildcard(require("node:path"));
 const _lodash = require("lodash");
-const _helpers = require("
+const _helpers = require("../shared/helpers");
+const _helpers1 = require("./helpers");
 const _types = require("./types");
 const _writer = require("./writer");
 function _getRequireWildcardCache(nodeInterop) {
@@ -80,6 +81,7 @@ const scaffoldBatchStep = (refSuffix)=>{
         type: _types.StepType.Batch,
         settings: {
             batch_order: "asc",
+            batch_window_type: "sliding",
             batch_window: {
                 unit: "seconds",
                 value: 30
@@ -262,7 +264,7 @@ const scaffoldWorkflowDirBundle = (attrs)=>{
         steps: scaffoldedSteps
     };
     return (0, _lodash.assign)({
-        [
+        [_helpers1.WORKFLOW_JSON]: workflowJson
     }, ...bundleFragments);
 };
 const generateWorkflowDir = async (workflowDirCtx, attrs)=>{
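The net effect of the generator change is that a freshly scaffolded batch step now carries an explicit `batch_window_type`. Roughly, the scaffolded settings look like this (only the fields visible in the hunk above; the string value behind `StepType.Batch` is an assumption):

```js
// Rough shape of the batch step produced by scaffoldBatchStep.
// "batch" stands in for _types.StepType.Batch (string value assumed).
const batchStep = {
  type: "batch",
  settings: {
    batch_order: "asc",
    batch_window_type: "sliding", // new in this release
    batch_window: {
      unit: "seconds",
      value: 30
    }
  }
};
```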
package/dist/lib/marshal/workflow/helpers.js
@@ -12,15 +12,14 @@ _export(exports, {
     WORKFLOW_JSON: ()=>WORKFLOW_JSON,
     VISUAL_BLOCKS_JSON: ()=>VISUAL_BLOCKS_JSON,
     workflowJsonPath: ()=>workflowJsonPath,
-    FILEPATH_MARKER: ()=>FILEPATH_MARKER,
-    FILEPATH_MARKED_RE: ()=>FILEPATH_MARKED_RE,
     validateWorkflowKey: ()=>validateWorkflowKey,
     lsWorkflowJson: ()=>lsWorkflowJson,
     isWorkflowDir: ()=>isWorkflowDir,
     formatCategories: ()=>formatCategories,
     formatStepSummary: ()=>formatStepSummary,
     formatStatus: ()=>formatStatus,
-    ensureValidCommandTarget: ()=>ensureValidCommandTarget
+    ensureValidCommandTarget: ()=>ensureValidCommandTarget,
+    countSteps: ()=>countSteps
 });
 const _nodePath = /*#__PURE__*/ _interopRequireWildcard(require("node:path"));
 const _core = require("@oclif/core");
@@ -70,8 +69,6 @@ function _interopRequireWildcard(obj, nodeInterop) {
 const WORKFLOW_JSON = "workflow.json";
 const VISUAL_BLOCKS_JSON = "visual_blocks.json";
 const workflowJsonPath = (workflowDirCtx)=>_nodePath.resolve(workflowDirCtx.abspath, WORKFLOW_JSON);
-const FILEPATH_MARKER = "@";
-const FILEPATH_MARKED_RE = new RegExp(`${FILEPATH_MARKER}$`);
 const validateWorkflowKey = (input)=>{
     if (!(0, _string.checkSlugifiedFormat)(input, {
         onlyLowerCase: true
@@ -106,14 +103,37 @@ const formatCategories = (workflow, opts = {})=>{
 };
 const batchStepSummaryLines = (step)=>{
     if (step.type !== _types.StepType.Batch) return [];
-    const { batch_key , batch_window: duration , batch_until_field_path: field_path , batch_order } = step.settings;
+    const { batch_key , batch_window_type , batch_window: duration , batch_window_extension_limit , batch_until_field_path: field_path , batch_order } = step.settings;
     return [
         batch_key && `Batch key: ${batch_key}`,
         duration && `Batch window: ${duration.value} ${duration.unit}`,
         field_path && `Batch window: "${field_path}"`,
+        `Batch window type: ${batch_window_type}`,
+        batch_window_extension_limit && `Batch window extension limit: ${batch_window_extension_limit.value} ${batch_window_extension_limit.unit}`,
         `Batch order: ${batch_order}`
     ];
 };
+const throttleStepSummaryLines = (step)=>{
+    if (step.type !== _types.StepType.Throttle) return [];
+    const { throttle_key , throttle_window: duration , throttle_window_field_path: field_path , throttle_limit } = step.settings;
+    return [
+        throttle_key && `Throttle key: ${throttle_key}`,
+        duration && `Throttle window: ${duration.value} ${duration.unit}`,
+        field_path && `Throttle window: "${field_path}"`,
+        `Throttle limit: ${throttle_limit}`
+    ];
+};
+const branchStepSummaryLines = (step)=>{
+    if (step.type !== _types.StepType.Branch) return [];
+    let stepsCount = 0;
+    for (const branch of step.branches){
+        stepsCount += doCountSteps(branch.steps);
+    }
+    return [
+        `Branches: ${step.branches.length}`,
+        `Steps: ${stepsCount}`
+    ];
+};
 const delayStepSummaryLines = (step)=>{
     if (step.type !== _types.StepType.Delay) return [];
     const { delay_for: duration , delay_until_field_path: field_path } = step.settings;
@@ -145,6 +165,8 @@ const formatStepSummary = (step)=>{
         ...batchStepSummaryLines(step),
         ...delayStepSummaryLines(step),
         ...httpFetchStepSummaryLines(step),
+        ...branchStepSummaryLines(step),
+        ...throttleStepSummaryLines(step),
         // Extra line between step rows to make it easier on the eye.
         " "
     ].filter((x)=>x);
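The new throttle and branch summaries slot into `formatStepSummary` alongside the existing helpers, and the composition relies on a simple convention: optional settings are emitted as `value && "line"` entries, and the trailing `.filter((x)=>x)` drops whatever came out falsy. A tiny illustration of the pattern:

```js
// Optional values are expressed as `value && "line"`; missing settings
// become `undefined` and are filtered out of the final summary.
const throttleLines = (settings) => [
  settings.throttle_key && `Throttle key: ${settings.throttle_key}`,
  `Throttle limit: ${settings.throttle_limit}`,
];

// No throttle_key set, so only the limit line (and the spacer) survives.
const summary = [...throttleLines({ throttle_limit: 5 }), " "].filter((x) => x);
console.log(summary); // [ 'Throttle limit: 5', ' ' ]
```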
@@ -211,3 +233,16 @@ const ensureValidCommandTarget = async (props, runContext)=>{
     }
     return _core.ux.error("Missing 1 required arg:\nworkflowKey");
 };
+const doCountSteps = (steps)=>{
+    let count = 0;
+    for (const step of steps){
+        count += 1;
+        if (step.type === _types.StepType.Branch) {
+            for (const branch of step.branches){
+                count += doCountSteps(branch.steps);
+            }
+        }
+    }
+    return count;
+};
+const countSteps = (workflow)=>doCountSteps(workflow.steps);
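`countSteps` is a recursive total: each step counts once, and a branch step additionally contributes every step inside each of its branches. A worked example with a hypothetical nested workflow (step type strings assumed):

```js
// Mirror of doCountSteps for illustration; "branch" stands in for
// _types.StepType.Branch (string value assumed).
const countAll = (steps) => {
  let count = 0;
  for (const step of steps) {
    count += 1;
    if (step.type === "branch") {
      for (const branch of step.branches) {
        count += countAll(branch.steps);
      }
    }
  }
  return count;
};

// 1 branch step + (2 + 1) nested steps + 1 trailing channel step = 5.
const workflow = {
  steps: [
    {
      type: "branch",
      branches: [
        { steps: [{ type: "delay" }, { type: "channel" }] },
        { steps: [{ type: "channel" }] },
      ],
    },
    { type: "channel" },
  ],
};

console.log(countAll(workflow.steps)); // 5
```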
package/dist/lib/marshal/workflow/reader.js
@@ -10,9 +10,7 @@ function _export(target, all) {
 }
 _export(exports, {
     readWorkflowDir: ()=>readWorkflowDir,
-    readAllForCommandTarget: ()=>readAllForCommandTarget
-    checkIfValidExtractedFilePathFormat: ()=>checkIfValidExtractedFilePathFormat,
-    readExtractedFileSync: ()=>readExtractedFileSync
+    readAllForCommandTarget: ()=>readAllForCommandTarget
 });
 const _nodePath = /*#__PURE__*/ _interopRequireWildcard(require("node:path"));
 const _core = require("@oclif/core");
@@ -20,9 +18,9 @@ const _fsExtra = /*#__PURE__*/ _interopRequireWildcard(require("fs-extra"));
 const _lodash = require("lodash");
 const _error = require("../../helpers/error");
 const _json = require("../../helpers/json");
-const _liquid = require("../../helpers/liquid");
 const _object = require("../../helpers/object");
-const _helpers = require("
+const _helpers = require("../shared/helpers");
+const _helpers1 = require("./helpers");
 function _getRequireWildcardCache(nodeInterop) {
     if (typeof WeakMap !== "function") return null;
     var cacheBabelInterop = new WeakMap();
@@ -65,102 +63,6 @@ function _interopRequireWildcard(obj, nodeInterop) {
 // For now we support up to two levels of content extraction in workflow.json.
 // (e.g. workflow.json, then visual_blocks.json)
 const MAX_EXTRACTION_LEVEL = 2;
-// The following files are expected to have valid json content, and should be
-// decoded and joined into the main workflow.json.
-const DECODABLE_JSON_FILES = new Set([
-    _helpers.VISUAL_BLOCKS_JSON
-]);
-/*
- * Validate the file path format of an extracted field. The file path must be:
- *
- * 1) Expressed as a relative path.
- *
- * For example:
- * subject@: "email_1/subject.html" // GOOD
- * subject@: "./email_1/subject.html" // GOOD
- * subject@: "/workflow-x/email_1/subject.html" // BAD
- *
- * 2) The resolved path must be contained inside the workflow directory
- *
- * For example (workflow-y is a different workflow dir in this example):
- * subject@: "./email_1/subject.html" // GOOD
- * subject@: "../workflow-y/email_1/subject.html" // BAD
- *
- * Note: does not validate the presence of the file nor the uniqueness of the
- * file path.
- */ const checkIfValidExtractedFilePathFormat = (relpath, sourceFileAbspath)=>{
-    if (typeof relpath !== "string") return false;
-    if (_nodePath.isAbsolute(relpath)) return false;
-    const extractedFileAbspath = _nodePath.resolve(sourceFileAbspath, relpath);
-    const pathDiff = _nodePath.relative(sourceFileAbspath, extractedFileAbspath);
-    return !pathDiff.startsWith("..");
-};
-/*
- * Validate the extracted file path based on its format and uniqueness (but not
- * the presence).
- *
- * Note, the uniqueness check is based on reading from and writing to
- * uniqueFilePaths, which is MUTATED in place.
- */ const validateExtractedFilePath = (val, workflowDirCtx, uniqueFilePaths, objPathToFieldStr)=>{
-    const workflowJsonPath = _nodePath.resolve(workflowDirCtx.abspath, _helpers.WORKFLOW_JSON);
-    // Validate the file path format, and that it is unique per workflow.
-    if (!checkIfValidExtractedFilePathFormat(val, workflowJsonPath) || typeof val !== "string" || val in uniqueFilePaths) {
-        const error = new _error.JsonDataError("must be a relative path string to a unique file within the directory", objPathToFieldStr);
-        return error;
-    }
-    // Keep track of all the valid extracted file paths that have been seen, so
-    // we can validate each file path's uniqueness as we traverse.
-    uniqueFilePaths[val] = true;
-    return undefined;
-};
-const readExtractedFileSync = (relpath, workflowDirCtx, objPathToFieldStr = "")=>{
-    // Check if the file actually exists at the given file path.
-    const abspath = _nodePath.resolve(workflowDirCtx.abspath, relpath);
-    const exists = _fsExtra.pathExistsSync(abspath);
-    if (!exists) {
-        const error = new _error.JsonDataError("must be a relative path string to a file that exists", objPathToFieldStr);
-        return [
-            undefined,
-            error
-        ];
-    }
-    // Read the file and check for valid liquid syntax given it is supported
-    // across all message templates and file extensions.
-    const contentStr = _fsExtra.readFileSync(abspath, "utf8");
-    const liquidParseError = (0, _liquid.validateLiquidSyntax)(contentStr);
-    if (liquidParseError) {
-        const error = new _error.JsonDataError(`points to a file that contains invalid liquid syntax (${relpath})\n\n` + (0, _error.formatErrors)([
-            liquidParseError
-        ], {
-            indentBy: 2
-        }), objPathToFieldStr);
-        return [
-            undefined,
-            error
-        ];
-    }
-    // If the file is expected to contain decodable json, then parse the contentStr
-    // as such.
-    const fileName = _nodePath.basename(abspath.toLowerCase());
-    const decodable = DECODABLE_JSON_FILES.has(fileName);
-    const [content, jsonParseErrors] = decodable ? (0, _json.parseJson)(contentStr) : [
-        contentStr,
-        []
-    ];
-    if (jsonParseErrors.length > 0) {
-        const error = new _error.JsonDataError(`points to a file with invalid content (${relpath})\n\n` + (0, _error.formatErrors)(jsonParseErrors, {
-            indentBy: 2
-        }), objPathToFieldStr);
-        return [
-            undefined,
-            error
-        ];
-    }
-    return [
-        content,
-        undefined
-    ];
-};
 const joinExtractedFiles = async (workflowDirCtx, workflowJson)=>{
     // Tracks any errors encountered during traversal. Mutated in place.
     const errors = [];
@@ -192,7 +94,7 @@ const joinExtractedFiles = async (workflowDirCtx, workflowJson)=>{
     const lastFound = (0, _object.getLastFound)(prevJoinedFilePaths, parts);
     const prevJoinedFilePath = typeof lastFound === "string" ? lastFound : undefined;
     const rebasedFilePath = prevJoinedFilePath ? _nodePath.join(_nodePath.dirname(prevJoinedFilePath), value) : value;
-    const invalidFilePathError = validateExtractedFilePath(rebasedFilePath, workflowDirCtx, uniqueFilePaths, objPathToFieldStr);
+    const invalidFilePathError = (0, _helpers.validateExtractedFilePath)(rebasedFilePath, _nodePath.resolve(workflowDirCtx.abspath, _helpers1.WORKFLOW_JSON), uniqueFilePaths, objPathToFieldStr);
     if (invalidFilePathError) {
         errors.push(invalidFilePathError);
         // Wipe the invalid file path in the node so the final workflow json
@@ -206,7 +108,7 @@ const joinExtractedFiles = async (workflowDirCtx, workflowJson)=>{
     }
     // By this point we have a valid extracted file path, so attempt to read
     // the file at the file path.
-    const [content, readExtractedFileError] = readExtractedFileSync(rebasedFilePath, workflowDirCtx, objPathToFieldStr);
+    const [content, readExtractedFileError] = (0, _helpers.readExtractedFileSync)(rebasedFilePath, workflowDirCtx, objPathToFieldStr);
     if (readExtractedFileError) {
         errors.push(readExtractedFileError);
         // Replace the extracted file path with the rebased one, and set the
@@ -236,7 +138,7 @@ const readWorkflowDir = async (workflowDirCtx, opts = {})=>{
     const { withExtractedFiles =false , withReadonlyField =false } = opts;
     const dirExists = await _fsExtra.pathExists(abspath);
     if (!dirExists) throw new Error(`${abspath} does not exist`);
-    const workflowJsonPath = await (0,
+    const workflowJsonPath = await (0, _helpers1.lsWorkflowJson)(abspath);
     if (!workflowJsonPath) throw new Error(`${abspath} is not a workflow directory`);
     const result = await (0, _json.readJson)(workflowJsonPath);
     if (!result[0]) return result;
@@ -259,7 +161,7 @@ const readWorkflowDir = async (workflowDirCtx, opts = {})=>{
         // eslint-disable-next-line no-await-in-loop
         const [workflow, readErrors] = await readWorkflowDir(workflowDirCtx, opts);
         if (readErrors.length > 0) {
-            const workflowJsonPath = _nodePath.resolve(workflowDirCtx.abspath,
+            const workflowJsonPath = _nodePath.resolve(workflowDirCtx.abspath, _helpers1.WORKFLOW_JSON);
             const e = new _error.SourceError((0, _error.formatErrors)(readErrors), workflowJsonPath);
             errors.push(e);
             continue;
@@ -298,7 +200,7 @@ const readAllForCommandTarget = async (target, opts = {})=>{
             type: "workflow",
             key: dirent.name,
             abspath,
-            exists: await (0,
+            exists: await (0, _helpers1.isWorkflowDir)(abspath)
         };
         return workflowDirCtx;
     });
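With the local copies deleted, the workflow reader now consumes the shared helpers through their `[content, error]` tuple convention. A hedged usage sketch (the require path and directory-context shape are simplified for illustration; these are internal dist modules, not a public API):

```js
// Illustrative require path into the package's compiled output.
const { readExtractedFileSync } = require("@knocklabs/cli/dist/lib/marshal/shared/helpers");

// A dir context only needs `abspath` for this helper (simplified shape).
const dirCtx = { abspath: "/proj/workflows/my-workflow" };

// Returns [content, undefined] on success, or [undefined, JsonDataError]
// for a missing file, invalid liquid syntax, or unparsable JSON.
const [content, error] = readExtractedFileSync(
  "email_1/subject.html",      // relative path recorded next to an "@"-marked field
  dirCtx,
  "steps[0].template.subject"  // object path used in error messages (illustrative)
);

if (error) {
  console.error(error.message);
} else {
  console.log(content);
}
```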