@knocklabs/cli 0.1.4 → 0.1.5
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +407 -4
- package/dist/commands/layout/get.js +98 -0
- package/dist/commands/layout/list.js +86 -0
- package/dist/commands/layout/pull.js +199 -0
- package/dist/commands/layout/push.js +159 -0
- package/dist/commands/layout/validate.js +134 -0
- package/dist/commands/workflow/get.js +39 -7
- package/dist/commands/workflow/list.js +4 -1
- package/dist/lib/api-v1.js +47 -0
- package/dist/lib/marshal/email-layout/helpers.js +124 -0
- package/dist/lib/marshal/email-layout/index.js +19 -0
- package/dist/lib/marshal/email-layout/reader.js +193 -0
- package/dist/lib/marshal/email-layout/types.js +4 -0
- package/dist/lib/marshal/email-layout/writer.js +240 -0
- package/dist/lib/marshal/shared/helpers.js +135 -0
- package/dist/lib/marshal/workflow/generator.js +3 -2
- package/dist/lib/marshal/workflow/helpers.js +27 -5
- package/dist/lib/marshal/workflow/reader.js +8 -106
- package/dist/lib/marshal/workflow/types.js +1 -0
- package/dist/lib/marshal/workflow/writer.js +76 -40
- package/dist/lib/run-context/loader.js +11 -0
- package/oclif.manifest.json +333 -1
- package/package.json +9 -6
package/dist/lib/marshal/shared/helpers.js

@@ -0,0 +1,135 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", {
+    value: true
+});
+function _export(target, all) {
+    for(var name in all)Object.defineProperty(target, name, {
+        enumerable: true,
+        get: all[name]
+    });
+}
+_export(exports, {
+    FILEPATH_MARKER: ()=>FILEPATH_MARKER,
+    FILEPATH_MARKED_RE: ()=>FILEPATH_MARKED_RE,
+    readExtractedFileSync: ()=>readExtractedFileSync,
+    validateExtractedFilePath: ()=>validateExtractedFilePath,
+    checkIfValidExtractedFilePathFormat: ()=>checkIfValidExtractedFilePathFormat
+});
+const _nodePath = /*#__PURE__*/ _interopRequireWildcard(require("node:path"));
+const _fsExtra = /*#__PURE__*/ _interopRequireWildcard(require("fs-extra"));
+const _error = require("../../helpers/error");
+const _json = require("../../helpers/json");
+const _liquid = require("../../helpers/liquid");
+const _workflow = require("../workflow");
+function _getRequireWildcardCache(nodeInterop) {
+    if (typeof WeakMap !== "function") return null;
+    var cacheBabelInterop = new WeakMap();
+    var cacheNodeInterop = new WeakMap();
+    return (_getRequireWildcardCache = function(nodeInterop) {
+        return nodeInterop ? cacheNodeInterop : cacheBabelInterop;
+    })(nodeInterop);
+}
+function _interopRequireWildcard(obj, nodeInterop) {
+    if (!nodeInterop && obj && obj.__esModule) {
+        return obj;
+    }
+    if (obj === null || typeof obj !== "object" && typeof obj !== "function") {
+        return {
+            default: obj
+        };
+    }
+    var cache = _getRequireWildcardCache(nodeInterop);
+    if (cache && cache.has(obj)) {
+        return cache.get(obj);
+    }
+    var newObj = {};
+    var hasPropertyDescriptor = Object.defineProperty && Object.getOwnPropertyDescriptor;
+    for(var key in obj){
+        if (key !== "default" && Object.prototype.hasOwnProperty.call(obj, key)) {
+            var desc = hasPropertyDescriptor ? Object.getOwnPropertyDescriptor(obj, key) : null;
+            if (desc && (desc.get || desc.set)) {
+                Object.defineProperty(newObj, key, desc);
+            } else {
+                newObj[key] = obj[key];
+            }
+        }
+    }
+    newObj.default = obj;
+    if (cache) {
+        cache.set(obj, newObj);
+    }
+    return newObj;
+}
+const FILEPATH_MARKER = "@";
+const FILEPATH_MARKED_RE = new RegExp(`${FILEPATH_MARKER}$`);
+// The following files are exepected to have valid json content, and should be
+// decoded and joined into the main JSON file.
+const DECODABLE_JSON_FILES = new Set([
+    _workflow.VISUAL_BLOCKS_JSON
+]);
+const readExtractedFileSync = (relpath, dirCtx, objPathToFieldStr = "")=>{
+    // Check if the file actually exists at the given file path.
+    const abspath = _nodePath.resolve(dirCtx.abspath, relpath);
+    const exists = _fsExtra.pathExistsSync(abspath);
+    if (!exists) {
+        const error = new _error.JsonDataError("must be a relative path string to a file that exists", objPathToFieldStr);
+        return [
+            undefined,
+            error
+        ];
+    }
+    // Read the file and check for valid liquid syntax given it is supported
+    // across all message templates and file extensions.
+    const contentStr = _fsExtra.readFileSync(abspath, "utf8");
+    const liquidParseError = (0, _liquid.validateLiquidSyntax)(contentStr);
+    if (liquidParseError) {
+        const error = new _error.JsonDataError(`points to a file that contains invalid liquid syntax (${relpath})\n\n` + (0, _error.formatErrors)([
+            liquidParseError
+        ], {
+            indentBy: 2
+        }), objPathToFieldStr);
+        return [
+            undefined,
+            error
+        ];
+    }
+    // If the file is expected to contain decodable json, then parse the contentStr
+    // as such.
+    const fileName = _nodePath.basename(abspath.toLowerCase());
+    const decodable = DECODABLE_JSON_FILES.has(fileName);
+    const [content, jsonParseErrors] = decodable ? (0, _json.parseJson)(contentStr) : [
+        contentStr,
+        []
+    ];
+    if (jsonParseErrors.length > 0) {
+        const error = new _error.JsonDataError(`points to a file with invalid content (${relpath})\n\n` + (0, _error.formatErrors)(jsonParseErrors, {
+            indentBy: 2
+        }), objPathToFieldStr);
+        return [
+            undefined,
+            error
+        ];
+    }
+    return [
+        content,
+        undefined
+    ];
+};
+const validateExtractedFilePath = (val, sourceFileAbspath, uniqueFilePaths, objPathToFieldStr)=>{
+    // Validate the file path format, and that it is unique per entity.
+    if (!checkIfValidExtractedFilePathFormat(val, sourceFileAbspath) || typeof val !== "string" || val in uniqueFilePaths) {
+        const error = new _error.JsonDataError("must be a relative path string to a unique file within the directory", objPathToFieldStr);
+        return error;
+    }
+    // Keep track of all the valid extracted file paths that have been seen, so
+    // we can validate each file path's uniqueness as we traverse.
+    uniqueFilePaths[val] = true;
+    return undefined;
+};
+const checkIfValidExtractedFilePathFormat = (relpath, sourceFileAbspath)=>{
+    if (typeof relpath !== "string") return false;
+    if (_nodePath.isAbsolute(relpath)) return false;
+    const extractedFileAbspath = _nodePath.resolve(sourceFileAbspath, relpath);
+    const pathDiff = _nodePath.relative(sourceFileAbspath, extractedFileAbspath);
+    return !pathDiff.startsWith("..");
+};
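The core of this new shared module is the containment rule in checkIfValidExtractedFilePathFormat: an extracted file path must be a relative path, and it must resolve to somewhere inside the source file's directory. A minimal standalone sketch of that rule, with hypothetical example paths:

// Sketch: the containment rule applied by checkIfValidExtractedFilePathFormat
// above, reproduced standalone. All paths below are illustrative examples.
const path = require("node:path");

const isContainedRelativePath = (relpath, sourceFileAbspath) => {
  if (typeof relpath !== "string") return false;
  if (path.isAbsolute(relpath)) return false;
  const abspath = path.resolve(sourceFileAbspath, relpath);
  // If resolving the path escapes the source file's directory, the relative
  // diff between the two starts with "..", and the check fails.
  return !path.relative(sourceFileAbspath, abspath).startsWith("..");
};

console.log(isContainedRelativePath("email_1/subject.html", "/proj/workflow-x/workflow.json")); // true
console.log(isContainedRelativePath("../workflow-y/subject.html", "/proj/workflow-x/workflow.json")); // false
console.log(isContainedRelativePath("/etc/passwd", "/proj/workflow-x/workflow.json")); // false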
package/dist/lib/marshal/workflow/generator.js

@@ -15,7 +15,8 @@ _export(exports, {
 });
 const _nodePath = /*#__PURE__*/ _interopRequireWildcard(require("node:path"));
 const _lodash = require("lodash");
-const _helpers = require("./helpers");
+const _helpers = require("../shared/helpers");
+const _helpers1 = require("./helpers");
 const _types = require("./types");
 const _writer = require("./writer");
 function _getRequireWildcardCache(nodeInterop) {

@@ -263,7 +264,7 @@ const scaffoldWorkflowDirBundle = (attrs)=>{
         steps: scaffoldedSteps
     };
     return (0, _lodash.assign)({
-        [_helpers.WORKFLOW_JSON]: workflowJson
+        [_helpers1.WORKFLOW_JSON]: workflowJson
    }, ...bundleFragments);
 };
 const generateWorkflowDir = async (workflowDirCtx, attrs)=>{
package/dist/lib/marshal/workflow/helpers.js

@@ -12,15 +12,14 @@ _export(exports, {
     WORKFLOW_JSON: ()=>WORKFLOW_JSON,
     VISUAL_BLOCKS_JSON: ()=>VISUAL_BLOCKS_JSON,
     workflowJsonPath: ()=>workflowJsonPath,
-    FILEPATH_MARKER: ()=>FILEPATH_MARKER,
-    FILEPATH_MARKED_RE: ()=>FILEPATH_MARKED_RE,
     validateWorkflowKey: ()=>validateWorkflowKey,
     lsWorkflowJson: ()=>lsWorkflowJson,
     isWorkflowDir: ()=>isWorkflowDir,
     formatCategories: ()=>formatCategories,
     formatStepSummary: ()=>formatStepSummary,
     formatStatus: ()=>formatStatus,
-    ensureValidCommandTarget: ()=>ensureValidCommandTarget
+    ensureValidCommandTarget: ()=>ensureValidCommandTarget,
+    countSteps: ()=>countSteps
 });
 const _nodePath = /*#__PURE__*/ _interopRequireWildcard(require("node:path"));
 const _core = require("@oclif/core");

@@ -70,8 +69,6 @@ function _interopRequireWildcard(obj, nodeInterop) {
 const WORKFLOW_JSON = "workflow.json";
 const VISUAL_BLOCKS_JSON = "visual_blocks.json";
 const workflowJsonPath = (workflowDirCtx)=>_nodePath.resolve(workflowDirCtx.abspath, WORKFLOW_JSON);
-const FILEPATH_MARKER = "@";
-const FILEPATH_MARKED_RE = new RegExp(`${FILEPATH_MARKER}$`);
 const validateWorkflowKey = (input)=>{
     if (!(0, _string.checkSlugifiedFormat)(input, {
         onlyLowerCase: true

@@ -126,6 +123,17 @@ const throttleStepSummaryLines = (step)=>{
         `Throttle limit: ${throttle_limit}`
     ];
 };
+const branchStepSummaryLines = (step)=>{
+    if (step.type !== _types.StepType.Branch) return [];
+    let stepsCount = 0;
+    for (const branch of step.branches){
+        stepsCount += doCountSteps(branch.steps);
+    }
+    return [
+        `Branches: ${step.branches.length}`,
+        `Steps: ${stepsCount}`
+    ];
+};
 const delayStepSummaryLines = (step)=>{
     if (step.type !== _types.StepType.Delay) return [];
     const { delay_for: duration , delay_until_field_path: field_path } = step.settings;

@@ -157,6 +165,7 @@ const formatStepSummary = (step)=>{
         ...batchStepSummaryLines(step),
         ...delayStepSummaryLines(step),
         ...httpFetchStepSummaryLines(step),
+        ...branchStepSummaryLines(step),
         ...throttleStepSummaryLines(step),
         // Extra line between step rows to make it easier on the eye.
         " "

@@ -224,3 +233,16 @@ const ensureValidCommandTarget = async (props, runContext)=>{
     }
     return _core.ux.error("Missing 1 required arg:\nworkflowKey");
 };
+const doCountSteps = (steps)=>{
+    let count = 0;
+    for (const step of steps){
+        count += 1;
+        if (step.type === _types.StepType.Branch) {
+            for (const branch of step.branches){
+                count += doCountSteps(branch.steps);
+            }
+        }
+    }
+    return count;
+};
+const countSteps = (workflow)=>doCountSteps(workflow.steps);
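The new countSteps export tallies steps recursively, so steps nested inside branch steps count toward the total. A small usage sketch; the workflow object is a hypothetical example and the "branch" string stands in for the compiled _types.StepType.Branch value:

// Sketch: how countSteps/doCountSteps tally a workflow containing a branch.
const doCountSteps = (steps) => {
  let count = 0;
  for (const step of steps) {
    count += 1; // count the step itself, branch steps included
    if (step.type === "branch") {
      for (const branch of step.branches) {
        count += doCountSteps(branch.steps); // then recurse into each branch
      }
    }
  }
  return count;
};

const workflow = {
  steps: [
    { ref: "email_1", type: "channel" },
    {
      ref: "branch_1",
      type: "branch",
      branches: [
        { steps: [{ ref: "delay_1", type: "delay" }] },
        { steps: [{ ref: "sms_1", type: "channel" }] },
      ],
    },
  ],
};

console.log(doCountSteps(workflow.steps)); // 4: email_1, branch_1, delay_1, sms_1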
package/dist/lib/marshal/workflow/reader.js

@@ -10,9 +10,7 @@ function _export(target, all) {
 }
 _export(exports, {
     readWorkflowDir: ()=>readWorkflowDir,
-    readAllForCommandTarget: ()=>readAllForCommandTarget,
-    checkIfValidExtractedFilePathFormat: ()=>checkIfValidExtractedFilePathFormat,
-    readExtractedFileSync: ()=>readExtractedFileSync
+    readAllForCommandTarget: ()=>readAllForCommandTarget
 });
 const _nodePath = /*#__PURE__*/ _interopRequireWildcard(require("node:path"));
 const _core = require("@oclif/core");

@@ -20,9 +18,9 @@ const _fsExtra = /*#__PURE__*/ _interopRequireWildcard(require("fs-extra"));
 const _lodash = require("lodash");
 const _error = require("../../helpers/error");
 const _json = require("../../helpers/json");
-const _liquid = require("../../helpers/liquid");
 const _object = require("../../helpers/object");
-const _helpers = require("./helpers");
+const _helpers = require("../shared/helpers");
+const _helpers1 = require("./helpers");
 function _getRequireWildcardCache(nodeInterop) {
     if (typeof WeakMap !== "function") return null;
     var cacheBabelInterop = new WeakMap();

@@ -65,102 +63,6 @@ function _interopRequireWildcard(obj, nodeInterop) {
 // For now we support up to two levels of content extraction in workflow.json.
 // (e.g. workflow.json, then visual_blocks.json)
 const MAX_EXTRACTION_LEVEL = 2;
-// The following files are exepected to have valid json content, and should be
-// decoded and joined into the main workflow.json.
-const DECODABLE_JSON_FILES = new Set([
-    _helpers.VISUAL_BLOCKS_JSON
-]);
-/*
- * Validate the file path format of an extracted field. The file path must be:
- *
- * 1) Expressed as a relative path.
- *
- * For exmaple:
- *   subject@: "email_1/subject.html"              // GOOD
- *   subject@: "./email_1/subject.html"            // GOOD
- *   subject@: "/workflow-x/email_1/subject.html"  // BAD
- *
- * 2) The resolved path must be contained inside the workflow directory
- *
- * For exmaple (workflow-y is a different workflow dir in this example):
- *   subject@: "./email_1/subject.html"             // GOOD
- *   subject@: "../workflow-y/email_1/subject.html" // BAD
- *
- * Note: does not validate the presence of the file nor the uniqueness of the
- * file path.
- */ const checkIfValidExtractedFilePathFormat = (relpath, sourceFileAbspath)=>{
-    if (typeof relpath !== "string") return false;
-    if (_nodePath.isAbsolute(relpath)) return false;
-    const extractedFileAbspath = _nodePath.resolve(sourceFileAbspath, relpath);
-    const pathDiff = _nodePath.relative(sourceFileAbspath, extractedFileAbspath);
-    return !pathDiff.startsWith("..");
-};
-/*
- * Validate the extracted file path based on its format and uniqueness (but not
- * the presence).
- *
- * Note, the uniqueness check is based on reading from and writing to
- * uniqueFilePaths, which is MUTATED in place.
- */ const validateExtractedFilePath = (val, workflowDirCtx, uniqueFilePaths, objPathToFieldStr)=>{
-    const workflowJsonPath = _nodePath.resolve(workflowDirCtx.abspath, _helpers.WORKFLOW_JSON);
-    // Validate the file path format, and that it is unique per workflow.
-    if (!checkIfValidExtractedFilePathFormat(val, workflowJsonPath) || typeof val !== "string" || val in uniqueFilePaths) {
-        const error = new _error.JsonDataError("must be a relative path string to a unique file within the directory", objPathToFieldStr);
-        return error;
-    }
-    // Keep track of all the valid extracted file paths that have been seen, so
-    // we can validate each file path's uniqueness as we traverse.
-    uniqueFilePaths[val] = true;
-    return undefined;
-};
-const readExtractedFileSync = (relpath, workflowDirCtx, objPathToFieldStr = "")=>{
-    // Check if the file actually exists at the given file path.
-    const abspath = _nodePath.resolve(workflowDirCtx.abspath, relpath);
-    const exists = _fsExtra.pathExistsSync(abspath);
-    if (!exists) {
-        const error = new _error.JsonDataError("must be a relative path string to a file that exists", objPathToFieldStr);
-        return [
-            undefined,
-            error
-        ];
-    }
-    // Read the file and check for valid liquid syntax given it is supported
-    // across all message templates and file extensions.
-    const contentStr = _fsExtra.readFileSync(abspath, "utf8");
-    const liquidParseError = (0, _liquid.validateLiquidSyntax)(contentStr);
-    if (liquidParseError) {
-        const error = new _error.JsonDataError(`points to a file that contains invalid liquid syntax (${relpath})\n\n` + (0, _error.formatErrors)([
-            liquidParseError
-        ], {
-            indentBy: 2
-        }), objPathToFieldStr);
-        return [
-            undefined,
-            error
-        ];
-    }
-    // If the file is expected to contain decodable json, then parse the contentStr
-    // as such.
-    const fileName = _nodePath.basename(abspath.toLowerCase());
-    const decodable = DECODABLE_JSON_FILES.has(fileName);
-    const [content, jsonParseErrors] = decodable ? (0, _json.parseJson)(contentStr) : [
-        contentStr,
-        []
-    ];
-    if (jsonParseErrors.length > 0) {
-        const error = new _error.JsonDataError(`points to a file with invalid content (${relpath})\n\n` + (0, _error.formatErrors)(jsonParseErrors, {
-            indentBy: 2
-        }), objPathToFieldStr);
-        return [
-            undefined,
-            error
-        ];
-    }
-    return [
-        content,
-        undefined
-    ];
-};
 const joinExtractedFiles = async (workflowDirCtx, workflowJson)=>{
     // Tracks any errors encountered during traversal. Mutated in place.
     const errors = [];

@@ -192,7 +94,7 @@ const joinExtractedFiles = async (workflowDirCtx, workflowJson)=>{
         const lastFound = (0, _object.getLastFound)(prevJoinedFilePaths, parts);
         const prevJoinedFilePath = typeof lastFound === "string" ? lastFound : undefined;
         const rebasedFilePath = prevJoinedFilePath ? _nodePath.join(_nodePath.dirname(prevJoinedFilePath), value) : value;
-        const invalidFilePathError = validateExtractedFilePath(rebasedFilePath, workflowDirCtx, uniqueFilePaths, objPathToFieldStr);
+        const invalidFilePathError = (0, _helpers.validateExtractedFilePath)(rebasedFilePath, _nodePath.resolve(workflowDirCtx.abspath, _helpers1.WORKFLOW_JSON), uniqueFilePaths, objPathToFieldStr);
         if (invalidFilePathError) {
             errors.push(invalidFilePathError);
             // Wipe the invalid file path in the node so the final workflow json

@@ -206,7 +108,7 @@ const joinExtractedFiles = async (workflowDirCtx, workflowJson)=>{
         }
         // By this point we have a valid extracted file path, so attempt to read
         // the file at the file path.
-        const [content, readExtractedFileError] = readExtractedFileSync(rebasedFilePath, workflowDirCtx, objPathToFieldStr);
+        const [content, readExtractedFileError] = (0, _helpers.readExtractedFileSync)(rebasedFilePath, workflowDirCtx, objPathToFieldStr);
         if (readExtractedFileError) {
             errors.push(readExtractedFileError);
             // Replace the extracted file path with the rebased one, and set the

@@ -236,7 +138,7 @@ const readWorkflowDir = async (workflowDirCtx, opts = {})=>{
     const { withExtractedFiles =false , withReadonlyField =false } = opts;
     const dirExists = await _fsExtra.pathExists(abspath);
     if (!dirExists) throw new Error(`${abspath} does not exist`);
-    const workflowJsonPath = await (0, _helpers.lsWorkflowJson)(abspath);
+    const workflowJsonPath = await (0, _helpers1.lsWorkflowJson)(abspath);
     if (!workflowJsonPath) throw new Error(`${abspath} is not a workflow directory`);
     const result = await (0, _json.readJson)(workflowJsonPath);
     if (!result[0]) return result;

@@ -259,7 +161,7 @@ const readWorkflowDir = async (workflowDirCtx, opts = {})=>{
         // eslint-disable-next-line no-await-in-loop
         const [workflow, readErrors] = await readWorkflowDir(workflowDirCtx, opts);
         if (readErrors.length > 0) {
-            const workflowJsonPath = _nodePath.resolve(workflowDirCtx.abspath, _helpers.WORKFLOW_JSON);
+            const workflowJsonPath = _nodePath.resolve(workflowDirCtx.abspath, _helpers1.WORKFLOW_JSON);
             const e = new _error.SourceError((0, _error.formatErrors)(readErrors), workflowJsonPath);
             errors.push(e);
             continue;

@@ -298,7 +200,7 @@ const readAllForCommandTarget = async (target, opts = {})=>{
             type: "workflow",
             key: dirent.name,
             abspath,
-            exists: await (0, _helpers.isWorkflowDir)(abspath)
+            exists: await (0, _helpers1.isWorkflowDir)(abspath)
         };
         return workflowDirCtx;
     });
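With the extraction helpers hoisted into shared/helpers, the reader now consumes them through the [value, error] tuple convention visible in the hunks above. A usage sketch; the require path, directory context, and object path string are illustrative assumptions, not documented package API:

// Sketch: consuming readExtractedFileSync's [content, error] tuple.
const { readExtractedFileSync } = require("@knocklabs/cli/dist/lib/marshal/shared/helpers");

const dirCtx = { abspath: "/proj/workflow-x", exists: true };
const [content, error] = readExtractedFileSync(
  "email_1/subject.txt",
  dirCtx,
  "steps[0].template.subject@"
);
if (error) {
  // A JsonDataError carrying the object path of the offending field, used to
  // surface messages like `steps[0].template.subject@ must be ...`.
  console.error(error.message);
} else {
  console.log(content);
}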
package/dist/lib/marshal/workflow/writer.js

@@ -23,8 +23,10 @@ const _lodash = require("lodash");
 const _const = require("../../helpers/const");
 const _json = require("../../helpers/json");
 const _object = require("../../helpers/object");
-const _helpers = require("./helpers");
+const _helpers = require("../shared/helpers");
+const _helpers1 = require("./helpers");
 const _reader = require("./reader");
+const _types = require("./types");
 function _getRequireWildcardCache(nodeInterop) {
     if (typeof WeakMap !== "function") return null;
     var cacheBabelInterop = new WeakMap();

@@ -121,6 +123,9 @@ const formatExtractedFilePath = (objPathParts, fileExt, opts = {})=>{
     ];
     return _nodePath.join(...paths).toLowerCase();
 };
+const NON_RECURSIVELY_TRAVERSABLE_FIELDS_FOR_EXTRACTION = new Set([
+    "branches"
+]);
 const compileExtractionSettings = (node, objPathParts = [])=>{
     const map = new Map();
     const compileRecursively = (item, parts)=>{

@@ -138,10 +143,14 @@ const compileExtractionSettings = (node, objPathParts = [])=>{
                 key
             ], extractableFields[key]);
         }
-        compileRecursively(val, [
-            ...parts,
-            key
-        ]);
+        // Recursively exam current field for any additionally extractable data
+        // within, except for disallowed fields
+        if (!NON_RECURSIVELY_TRAVERSABLE_FIELDS_FOR_EXTRACTION.has(key)) {
+            compileRecursively(val, [
+                ...parts,
+                key
+            ]);
+        }
     }
     return;
 }

@@ -167,35 +176,23 @@ const compileExtractionSettings = (node, objPathParts = [])=>{
         return 0;
     }));
 };
-/*
- * For a given workflow payload (and its local workflow reference), this function
- * builds a "workflow directory bundle", which is an obj made up of all the
- * relative file paths (within the workflow directory) and its file content to
- * write the workflow directory.
- *
- * Every workflow will always have a workflow.json file, so every bundle includes
- * it and its content at minimum. To the extent the workflow includes any
- * extractable fields, those fields content get extracted out and added to the
- * bundle.
- *
- * Important things to keep in mind re: content extraction:
- * 1. There can be multiple places in workflow json where content extraction
- *    happens.
- * 2. There can be multiple levels of content extraction happening, currently
- *    at a maximum of 2 levels.
- *
- * The way this function works and handles the content extraction is by:
- * 1. Traversing the given step node, and compiling all annotated extraction
- *    settings by the object path in the node *ordered from leaf to root*.
- * 2. Iterate over compiled extraction settings from leaf to root, and start
- *    extracting out the field as needed. In case the node that needs to be
- *    extracted out contains extracted file paths, then those file paths get
- *    rebased to relative to the referenced file.
- */ const buildWorkflowDirBundle = (remoteWorkflow, localWorkflow = {})=>{
-    const bundle = {};
-    const mutWorkflow = (0, _lodash.cloneDeep)(remoteWorkflow);
-    const localWorkflowStepsByRef = (0, _lodash.keyBy)(localWorkflow.steps || [], "ref");
-    for (const step of mutWorkflow.steps){
+const keyLocalWorkflowStepsByRef = (steps, result = {})=>{
+    if (!Array.isArray(steps)) return result;
+    for (const step of steps){
+        if (!(0, _lodash.isPlainObject)(step)) continue;
+        if (!step.ref) continue;
+        result[step.ref] = step;
+        if (step.type === _types.StepType.Branch && Array.isArray(step.branches)) {
+            for (const branch of step.branches){
+                if (!(0, _lodash.isPlainObject)(branch)) continue;
+                result = keyLocalWorkflowStepsByRef(branch.steps, result);
+            }
+        }
+    }
+    return result;
+};
+const recursivelyBuildWorkflowDirBundle = (bundle, steps, localWorkflowStepsByRef)=>{
+    for (const step of steps){
         // A compiled map of extraction settings of every field in the step where
         // we support content extraction, organized by each field's object path.
         const compiledExtractionSettings = compileExtractionSettings(step);

@@ -217,7 +214,7 @@ const compileExtractionSettings = (node, objPathParts = [])=>{
         // First figure out the relative file path (within the workflow directory)
         // for the extracted file. If already extracted in the local workflow,
         // then use that; otherwise format a new file path.
-        const relpath = extractedFilePath || formatExtractedFilePath(objPathParts, fileExt, {
+        const relpath = typeof extractedFilePath === "string" ? extractedFilePath : formatExtractedFilePath(objPathParts, fileExt, {
             unnestDirsBy: 1,
             nestIntoDirs: [
                 step.ref

@@ -242,6 +239,7 @@ const compileExtractionSettings = (node, objPathParts = [])=>{
         // bundle for writing to the file system later. Then replace the field
         // content with the extracted file path and mark the field as extracted
         // with @ suffix.
+        //
         // TODO: Consider guarding against an edge case, and check if the relpath
         // already exists in the bundle, and if so make the relpath unique.
         (0, _lodash.set)(bundle, [

@@ -250,10 +248,48 @@ const compileExtractionSettings = (node, objPathParts = [])=>{
         (0, _lodash.set)(step, `${objPathStr}${_helpers.FILEPATH_MARKER}`, relpath);
         (0, _lodash.unset)(step, objPathParts);
         }
+        // Lastly, recurse thru any branches that exist in the workflow tree
+        if (step.type === _types.StepType.Branch) {
+            for (const branch of step.branches){
+                recursivelyBuildWorkflowDirBundle(bundle, branch.steps, localWorkflowStepsByRef);
+            }
+        }
     }
-    // Then, prepare the workflow data to be written into a workflow json file.
+};
+/*
+ * For a given workflow payload (and its local workflow reference), this function
+ * builds a "workflow directory bundle", which is an obj made up of all the
+ * relative file paths (within the workflow directory) and its file content to
+ * write the workflow directory.
+ *
+ * Every workflow will always have a workflow.json file, so every bundle includes
+ * it and its content at minimum. To the extent the workflow includes any
+ * extractable fields, those fields content get extracted out and added to the
+ * bundle.
+ *
+ * Important things to keep in mind re: content extraction:
+ * 1. There can be multiple places in workflow json where content extraction
+ *    happens.
+ * 2. There can be multiple levels of content extraction happening, currently
+ *    at a maximum of 2 levels.
+ *
+ * The way this function works and handles the content extraction is by:
+ * 1. Traversing the given step node, and compiling all annotated extraction
+ *    settings by the object path in the node *ordered from leaf to root*.
+ * 2. Iterate over compiled extraction settings from leaf to root, and start
+ *    extracting out the field as needed. In case the node that needs to be
+ *    extracted out contains extracted file paths, then those file paths get
+ *    rebased to relative to the referenced file.
+ */ const buildWorkflowDirBundle = (remoteWorkflow, localWorkflow = {})=>{
+    const bundle = {};
+    const mutWorkflow = (0, _lodash.cloneDeep)(remoteWorkflow);
+    const localWorkflowStepsByRef = keyLocalWorkflowStepsByRef(localWorkflow.steps);
+    // Recursively traverse the workflow step tree, mutating it and the bundle
+    // along the way
+    recursivelyBuildWorkflowDirBundle(bundle, mutWorkflow.steps, localWorkflowStepsByRef);
+    // Then, prepare the workflow data to be written into a workflow json file.
     return (0, _lodash.set)(bundle, [
-        _helpers.WORKFLOW_JSON
+        _helpers1.WORKFLOW_JSON
     ], toWorkflowJson(mutWorkflow));
 };
 const writeWorkflowDirFromData = async (workflowDirCtx, remoteWorkflow)=>{

@@ -274,7 +310,7 @@ const writeWorkflowDirFromBundle = async (workflowDirCtx, workflowDirBundle)=>{
     }
     const promises = Object.entries(workflowDirBundle).map(([relpath, fileContent])=>{
         const filePath = _nodePath.resolve(workflowDirCtx.abspath, relpath);
-        return relpath === _helpers.WORKFLOW_JSON ? _fsExtra.outputJson(filePath, fileContent, {
+        return relpath === _helpers1.WORKFLOW_JSON ? _fsExtra.outputJson(filePath, fileContent, {
             spaces: _json.DOUBLE_SPACES
         }) : _fsExtra.outputFile(filePath, fileContent);
     });

@@ -310,7 +346,7 @@ const writeWorkflowDirFromBundle = async (workflowDirCtx, workflowDirBundle)=>{
     const promises = dirents.map(async (dirent)=>{
         const direntName = dirent.name.toLowerCase();
         const direntPath = _nodePath.resolve(indexDirCtx.abspath, direntName);
-        if (await (0, _helpers.isWorkflowDir)(direntPath) && workflowsByKey[direntName]) {
+        if (await (0, _helpers1.isWorkflowDir)(direntPath) && workflowsByKey[direntName]) {
             return;
         }
         await _fsExtra.remove(direntPath);

@@ -333,7 +369,7 @@ const writeWorkflowsIndexDir = async (indexDirCtx, remoteWorkflows)=>{
             type: "workflow",
             key: workflow.key,
             abspath: workflowDirPath,
-            exists: indexDirCtx.exists ? await (0, _helpers.isWorkflowDir)(workflowDirPath) : false
+            exists: indexDirCtx.exists ? await (0, _helpers1.isWorkflowDir)(workflowDirPath) : false
         };
         return writeWorkflowDirFromData(workflowDirCtx, workflow);
     });
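Taken together, the writer changes mean buildWorkflowDirBundle now walks branch steps too, still producing a flat bundle keyed by relative path. A sketch of the resulting shape; every key and value below is a hypothetical example:

// Sketch: a hypothetical "workflow directory bundle" as built above. Keys are
// paths relative to the workflow directory; extracted fields in workflow.json
// are suffixed with the "@" FILEPATH_MARKER and point at sibling files.
const bundle = {
  "workflow.json": {
    key: "my-workflow",
    steps: [
      {
        ref: "email_1",
        type: "channel",
        template: {
          "subject@": "email_1/subject.txt", // extracted; content lives below
        },
      },
    ],
  },
  "email_1/subject.txt": "Hello {{ recipient.name }}",
};

// Per the diff above, writeWorkflowDirFromBundle writes the workflow.json entry
// with outputJson and every other entry with outputFile.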
package/dist/lib/run-context/loader.js

@@ -11,6 +11,7 @@ Object.defineProperty(exports, "load", {
     get: ()=>load
 });
 const _nodePath = /*#__PURE__*/ _interopRequireWildcard(require("node:path"));
+const _emailLayout = /*#__PURE__*/ _interopRequireWildcard(require("../marshal/email-layout"));
 const _translation = /*#__PURE__*/ _interopRequireWildcard(require("../marshal/translation"));
 const _workflow = /*#__PURE__*/ _interopRequireWildcard(require("../marshal/workflow"));
 function _getRequireWildcardCache(nodeInterop) {

@@ -63,6 +64,16 @@ const evaluateRecursively = async (ctx, currDir)=>{
             exists: true
         };
     }
+    // Check if we are inside a layout directory, and if so update the context.
+    const isEmailLayoutDir = await _emailLayout.isEmailLayoutDir(currDir);
+    if (!ctx.resourceDir && isEmailLayoutDir) {
+        ctx.resourceDir = {
+            type: "email_layout",
+            key: _nodePath.basename(currDir),
+            abspath: currDir,
+            exists: true
+        };
+    }
     // NOTE: Must keep this check as last in the order of directory-type checks
     // since the `isTranslationDir` only checks that the directory name is a
     // valid locale name.