@knocklabs/cli 0.1.4 → 0.1.6
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +152 -6
- package/dist/commands/layout/get.js +94 -0
- package/dist/commands/layout/list.js +82 -0
- package/dist/commands/layout/pull.js +195 -0
- package/dist/commands/layout/push.js +155 -0
- package/dist/commands/layout/validate.js +130 -0
- package/dist/commands/workflow/get.js +39 -7
- package/dist/commands/workflow/list.js +4 -1
- package/dist/lib/api-v1.js +47 -0
- package/dist/lib/marshal/email-layout/helpers.js +124 -0
- package/dist/lib/marshal/email-layout/index.js +19 -0
- package/dist/lib/marshal/email-layout/reader.js +193 -0
- package/dist/lib/marshal/email-layout/types.js +4 -0
- package/dist/lib/marshal/email-layout/writer.js +240 -0
- package/dist/lib/marshal/shared/helpers.js +135 -0
- package/dist/lib/marshal/workflow/generator.js +3 -2
- package/dist/lib/marshal/workflow/helpers.js +27 -5
- package/dist/lib/marshal/workflow/reader.js +8 -106
- package/dist/lib/marshal/workflow/types.js +1 -0
- package/dist/lib/marshal/workflow/writer.js +76 -40
- package/dist/lib/run-context/loader.js +11 -0
- package/oclif.manifest.json +318 -1
- package/package.json +10 -7
|
@@ -0,0 +1,193 @@
|
|
|
1
|
+
"use strict";
// SWC-compiled CommonJS module prelude: marks this module as an ES module and
// wires up live, getter-based re-exports. Machine-generated interop code —
// do not hand-edit the helper logic.
Object.defineProperty(exports, "__esModule", {
    value: true
});
// Defines every entry of `all` on `target` as an enumerable getter so the
// exported bindings stay "live" (reads always see the current value).
function _export(target, all) {
    for(var name in all)Object.defineProperty(target, name, {
        enumerable: true,
        get: all[name]
    });
}
_export(exports, {
    readAllForCommandTarget: ()=>readAllForCommandTarget,
    readEmailLayoutDir: ()=>readEmailLayoutDir
});
const _nodePath = /*#__PURE__*/ _interopRequireDefault(require("node:path"));
const _core = require("@oclif/core");
const _fsExtra = /*#__PURE__*/ _interopRequireWildcard(require("fs-extra"));
const _lodash = require("lodash");
const _error = require("../../helpers/error");
const _json = require("../../helpers/json");
const _object = require("../../helpers/object");
const _helpers = require("../shared/helpers");
const _helpers1 = require("./helpers");
// Wraps a CommonJS export so it can be consumed as a `default` import.
function _interopRequireDefault(obj) {
    return obj && obj.__esModule ? obj : {
        default: obj
    };
}
// Lazily creates the two WeakMap caches (babel-style vs node-style interop)
// used by _interopRequireWildcard, then rebinds itself to a simple selector.
function _getRequireWildcardCache(nodeInterop) {
    if (typeof WeakMap !== "function") return null;
    var cacheBabelInterop = new WeakMap();
    var cacheNodeInterop = new WeakMap();
    return (_getRequireWildcardCache = function(nodeInterop) {
        return nodeInterop ? cacheNodeInterop : cacheBabelInterop;
    })(nodeInterop);
}
// Namespace-import interop: copies own properties (preserving getters/setters
// via property descriptors) onto a fresh object with the module itself under
// `default`, caching the result per source object.
function _interopRequireWildcard(obj, nodeInterop) {
    if (!nodeInterop && obj && obj.__esModule) {
        return obj;
    }
    if (obj === null || typeof obj !== "object" && typeof obj !== "function") {
        return {
            default: obj
        };
    }
    var cache = _getRequireWildcardCache(nodeInterop);
    if (cache && cache.has(obj)) {
        return cache.get(obj);
    }
    var newObj = {};
    var hasPropertyDescriptor = Object.defineProperty && Object.getOwnPropertyDescriptor;
    for(var key in obj){
        if (key !== "default" && Object.prototype.hasOwnProperty.call(obj, key)) {
            var desc = hasPropertyDescriptor ? Object.getOwnPropertyDescriptor(obj, key) : null;
            if (desc && (desc.get || desc.set)) {
                Object.defineProperty(newObj, key, desc);
            } else {
                newObj[key] = obj[key];
            }
        }
    }
    newObj.default = obj;
    if (cache) {
        cache.set(obj, newObj);
    }
    return newObj;
}
|
|
68
|
+
/*
 * Reads layout data for the given command target: either a single layout
 * directory, or every layout directory found under an index directory.
 *
 * Returns a [layouts, errors] pair (via readEmailLayoutsDirs); exits via
 * ux.error when the target path does not exist, and throws on an unknown
 * target type.
 */ const readAllForCommandTarget = async (target, opts = {})=>{
    const { type: targetType , context: targetCtx } = target;
    if (!targetCtx.exists) {
        const subject = targetType === "emailLayoutDir" ? "a layout directory at" : "layout directories in";
        return _core.ux.error(`Cannot locate ${subject} \`${targetCtx.abspath}\``);
    }
    switch(targetType){
        case "emailLayoutDir":
            {
                return readEmailLayoutsDirs([
                    targetCtx
                ], opts);
            }
        case "emailLayoutsIndexDir":
            {
                // Build a layout-dir context for every entry in the index
                // directory, then keep only those that are real layout dirs.
                const dirents = await _fsExtra.readdir(targetCtx.abspath, {
                    withFileTypes: true
                });
                const promises = dirents.map(async (dirent)=>{
                    const abspath = _nodePath.default.resolve(targetCtx.abspath, dirent.name);
                    return {
                        type: "email_layout",
                        key: dirent.name,
                        abspath,
                        exists: await (0, _helpers1.isEmailLayoutDir)(abspath)
                    };
                });
                const layoutDirCtxs = (await Promise.all(promises)).filter((layoutDirCtx)=>layoutDirCtx.exists);
                return readEmailLayoutsDirs(layoutDirCtxs, opts);
            }
        default:
            // Bug fix: interpolate the target *type*; the previous message
            // interpolated the target object itself, which stringifies to
            // "[object Object]" and hid the actual invalid value.
            throw new Error(`Invalid layout command target: ${targetType}`);
    }
};
|
|
103
|
+
/*
 * For the given list of layout directory contexts, reads each layout dir and
 * returns a [layouts, errors] pair. A dir that fails to read contributes a
 * SourceError (anchored at its layout.json) instead of a layout entry.
 */ const readEmailLayoutsDirs = async (layoutDirCtxs, opts = {})=>{
    const layouts = [];
    const errors = [];
    for (const dirCtx of layoutDirCtxs){
        // eslint-disable-next-line no-await-in-loop
        const [layout, readErrors] = await readEmailLayoutDir(dirCtx, opts);
        if (readErrors.length === 0) {
            layouts.push({
                ...dirCtx,
                content: layout
            });
            continue;
        }
        // Attribute read errors to the dir's layout.json file.
        const layoutJsonPath = _nodePath.default.resolve(dirCtx.abspath, _helpers1.LAYOUT_JSON);
        errors.push(new _error.SourceError((0, _error.formatErrors)(readErrors), layoutJsonPath));
    }
    return [
        layouts,
        errors
    ];
};
|
|
128
|
+
/*
 * Reads a single layout directory's layout.json and returns a
 * [layoutJson, errors] pair. Optionally strips the "__readonly" field and/or
 * joins extracted files back into the object. Throws if the directory does
 * not exist or is not a layout directory.
 */ const readEmailLayoutDir = async (layoutDirCtx, opts = {})=>{
    const { abspath } = layoutDirCtx;
    const { withExtractedFiles =false , withReadonlyField =false } = opts;
    if (!await _fsExtra.pathExists(abspath)) {
        throw new Error(`${abspath} does not exist`);
    }
    const layoutJsonPath = await (0, _helpers1.lsEmailLayoutJson)(abspath);
    if (!layoutJsonPath) {
        throw new Error(`${abspath} is not a layout directory`);
    }
    // On a parse failure, propagate [undefined, errors] from readJson as-is.
    const result = await (0, _json.readJson)(layoutJsonPath);
    if (!result[0]) return result;
    const [parsed] = result;
    const layoutJson = withReadonlyField ? parsed : (0, _object.omitDeep)(parsed, [
        "__readonly"
    ]);
    return withExtractedFiles ? joinExtractedFiles(layoutDirCtx, layoutJson) : [
        layoutJson,
        []
    ];
};
|
|
146
|
+
/*
 * Given a layout json object, reads all extracted files it references (fields
 * whose key carries the "@" file-path marker) and returns the object with the
 * extracted content joined back in, plus any errors encountered.
 *
 * NOTE: `layoutJson` is mutated in place via lodash `set` during traversal.
 */ const joinExtractedFiles = async (layoutDirCtx, layoutJson)=>{
    // Tracks any errors encountered during traversal. Mutated in place.
    const errors = [];
    // Tracks each valid extracted file path seen so far, keyed by the path
    // itself. Mutated in place by validateExtractedFilePath and used to
    // enforce the uniqueness of every extracted path encountered.
    const uniqueFilePaths = {};
    (0, _object.mapValuesDeep)(layoutJson, (relpath, key, parts)=>{
        // Only fields whose key ends with the @ marker reference a file.
        if (!_helpers.FILEPATH_MARKED_RE.test(key)) return;
        const objPathToFieldStr = _object.ObjPath.stringify(parts);
        // The same object path with the marker stripped — where the inlined
        // content lives.
        const inlinObjPathStr = objPathToFieldStr.replace(_helpers.FILEPATH_MARKED_RE, "");
        // If inlined content is already present, there is nothing to join.
        if ((0, _lodash.hasIn)(layoutJson, inlinObjPathStr)) return;
        // Validate the extracted path's format and uniqueness relative to
        // this dir's layout.json.
        const invalidFilePathError = (0, _helpers.validateExtractedFilePath)(relpath, _nodePath.default.resolve(layoutDirCtx.abspath, _helpers1.LAYOUT_JSON), uniqueFilePaths, objPathToFieldStr);
        if (invalidFilePathError) {
            errors.push(invalidFilePathError);
            // Wipe both the inlined field and the invalid file path so the
            // final object carries only valid file paths (the layout writer
            // relies on this when pulling).
            (0, _lodash.set)(layoutJson, inlinObjPathStr, undefined);
            (0, _lodash.set)(layoutJson, objPathToFieldStr, undefined);
            return;
        }
        // Valid extracted file path — attempt to read the file.
        const [content, readExtractedFileError] = (0, _helpers.readExtractedFileSync)(relpath, layoutDirCtx, key);
        if (readExtractedFileError) {
            errors.push(readExtractedFileError);
            // Keep the (valid) file path on the marked field but blank the
            // inlined field, so we know not to attempt inlining again.
            (0, _lodash.set)(layoutJson, objPathToFieldStr, relpath);
            (0, _lodash.set)(layoutJson, inlinObjPathStr, undefined);
            return;
        }
        // Success: keep the marked file path and inline the file content
        // alongside it.
        (0, _lodash.set)(layoutJson, objPathToFieldStr, relpath);
        (0, _lodash.set)(layoutJson, inlinObjPathStr, content);
    });
    return [
        layoutJson,
        errors
    ];
};
|
|
@@ -0,0 +1,240 @@
|
|
|
1
|
+
"use strict";
// SWC-compiled CommonJS module prelude: marks this module as an ES module and
// wires up live, getter-based re-exports. Machine-generated interop code —
// do not hand-edit the helper logic.
Object.defineProperty(exports, "__esModule", {
    value: true
});
// Defines every entry of `all` on `target` as an enumerable getter so the
// exported bindings stay "live" (reads always see the current value).
function _export(target, all) {
    for(var name in all)Object.defineProperty(target, name, {
        enumerable: true,
        get: all[name]
    });
}
_export(exports, {
    writeEmailLayoutDirFromData: ()=>writeEmailLayoutDirFromData,
    writeEmailLayoutIndexDir: ()=>writeEmailLayoutIndexDir,
    buildEmailLayoutDirBundle: ()=>buildEmailLayoutDirBundle,
    pruneLayoutsIndexDir: ()=>pruneLayoutsIndexDir,
    toEmailLayoutJson: ()=>toEmailLayoutJson
});
const _nodePath = /*#__PURE__*/ _interopRequireDefault(require("node:path"));
const _fsExtra = /*#__PURE__*/ _interopRequireWildcard(require("fs-extra"));
const _lodash = require("lodash");
const _const = require("../../helpers/const");
const _json = require("../../helpers/json");
const _object = require("../../helpers/object");
const _helpers = require("../shared/helpers");
const _helpers1 = require("./helpers");
const _reader = require("./reader");
// Wraps a CommonJS export so it can be consumed as a `default` import.
function _interopRequireDefault(obj) {
    return obj && obj.__esModule ? obj : {
        default: obj
    };
}
// Lazily creates the two WeakMap caches (babel-style vs node-style interop)
// used by _interopRequireWildcard, then rebinds itself to a simple selector.
function _getRequireWildcardCache(nodeInterop) {
    if (typeof WeakMap !== "function") return null;
    var cacheBabelInterop = new WeakMap();
    var cacheNodeInterop = new WeakMap();
    return (_getRequireWildcardCache = function(nodeInterop) {
        return nodeInterop ? cacheNodeInterop : cacheBabelInterop;
    })(nodeInterop);
}
// Namespace-import interop: copies own properties (preserving getters/setters
// via property descriptors) onto a fresh object with the module itself under
// `default`, caching the result per source object.
function _interopRequireWildcard(obj, nodeInterop) {
    if (!nodeInterop && obj && obj.__esModule) {
        return obj;
    }
    if (obj === null || typeof obj !== "object" && typeof obj !== "function") {
        return {
            default: obj
        };
    }
    var cache = _getRequireWildcardCache(nodeInterop);
    if (cache && cache.has(obj)) {
        return cache.get(obj);
    }
    var newObj = {};
    var hasPropertyDescriptor = Object.defineProperty && Object.getOwnPropertyDescriptor;
    for(var key in obj){
        if (key !== "default" && Object.prototype.hasOwnProperty.call(obj, key)) {
            var desc = hasPropertyDescriptor ? Object.getOwnPropertyDescriptor(obj, key) : null;
            if (desc && (desc.get || desc.set)) {
                Object.defineProperty(newObj, key, desc);
            } else {
                newObj[key] = obj[key];
            }
        }
    }
    newObj.default = obj;
    if (cache) {
        cache.set(obj, newObj);
    }
    return newObj;
}
|
|
71
|
+
/* Traverses the given email layout data and compiles the extraction settings
 * of every extractable field into a map keyed by object path parts.
 *
 * NOTE: Currently we do NOT support content extraction at nested levels for
 * email layouts, so only top-level keys are considered.
 */ const compileExtractionSettings = (emailLayout)=>{
    const extractableFields = (0, _lodash.get)(emailLayout, [
        "__annotation",
        "extractable_fields"
    ], {});
    const map = new Map();
    for (const fieldKey of Object.keys(emailLayout)){
        // Record extraction settings for any top-level field that the
        // annotation marks as extractable, keyed by its object path.
        if (!(fieldKey in extractableFields)) continue;
        map.set([
            fieldKey
        ], extractableFields[fieldKey]);
    }
    return map;
};
|
|
92
|
+
/* Sanitizes the email layout content into a format appropriate for reading
 * and writing: readonly fields are moved under a dedicated "__readonly" key
 * and all schema annotations are stripped out.
 */ const toEmailLayoutJson = (emailLayout)=>{
    // Readonly field names come from the layout's schema annotation, if any.
    const annotation = emailLayout.__annotation;
    const readonlyFields = (annotation && annotation.readonly_fields) || [];
    // Partition the layout into readonly fields vs everything else, then
    // tuck the readonly part under "__readonly".
    const [readonly, remainder] = (0, _object.split)(emailLayout, readonlyFields);
    const emailLayoutJson = {
        ...remainder,
        __readonly: readonly
    };
    // Strip out all schema annotations, so as not to expose them to end users.
    return (0, _object.omitDeep)(emailLayoutJson, [
        "__annotation"
    ]);
};
|
|
109
|
+
/*
 * Writes a remote email layout into its layout directory on the file system,
 * backing up any existing directory first and restoring it on failure.
 */ const writeEmailLayoutDirFromData = async (emailLayoutDirCtx, remoteEmailLayout)=>{
    // If the layout directory exists on the file system (i.e. previously
    // pulled before), then read the layout file to use as a reference
    // (e.g. to preserve locally chosen extracted-file paths).
    const [localEmailLayout] = emailLayoutDirCtx.exists ? await (0, _reader.readEmailLayoutDir)(emailLayoutDirCtx, {
        withExtractedFiles: true
    }) : [];
    const bundle = buildEmailLayoutDirBundle(remoteEmailLayout, localEmailLayout);
    const backupDirPath = _nodePath.default.resolve(_const.sandboxDir, (0, _lodash.uniqueId)("backup"));
    try {
        // We store a backup in case there's an error, then start from an
        // empty target directory.
        if (emailLayoutDirCtx.exists) {
            await _fsExtra.copy(emailLayoutDirCtx.abspath, backupDirPath);
            await _fsExtra.emptyDir(emailLayoutDirCtx.abspath);
        }
        // Write every file in the bundle; layout.json is pretty-printed JSON,
        // everything else is raw file content.
        const promises = Object.entries(bundle).map(([relpath, fileContent])=>{
            const filePath = _nodePath.default.resolve(emailLayoutDirCtx.abspath, relpath);
            return relpath === _helpers1.LAYOUT_JSON ? _fsExtra.outputJson(filePath, fileContent, {
                spaces: _json.DOUBLE_SPACES
            }) : _fsExtra.outputFile(filePath, fileContent);
        });
        await Promise.all(promises);
    } catch (error) {
        // In case of any error, wipe the target directory that is likely in a
        // bad state, then restore the backup if one existed before; otherwise
        // remove the partially written directory entirely. Rethrow so the
        // caller sees the original failure.
        if (emailLayoutDirCtx.exists) {
            await _fsExtra.emptyDir(emailLayoutDirCtx.abspath);
            await _fsExtra.copy(backupDirPath, emailLayoutDirCtx.abspath);
        } else {
            await _fsExtra.remove(emailLayoutDirCtx.abspath);
        }
        throw error;
    } finally{
        // Always clean up the backup directory in the temp sandbox.
        await _fsExtra.remove(backupDirPath);
    }
};
|
|
145
|
+
/* For a given email layout payload, builds an "email layout directory
 * bundle": an object mapping relative file paths to their file content. Each
 * extractable field is extracted out into its own file in the bundle, and the
 * remaining layout JSON is added under the layout.json path.
 */ const buildEmailLayoutDirBundle = (remoteEmailLayout, localEmailLayout = {})=>{
    const bundle = {};
    const layoutCopy = (0, _lodash.cloneDeep)(remoteEmailLayout);
    // A map of extraction settings for every extractable field in the layout.
    const compiledExtractionSettings = compileExtractionSettings(layoutCopy);
    // For each extractable field: decide whether to extract, and if so move
    // its content out of the layout object and into the bundle.
    for (const [pathParts, settings] of compiledExtractionSettings){
        // If this layout doesn't have this field path, then we don't extract.
        if (!(0, _lodash.has)(layoutCopy, pathParts)) continue;
        // If the field at this path is already extracted in the local layout,
        // then always extract; otherwise fall back to the settings default.
        const objPathStr = _object.ObjPath.stringify(pathParts);
        const extractedFilePath = (0, _lodash.get)(localEmailLayout, `${objPathStr}${_helpers.FILEPATH_MARKER}`);
        const { default: extractByDefault , file_ext: fileExt } = settings;
        if (!extractedFilePath && !extractByDefault) continue;
        // By this point, we have a field whose content must be extracted.
        const data = (0, _lodash.get)(layoutCopy, pathParts);
        const fileName = pathParts.pop();
        // Reuse the extracted file path from the local layout when present;
        // otherwise use the default `<field>.<ext>` path.
        const relpath = typeof extractedFilePath === "string" ? extractedFilePath : `${fileName}.${fileExt}`;
        // Add the content to the bundle (array path: relpath is one single
        // key even if it contains dots), then replace the field content with
        // the extracted file path under the @-marked key.
        (0, _lodash.set)(bundle, [
            relpath
        ], data);
        (0, _lodash.set)(layoutCopy, `${objPathStr}${_helpers.FILEPATH_MARKER}`, relpath);
        (0, _lodash.unset)(layoutCopy, objPathStr);
    }
    // The bundle now holds all extracted files; finally add the layout JSON
    // relative path + its sanitized content.
    return (0, _lodash.set)(bundle, [
        _helpers1.LAYOUT_JSON
    ], toEmailLayoutJson(layoutCopy));
};
|
|
187
|
+
/*
 * Writes all fetched remote layouts into the layouts index directory, pruning
 * stale entries first, with a backup/restore of the whole index dir in case
 * of failure.
 */ const writeEmailLayoutIndexDir = async (indexDirCtx, remoteEmailLayouts)=>{
    const backupDirPath = _nodePath.default.resolve(_const.sandboxDir, (0, _lodash.uniqueId)("backup"));
    try {
        // Back up the existing index dir, then prune anything that isn't a
        // layout dir matching one of the fetched layouts.
        if (indexDirCtx.exists) {
            await _fsExtra.copy(indexDirCtx.abspath, backupDirPath);
            await pruneLayoutsIndexDir(indexDirCtx, remoteEmailLayouts);
        }
        // Write each remote layout into its own layout directory.
        const writeEmailLayoutDirPromises = remoteEmailLayouts.map(async (remoteEmailLayout)=>{
            const emailLayoutDirPath = _nodePath.default.resolve(indexDirCtx.abspath, remoteEmailLayout.key);
            const emailLayoutDirCtx = {
                type: "email_layout",
                key: remoteEmailLayout.key,
                abspath: emailLayoutDirPath,
                exists: indexDirCtx.exists ? await (0, _helpers1.isEmailLayoutDir)(emailLayoutDirPath) : false
            };
            return writeEmailLayoutDirFromData(emailLayoutDirCtx, remoteEmailLayout);
        });
        await Promise.all(writeEmailLayoutDirPromises);
    } catch (error) {
        // On failure: wipe the likely-bad index dir and restore the backup if
        // one existed; otherwise remove the partially written dir. Rethrow so
        // the caller sees the original failure.
        if (indexDirCtx.exists) {
            await _fsExtra.emptyDir(indexDirCtx.abspath);
            await _fsExtra.copy(backupDirPath, indexDirCtx.abspath);
        } else {
            await _fsExtra.remove(indexDirCtx.abspath);
        }
        throw error;
    } finally{
        // Always clean up the backup directory in the temp sandbox.
        await _fsExtra.remove(backupDirPath);
    }
};
|
|
218
|
+
/*
 * Prunes the index directory by removing any files or directories that aren't
 * layout dirs found in the fetched layouts. We want to preserve any layout
 * dirs that are going to be updated with remote layouts, so extracted links
 * can be respected.
 */ const pruneLayoutsIndexDir = async (indexDirCtx, remoteEmailLayouts)=>{
    // Layout keys are matched case-insensitively.
    const emailLayoutsByKey = Object.fromEntries(remoteEmailLayouts.map((e)=>[
        e.key.toLowerCase(),
        e
    ]));
    const dirents = await _fsExtra.readdir(indexDirCtx.abspath, {
        withFileTypes: true
    });
    const promises = dirents.map(async (dirent)=>{
        // Bug fix: resolve the path from the dirent's actual name. Resolving
        // from the lowercased name breaks on case-sensitive filesystems
        // (the lowercased path may not exist, so isEmailLayoutDir fails and
        // remove targets the wrong path). Lowercasing is only for the
        // case-insensitive key lookup.
        const direntPath = _nodePath.default.resolve(indexDirCtx.abspath, dirent.name);
        const layoutKey = dirent.name.toLowerCase();
        // Keep entries that are layout dirs for a fetched layout key.
        if (await (0, _helpers1.isEmailLayoutDir)(direntPath) && emailLayoutsByKey[layoutKey]) {
            return;
        }
        await _fsExtra.remove(direntPath);
    });
    await Promise.all(promises);
};
|
|
@@ -0,0 +1,135 @@
|
|
|
1
|
+
"use strict";
// SWC-compiled CommonJS module prelude: marks this module as an ES module and
// wires up live, getter-based re-exports. Machine-generated interop code —
// do not hand-edit the helper logic.
Object.defineProperty(exports, "__esModule", {
    value: true
});
// Defines every entry of `all` on `target` as an enumerable getter so the
// exported bindings stay "live" (reads always see the current value).
function _export(target, all) {
    for(var name in all)Object.defineProperty(target, name, {
        enumerable: true,
        get: all[name]
    });
}
_export(exports, {
    FILEPATH_MARKER: ()=>FILEPATH_MARKER,
    FILEPATH_MARKED_RE: ()=>FILEPATH_MARKED_RE,
    readExtractedFileSync: ()=>readExtractedFileSync,
    validateExtractedFilePath: ()=>validateExtractedFilePath,
    checkIfValidExtractedFilePathFormat: ()=>checkIfValidExtractedFilePathFormat
});
const _nodePath = /*#__PURE__*/ _interopRequireWildcard(require("node:path"));
const _fsExtra = /*#__PURE__*/ _interopRequireWildcard(require("fs-extra"));
const _error = require("../../helpers/error");
const _json = require("../../helpers/json");
const _liquid = require("../../helpers/liquid");
const _workflow = require("../workflow");
// Lazily creates the two WeakMap caches (babel-style vs node-style interop)
// used by _interopRequireWildcard, then rebinds itself to a simple selector.
function _getRequireWildcardCache(nodeInterop) {
    if (typeof WeakMap !== "function") return null;
    var cacheBabelInterop = new WeakMap();
    var cacheNodeInterop = new WeakMap();
    return (_getRequireWildcardCache = function(nodeInterop) {
        return nodeInterop ? cacheNodeInterop : cacheBabelInterop;
    })(nodeInterop);
}
// Namespace-import interop: copies own properties (preserving getters/setters
// via property descriptors) onto a fresh object with the module itself under
// `default`, caching the result per source object.
function _interopRequireWildcard(obj, nodeInterop) {
    if (!nodeInterop && obj && obj.__esModule) {
        return obj;
    }
    if (obj === null || typeof obj !== "object" && typeof obj !== "function") {
        return {
            default: obj
        };
    }
    var cache = _getRequireWildcardCache(nodeInterop);
    if (cache && cache.has(obj)) {
        return cache.get(obj);
    }
    var newObj = {};
    var hasPropertyDescriptor = Object.defineProperty && Object.getOwnPropertyDescriptor;
    for(var key in obj){
        if (key !== "default" && Object.prototype.hasOwnProperty.call(obj, key)) {
            var desc = hasPropertyDescriptor ? Object.getOwnPropertyDescriptor(obj, key) : null;
            if (desc && (desc.get || desc.set)) {
                Object.defineProperty(newObj, key, desc);
            } else {
                newObj[key] = obj[key];
            }
        }
    }
    newObj.default = obj;
    if (cache) {
        cache.set(obj, newObj);
    }
    return newObj;
}
|
|
63
|
+
// Suffix appended to a field key to mark its value as a relative path to an
// extracted file (e.g. `"html_layout@": "layout.html"`).
const FILEPATH_MARKER = "@";
// Matches field keys that end with the file path marker.
const FILEPATH_MARKED_RE = new RegExp(`${FILEPATH_MARKER}$`);
// The following files are expected to have valid json content, and should be
// decoded and joined into the main JSON file.
const DECODABLE_JSON_FILES = new Set([
    _workflow.VISUAL_BLOCKS_JSON
]);
|
|
70
|
+
/*
 * Reads the extracted file referenced by `relpath` (relative to the given
 * directory context) and returns a [content, error] pair. The file must
 * exist, contain valid liquid syntax, and — for known decodable JSON files —
 * parse as valid JSON.
 */ const readExtractedFileSync = (relpath, dirCtx, objPathToFieldStr = "")=>{
    // Ensure the file actually exists at the referenced path.
    const abspath = _nodePath.resolve(dirCtx.abspath, relpath);
    if (!_fsExtra.pathExistsSync(abspath)) {
        return [
            undefined,
            new _error.JsonDataError("must be a relative path string to a file that exists", objPathToFieldStr)
        ];
    }
    // Read the file and check for valid liquid syntax, since liquid is
    // supported across all message templates and file extensions.
    const contentStr = _fsExtra.readFileSync(abspath, "utf8");
    const liquidParseError = (0, _liquid.validateLiquidSyntax)(contentStr);
    if (liquidParseError) {
        const message = `points to a file that contains invalid liquid syntax (${relpath})\n\n` + (0, _error.formatErrors)([
            liquidParseError
        ], {
            indentBy: 2
        });
        return [
            undefined,
            new _error.JsonDataError(message, objPathToFieldStr)
        ];
    }
    // For files known to hold decodable JSON, parse the content as such;
    // everything else is returned as the raw string.
    const fileName = _nodePath.basename(abspath.toLowerCase());
    const [content, jsonParseErrors] = DECODABLE_JSON_FILES.has(fileName) ? (0, _json.parseJson)(contentStr) : [
        contentStr,
        []
    ];
    if (jsonParseErrors.length > 0) {
        const message = `points to a file with invalid content (${relpath})\n\n` + (0, _error.formatErrors)(jsonParseErrors, {
            indentBy: 2
        });
        return [
            undefined,
            new _error.JsonDataError(message, objPathToFieldStr)
        ];
    }
    return [
        content,
        undefined
    ];
};
|
|
118
|
+
/*
 * Validates that `val` is a well-formed, not-yet-seen relative file path for
 * an extracted file. Returns a JsonDataError on failure, undefined on
 * success. On success, records the path in `uniqueFilePaths` (mutated in
 * place) so later duplicates are rejected.
 */ const validateExtractedFilePath = (val, sourceFileAbspath, uniqueFilePaths, objPathToFieldStr)=>{
    // The path must be format-valid, a string, and unique per entity.
    const isValid = checkIfValidExtractedFilePathFormat(val, sourceFileAbspath) && typeof val === "string" && !(val in uniqueFilePaths);
    if (!isValid) {
        return new _error.JsonDataError("must be a relative path string to a unique file within the directory", objPathToFieldStr);
    }
    // Track every valid extracted file path seen so far, so uniqueness can be
    // enforced as the traversal continues.
    uniqueFilePaths[val] = true;
    return undefined;
};
|
|
129
|
+
/*
 * Checks whether `relpath` is a well-formed extracted file path: a relative
 * (non-absolute) path string that does not escape upward out of the source
 * file's location when resolved against it.
 */ const checkIfValidExtractedFilePathFormat = (relpath, sourceFileAbspath)=>{
    if (typeof relpath !== "string" || _nodePath.isAbsolute(relpath)) {
        return false;
    }
    // Resolve against the source file path, then reject anything whose
    // relative diff climbs upward ("..") out of it.
    const extractedFileAbspath = _nodePath.resolve(sourceFileAbspath, relpath);
    const pathDiff = _nodePath.relative(sourceFileAbspath, extractedFileAbspath);
    return !pathDiff.startsWith("..");
};
|
|
@@ -15,7 +15,8 @@ _export(exports, {
|
|
|
15
15
|
});
|
|
16
16
|
const _nodePath = /*#__PURE__*/ _interopRequireWildcard(require("node:path"));
|
|
17
17
|
const _lodash = require("lodash");
|
|
18
|
-
const _helpers = require("
|
|
18
|
+
const _helpers = require("../shared/helpers");
|
|
19
|
+
const _helpers1 = require("./helpers");
|
|
19
20
|
const _types = require("./types");
|
|
20
21
|
const _writer = require("./writer");
|
|
21
22
|
function _getRequireWildcardCache(nodeInterop) {
|
|
@@ -263,7 +264,7 @@ const scaffoldWorkflowDirBundle = (attrs)=>{
|
|
|
263
264
|
steps: scaffoldedSteps
|
|
264
265
|
};
|
|
265
266
|
return (0, _lodash.assign)({
|
|
266
|
-
[
|
|
267
|
+
[_helpers1.WORKFLOW_JSON]: workflowJson
|
|
267
268
|
}, ...bundleFragments);
|
|
268
269
|
};
|
|
269
270
|
const generateWorkflowDir = async (workflowDirCtx, attrs)=>{
|
|
@@ -12,15 +12,14 @@ _export(exports, {
|
|
|
12
12
|
WORKFLOW_JSON: ()=>WORKFLOW_JSON,
|
|
13
13
|
VISUAL_BLOCKS_JSON: ()=>VISUAL_BLOCKS_JSON,
|
|
14
14
|
workflowJsonPath: ()=>workflowJsonPath,
|
|
15
|
-
FILEPATH_MARKER: ()=>FILEPATH_MARKER,
|
|
16
|
-
FILEPATH_MARKED_RE: ()=>FILEPATH_MARKED_RE,
|
|
17
15
|
validateWorkflowKey: ()=>validateWorkflowKey,
|
|
18
16
|
lsWorkflowJson: ()=>lsWorkflowJson,
|
|
19
17
|
isWorkflowDir: ()=>isWorkflowDir,
|
|
20
18
|
formatCategories: ()=>formatCategories,
|
|
21
19
|
formatStepSummary: ()=>formatStepSummary,
|
|
22
20
|
formatStatus: ()=>formatStatus,
|
|
23
|
-
ensureValidCommandTarget: ()=>ensureValidCommandTarget
|
|
21
|
+
ensureValidCommandTarget: ()=>ensureValidCommandTarget,
|
|
22
|
+
countSteps: ()=>countSteps
|
|
24
23
|
});
|
|
25
24
|
const _nodePath = /*#__PURE__*/ _interopRequireWildcard(require("node:path"));
|
|
26
25
|
const _core = require("@oclif/core");
|
|
@@ -70,8 +69,6 @@ function _interopRequireWildcard(obj, nodeInterop) {
|
|
|
70
69
|
const WORKFLOW_JSON = "workflow.json";
|
|
71
70
|
const VISUAL_BLOCKS_JSON = "visual_blocks.json";
|
|
72
71
|
const workflowJsonPath = (workflowDirCtx)=>_nodePath.resolve(workflowDirCtx.abspath, WORKFLOW_JSON);
|
|
73
|
-
const FILEPATH_MARKER = "@";
|
|
74
|
-
const FILEPATH_MARKED_RE = new RegExp(`${FILEPATH_MARKER}$`);
|
|
75
72
|
const validateWorkflowKey = (input)=>{
|
|
76
73
|
if (!(0, _string.checkSlugifiedFormat)(input, {
|
|
77
74
|
onlyLowerCase: true
|
|
@@ -126,6 +123,17 @@ const throttleStepSummaryLines = (step)=>{
|
|
|
126
123
|
`Throttle limit: ${throttle_limit}`
|
|
127
124
|
];
|
|
128
125
|
};
|
|
126
|
+
// Summary lines for a branch step: number of branches and the total number
// of steps nested under them (including further nested branch steps).
// Returns [] for any other step type.
const branchStepSummaryLines = (step)=>{
    if (step.type !== _types.StepType.Branch) return [];
    const stepsCount = step.branches.reduce((total, branch)=>total + doCountSteps(branch.steps), 0);
    return [
        `Branches: ${step.branches.length}`,
        `Steps: ${stepsCount}`
    ];
};
|
|
129
137
|
const delayStepSummaryLines = (step)=>{
|
|
130
138
|
if (step.type !== _types.StepType.Delay) return [];
|
|
131
139
|
const { delay_for: duration , delay_until_field_path: field_path } = step.settings;
|
|
@@ -157,6 +165,7 @@ const formatStepSummary = (step)=>{
|
|
|
157
165
|
...batchStepSummaryLines(step),
|
|
158
166
|
...delayStepSummaryLines(step),
|
|
159
167
|
...httpFetchStepSummaryLines(step),
|
|
168
|
+
...branchStepSummaryLines(step),
|
|
160
169
|
...throttleStepSummaryLines(step),
|
|
161
170
|
// Extra line between step rows to make it easier on the eye.
|
|
162
171
|
" "
|
|
@@ -224,3 +233,16 @@ const ensureValidCommandTarget = async (props, runContext)=>{
|
|
|
224
233
|
}
|
|
225
234
|
return _core.ux.error("Missing 1 required arg:\nworkflowKey");
|
|
226
235
|
};
|
|
236
|
+
// Recursively counts the given steps, descending into every branch of each
// branch step.
const doCountSteps = (steps)=>steps.reduce((count, step)=>{
    const nested = step.type === _types.StepType.Branch ? step.branches.reduce((total, branch)=>total + doCountSteps(branch.steps), 0) : 0;
    return count + 1 + nested;
}, 0);
// Total number of steps in a workflow, including steps nested inside branch
// steps.
const countSteps = (workflow)=>doCountSteps(workflow.steps);
|