@knocklabs/cli 0.1.7 → 0.1.8
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +23 -23
- package/dist/commands/commit/list.js +2 -2
- package/dist/commands/layout/list.js +2 -2
- package/dist/commands/layout/pull.js +3 -3
- package/dist/commands/layout/push.js +2 -2
- package/dist/commands/translation/list.js +2 -2
- package/dist/commands/translation/pull.js +2 -2
- package/dist/commands/workflow/list.js +2 -2
- package/dist/commands/workflow/new.js +2 -2
- package/dist/commands/workflow/pull.js +3 -3
- package/dist/commands/workflow/push.js +2 -2
- package/dist/lib/api-v1.js +19 -19
- package/dist/lib/helpers/{object.js → object.isomorphic.js} +8 -1
- package/dist/lib/marshal/email-layout/reader.js +7 -6
- package/dist/lib/marshal/email-layout/writer.js +12 -12
- package/dist/lib/marshal/shared/const.isomorphic.js +25 -0
- package/dist/lib/marshal/shared/helpers.js +0 -4
- package/dist/lib/marshal/workflow/generator.js +8 -8
- package/dist/lib/marshal/workflow/helpers.js +3 -6
- package/dist/lib/marshal/workflow/index.js +1 -0
- package/dist/lib/marshal/workflow/processor.isomorphic.js +272 -0
- package/dist/lib/marshal/workflow/reader.js +11 -9
- package/dist/lib/marshal/workflow/writer.js +7 -238
- package/oclif.manifest.json +1 -1
- package/package.json +1 -1

package/dist/lib/marshal/email-layout/writer.js:
@@ -20,9 +20,9 @@ const _fsExtra = /*#__PURE__*/ _interopRequireWildcard(require("fs-extra"));
 const _lodash = require("lodash");
 const _const = require("../../helpers/const");
 const _json = require("../../helpers/json");
-const
-const
-const
+const _objectIsomorphic = require("../../helpers/object.isomorphic");
+const _constIsomorphic = require("../shared/const.isomorphic");
+const _helpers = require("./helpers");
 const _reader = require("./reader");
 function _interopRequireDefault(obj) {
     return obj && obj.__esModule ? obj : {

@@ -96,13 +96,13 @@ function _interopRequireWildcard(obj, nodeInterop) {
     var _emailLayout___annotation;
     // Move read only field under the dedicated field "__readonly".
     const readonlyFields = ((_emailLayout___annotation = emailLayout.__annotation) === null || _emailLayout___annotation === void 0 ? void 0 : _emailLayout___annotation.readonly_fields) || [];
-    const [readonly, remainder] = (0,
+    const [readonly, remainder] = (0, _objectIsomorphic.split)(emailLayout, readonlyFields);
     const emailLayoutjson = {
         ...remainder,
         __readonly: readonly
     };
     // Strip out all schema annotations, so not to expose them to end users.
-    return (0,
+    return (0, _objectIsomorphic.omitDeep)(emailLayoutjson, [
         "__annotation"
     ]);
 };
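
The `split`/`omitDeep` calls now come from `object.isomorphic`. For readers skimming the compiled output, here is a minimal standalone sketch of what `toEmailLayoutJson` produces; the two inline helpers are illustrative stand-ins for the package's `object.isomorphic` implementations (assumed behavior, not the real code), and the layout object is hypothetical:

```js
// Illustrative stand-ins (assumed behavior of _objectIsomorphic.split/omitDeep).
const split = (obj, keys) => {
  const picked = {};
  const rest = {};
  for (const [k, v] of Object.entries(obj)) (keys.includes(k) ? picked : rest)[k] = v;
  return [picked, rest];
};
const omitDeep = (obj, keys) =>
  JSON.parse(JSON.stringify(obj, (k, v) => (keys.includes(k) ? undefined : v)));

// A hypothetical annotated layout as returned by the API:
const emailLayout = {
  key: "default",
  name: "Default",
  html_layout: "<html>{{ content }}</html>",
  updated_at: "2023-01-01T00:00:00Z",
  __annotation: { readonly_fields: ["key", "updated_at"], extractable_fields: {} },
};

const readonlyFields = emailLayout.__annotation?.readonly_fields || [];
const [readonly, remainder] = split(emailLayout, readonlyFields);
console.log(omitDeep({ ...remainder, __readonly: readonly }, ["__annotation"]));
// => { name: 'Default', html_layout: '<html>{{ content }}</html>',
//      __readonly: { key: 'default', updated_at: '2023-01-01T00:00:00Z' } }
```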

@@ -122,7 +122,7 @@ const writeEmailLayoutDirFromData = async (emailLayoutDirCtx, remoteEmailLayout)
     }
     const promises = Object.entries(bundle).map(([relpath, fileContent])=>{
        const filePath = _nodePath.default.resolve(emailLayoutDirCtx.abspath, relpath);
-        return relpath ===
+        return relpath === _helpers.LAYOUT_JSON ? _fsExtra.outputJson(filePath, fileContent, {
            spaces: _json.DOUBLE_SPACES
        }) : _fsExtra.outputFile(filePath, fileContent);
     });

@@ -158,8 +158,8 @@ const writeEmailLayoutDirFromData = async (emailLayoutDirCtx, remoteEmailLayout)
        if (!(0, _lodash.has)(mutRemoteEmailLayout, objPathParts)) continue;
        // If the field at this path is extracted in the local layout, then
        // always extract; otherwise extract based on the field settings default.
-        const objPathStr =
-        const extractedFilePath = (0, _lodash.get)(localEmailLayout, `${objPathStr}${
+        const objPathStr = _objectIsomorphic.ObjPath.stringify(objPathParts);
+        const extractedFilePath = (0, _lodash.get)(localEmailLayout, `${objPathStr}${_constIsomorphic.FILEPATH_MARKER}`);
        const { default: extractByDefault , file_ext: fileExt } = extractionSettings;
        if (!extractedFilePath && !extractByDefault) continue;
        // By this point, we have a field where we need to extract its content.

@@ -175,13 +175,13 @@ const writeEmailLayoutDirFromData = async (emailLayoutDirCtx, remoteEmailLayout)
        (0, _lodash.set)(bundle, [
            relpath
        ], data);
-        (0, _lodash.set)(mutRemoteEmailLayout, `${objPathStr}${
+        (0, _lodash.set)(mutRemoteEmailLayout, `${objPathStr}${_constIsomorphic.FILEPATH_MARKER}`, relpath);
        (0, _lodash.unset)(mutRemoteEmailLayout, objPathStr);
     }
     // At this point the bundle contains all extractable files, so we finally add the layout
     // JSON realtive path + the file content.
     return (0, _lodash.set)(bundle, [
-
+        _helpers.LAYOUT_JSON
     ], toEmailLayoutJson(mutRemoteEmailLayout));
 };
 const writeEmailLayoutIndexDir = async (indexDirCtx, remoteEmailLayouts)=>{
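
The `set`/`unset` pair above is the core of the extraction step: the field content moves into the bundle under a relative path, and the layout keeps only a marker key pointing at that file. A hedged sketch with lodash (the field name and path values here are hypothetical):

```js
const { set, unset, get } = require("lodash");

const FILEPATH_MARKER = "@"; // from shared/const.isomorphic
const bundle = {};
const mutRemoteEmailLayout = { html_layout: "<html>{{ content }}</html>" };

// Extract: move the field content into the bundle under its relative path.
// The array form of set() keeps dots in the path from being parsed as nesting.
const objPathStr = "html_layout";
const relpath = "html_layout.html";
set(bundle, [relpath], get(mutRemoteEmailLayout, objPathStr));

// Then point the layout at the file and drop the inlined content.
set(mutRemoteEmailLayout, `${objPathStr}${FILEPATH_MARKER}`, relpath);
unset(mutRemoteEmailLayout, objPathStr);

console.log(bundle);               // { 'html_layout.html': '<html>{{ content }}</html>' }
console.log(mutRemoteEmailLayout); // { 'html_layout@': 'html_layout.html' }
```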

@@ -197,7 +197,7 @@ const writeEmailLayoutIndexDir = async (indexDirCtx, remoteEmailLayouts)=>{
            type: "email_layout",
            key: remoteEmailLayout.key,
            abspath: emailLayoutDirPath,
-            exists: indexDirCtx.exists ? await (0,
+            exists: indexDirCtx.exists ? await (0, _helpers.isEmailLayoutDir)(emailLayoutDirPath) : false
        };
        return writeEmailLayoutDirFromData(emailLayoutDirCtx, remoteEmailLayout);
     });

@@ -231,7 +231,7 @@ const writeEmailLayoutIndexDir = async (indexDirCtx, remoteEmailLayouts)=>{
     const promises = dirents.map(async (dirent)=>{
        const direntName = dirent.name.toLowerCase();
        const direntPath = _nodePath.default.resolve(indexDirCtx.abspath, direntName);
-        if (await (0,
+        if (await (0, _helpers.isEmailLayoutDir)(direntPath) && emailLayoutsByKey[direntName]) {
            return;
        }
        await _fsExtra.remove(direntPath);

package/dist/lib/marshal/shared/const.isomorphic.js (new file):
@@ -0,0 +1,25 @@
+/*
+ * IMPORTANT:
+ *
+ * This file is suffixed with `.isomorphic` because the code in this file is
+ * meant to run not just in a nodejs environment but also in a browser. For this
+ * reason there are some restrictions for which nodejs imports are allowed in
+ * this module. See `.eslintrc.json` for more details.
+ */ // Mark any template fields we are extracting out with this suffix as a rule,
+// so we can reliably interpret the field value.
+"use strict";
+Object.defineProperty(exports, "__esModule", {
+    value: true
+});
+function _export(target, all) {
+    for(var name in all)Object.defineProperty(target, name, {
+        enumerable: true,
+        get: all[name]
+    });
+}
+_export(exports, {
+    FILEPATH_MARKER: ()=>FILEPATH_MARKER,
+    FILEPATH_MARKED_RE: ()=>FILEPATH_MARKED_RE
+});
+const FILEPATH_MARKER = "@";
+const FILEPATH_MARKED_RE = new RegExp(`${FILEPATH_MARKER}$`);
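
These two constants pin down the convention used across the marshal modules: a key ending in `@` holds a relative path to extracted file content rather than the content itself. A small usage sketch (the step object is a hypothetical example):

```js
const FILEPATH_MARKER = "@";
const FILEPATH_MARKED_RE = new RegExp(`${FILEPATH_MARKER}$`);

const step = { "html_body@": "email_1/html_body.html", subject: "You've got mail!" };

for (const [key, value] of Object.entries(step)) {
  if (!FILEPATH_MARKED_RE.test(key)) continue;          // only marked fields
  const fieldName = key.replace(FILEPATH_MARKED_RE, ""); // "html_body"
  console.log(`${fieldName} is extracted to ${value}`);
}
```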

package/dist/lib/marshal/shared/helpers.js:
@@ -9,8 +9,6 @@ function _export(target, all) {
     });
 }
 _export(exports, {
-    FILEPATH_MARKER: ()=>FILEPATH_MARKER,
-    FILEPATH_MARKED_RE: ()=>FILEPATH_MARKED_RE,
     readExtractedFileSync: ()=>readExtractedFileSync,
     validateExtractedFilePath: ()=>validateExtractedFilePath,
     checkIfValidExtractedFilePathFormat: ()=>checkIfValidExtractedFilePathFormat

@@ -60,8 +58,6 @@ function _interopRequireWildcard(obj, nodeInterop) {
     }
     return newObj;
 }
-const FILEPATH_MARKER = "@";
-const FILEPATH_MARKED_RE = new RegExp(`${FILEPATH_MARKER}$`);
 // The following files are exepected to have valid json content, and should be
 // decoded and joined into the main JSON file.
 const DECODABLE_JSON_FILES = new Set([

package/dist/lib/marshal/workflow/generator.js:
@@ -15,8 +15,8 @@ _export(exports, {
 });
 const _nodePath = /*#__PURE__*/ _interopRequireWildcard(require("node:path"));
 const _lodash = require("lodash");
-const
-const
+const _constIsomorphic = require("../shared/const.isomorphic");
+const _processorIsomorphic = require("./processor.isomorphic");
 const _types = require("./types");
 const _writer = require("./writer");
 function _getRequireWildcardCache(nodeInterop) {

@@ -119,7 +119,7 @@ const scaffoldEmailChannelStep = (refSuffix)=>{
                layout_key: "default"
            },
            subject: "You've got mail!",
-            ["html_body" +
+            ["html_body" + _constIsomorphic.FILEPATH_MARKER]: templateFilePath
        }
     };
     const bundleFragment = {
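
The in-app feed, SMS, push, and chat scaffolds in the hunks that follow get the identical treatment: in each case the marked key is built with a computed property name. Roughly (the file path is a hypothetical example):

```js
const FILEPATH_MARKER = "@"; // _constIsomorphic.FILEPATH_MARKER
const templateFilePath = "email_1/html_body.html";

const template = {
  settings: { layout_key: "default" },
  subject: "You've got mail!",
  ["html_body" + FILEPATH_MARKER]: templateFilePath,
};
console.log(template); // { ..., 'html_body@': 'email_1/html_body.html' }
```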

@@ -139,7 +139,7 @@ const scaffoldInAppFeedChannelStep = (refSuffix)=>{
        channel_key: "<IN-APP-FEED CHANNEL KEY>",
        template: {
            action_url: "{{ vars.app_url }}",
-            ["markdown_body" +
+            ["markdown_body" + _constIsomorphic.FILEPATH_MARKER]: templateFilePath
        }
     };
     const bundleFragment = {

@@ -158,7 +158,7 @@ const scaffoldSmsChannelStep = (refSuffix)=>{
        type: _types.StepType.Channel,
        channel_key: "<SMS CHANNEL KEY>",
        template: {
-            ["text_body" +
+            ["text_body" + _constIsomorphic.FILEPATH_MARKER]: templateFilePath
        }
     };
     const bundleFragment = {

@@ -180,7 +180,7 @@ const scaffoldPushChannelStep = (refSuffix)=>{
            settings: {
                delivery_type: "content"
            },
-            ["text_body" +
+            ["text_body" + _constIsomorphic.FILEPATH_MARKER]: templateFilePath
        }
     };
     const bundleFragment = {

@@ -199,7 +199,7 @@ const scaffoldChatChannelStep = (refSuffix)=>{
        type: _types.StepType.Channel,
        channel_key: "<CHAT CHANNEL KEY>",
        template: {
-            ["markdown_body" +
+            ["markdown_body" + _constIsomorphic.FILEPATH_MARKER]: templateFilePath
        }
     };
     const bundleFragment = {

@@ -264,7 +264,7 @@ const scaffoldWorkflowDirBundle = (attrs)=>{
        steps: scaffoldedSteps
     };
     return (0, _lodash.assign)({
-        [
+        [_processorIsomorphic.WORKFLOW_JSON]: workflowJson
     }, ...bundleFragments);
 };
 const generateWorkflowDir = async (workflowDirCtx, attrs)=>{
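
The bundle a scaffold produces is a flat map from workflow-relative file paths to file contents, with the `workflow.json` entry merged in last. A sketch of the merge (the fragment values are hypothetical):

```js
const { assign } = require("lodash");

const WORKFLOW_JSON = "workflow.json"; // _processorIsomorphic.WORKFLOW_JSON
const workflowJson = { name: "My workflow", steps: [] };
const bundleFragments = [
  { "email_1/html_body.html": "<p>Hello, {{ recipient.name }}</p>" },
  { "sms_1/text_body.txt": "Hello!" },
];

const bundle = assign({ [WORKFLOW_JSON]: workflowJson }, ...bundleFragments);
console.log(Object.keys(bundle));
// => [ 'workflow.json', 'email_1/html_body.html', 'sms_1/text_body.txt' ]
```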

package/dist/lib/marshal/workflow/helpers.js:
@@ -9,8 +9,6 @@ function _export(target, all) {
     });
 }
 _export(exports, {
-    WORKFLOW_JSON: ()=>WORKFLOW_JSON,
-    VISUAL_BLOCKS_JSON: ()=>VISUAL_BLOCKS_JSON,
     workflowJsonPath: ()=>workflowJsonPath,
     validateWorkflowKey: ()=>validateWorkflowKey,
     lsWorkflowJson: ()=>lsWorkflowJson,

@@ -26,6 +24,7 @@ const _core = require("@oclif/core");
 const _fsExtra = /*#__PURE__*/ _interopRequireWildcard(require("fs-extra"));
 const _lodash = require("lodash");
 const _string = require("../../helpers/string");
+const _processorIsomorphic = require("./processor.isomorphic");
 const _types = require("./types");
 function _getRequireWildcardCache(nodeInterop) {
     if (typeof WeakMap !== "function") return null;

@@ -66,9 +65,7 @@ function _interopRequireWildcard(obj, nodeInterop) {
     }
     return newObj;
 }
-const
-const VISUAL_BLOCKS_JSON = "visual_blocks.json";
-const workflowJsonPath = (workflowDirCtx)=>_nodePath.resolve(workflowDirCtx.abspath, WORKFLOW_JSON);
+const workflowJsonPath = (workflowDirCtx)=>_nodePath.resolve(workflowDirCtx.abspath, _processorIsomorphic.WORKFLOW_JSON);
 const validateWorkflowKey = (input)=>{
     if (!(0, _string.checkSlugifiedFormat)(input, {
        onlyLowerCase: true

@@ -78,7 +75,7 @@ const validateWorkflowKey = (input)=>{
     return undefined;
 };
 const lsWorkflowJson = async (dirPath)=>{
-    const workflowJsonPath = _nodePath.resolve(dirPath, WORKFLOW_JSON);
+    const workflowJsonPath = _nodePath.resolve(dirPath, _processorIsomorphic.WORKFLOW_JSON);
     const exists = await _fsExtra.pathExists(workflowJsonPath);
     return exists ? workflowJsonPath : undefined;
 };

package/dist/lib/marshal/workflow/index.js:
@@ -4,6 +4,7 @@ Object.defineProperty(exports, "__esModule", {
 });
 _exportStar(require("./generator"), exports);
 _exportStar(require("./helpers"), exports);
+_exportStar(require("./processor.isomorphic"), exports);
 _exportStar(require("./reader"), exports);
 _exportStar(require("./types"), exports);
 _exportStar(require("./writer"), exports);

package/dist/lib/marshal/workflow/processor.isomorphic.js (new file):
@@ -0,0 +1,272 @@
+/*
+ * IMPORTANT:
+ *
+ * This file is suffixed with `.isomorphic` because the code in this file is
+ * meant to run not just in a nodejs environment but also in a browser. For this
+ * reason there are some restrictions for which nodejs imports are allowed in
+ * this module. See `.eslintrc.json` for more details.
+ */ "use strict";
+Object.defineProperty(exports, "__esModule", {
+    value: true
+});
+function _export(target, all) {
+    for(var name in all)Object.defineProperty(target, name, {
+        enumerable: true,
+        get: all[name]
+    });
+}
+_export(exports, {
+    WORKFLOW_JSON: ()=>WORKFLOW_JSON,
+    VISUAL_BLOCKS_JSON: ()=>VISUAL_BLOCKS_JSON,
+    buildWorkflowDirBundle: ()=>buildWorkflowDirBundle,
+    formatExtractedFilePath: ()=>formatExtractedFilePath,
+    toWorkflowJson: ()=>toWorkflowJson
+});
+const _nodePath = /*#__PURE__*/ _interopRequireWildcard(require("node:path"));
+const _lodash = require("lodash");
+const _objectIsomorphic = require("../../helpers/object.isomorphic");
+const _constIsomorphic = require("../shared/const.isomorphic");
+const _types = require("./types");
+function _getRequireWildcardCache(nodeInterop) {
+    if (typeof WeakMap !== "function") return null;
+    var cacheBabelInterop = new WeakMap();
+    var cacheNodeInterop = new WeakMap();
+    return (_getRequireWildcardCache = function(nodeInterop) {
+        return nodeInterop ? cacheNodeInterop : cacheBabelInterop;
+    })(nodeInterop);
+}
+function _interopRequireWildcard(obj, nodeInterop) {
+    if (!nodeInterop && obj && obj.__esModule) {
+        return obj;
+    }
+    if (obj === null || typeof obj !== "object" && typeof obj !== "function") {
+        return {
+            default: obj
+        };
+    }
+    var cache = _getRequireWildcardCache(nodeInterop);
+    if (cache && cache.has(obj)) {
+        return cache.get(obj);
+    }
+    var newObj = {};
+    var hasPropertyDescriptor = Object.defineProperty && Object.getOwnPropertyDescriptor;
+    for(var key in obj){
+        if (key !== "default" && Object.prototype.hasOwnProperty.call(obj, key)) {
+            var desc = hasPropertyDescriptor ? Object.getOwnPropertyDescriptor(obj, key) : null;
+            if (desc && (desc.get || desc.set)) {
+                Object.defineProperty(newObj, key, desc);
+            } else {
+                newObj[key] = obj[key];
+            }
+        }
+    }
+    newObj.default = obj;
+    if (cache) {
+        cache.set(obj, newObj);
+    }
+    return newObj;
+}
+const WORKFLOW_JSON = "workflow.json";
+const VISUAL_BLOCKS_JSON = "visual_blocks.json";
+/*
+ * Sanitize the workflow content into a format that's appropriate for reading
+ * and writing, by stripping out any annotation fields and handling readonly
+ * fields.
+ */ const toWorkflowJson = (workflow)=>{
+    var _workflow___annotation;
+    // Move read only fields of a workflow under the dedicated field "__readonly".
+    const readonlyFields = ((_workflow___annotation = workflow.__annotation) === null || _workflow___annotation === void 0 ? void 0 : _workflow___annotation.readonly_fields) || [];
+    const [readonly, remainder] = (0, _objectIsomorphic.split)(workflow, readonlyFields);
+    const worklfowJson = {
+        ...remainder,
+        __readonly: readonly
+    };
+    // Strip out all schema annotations, so not to expose them to end users.
+    return (0, _objectIsomorphic.omitDeep)(worklfowJson, [
+        "__annotation"
+    ]);
+};
+const formatExtractedFilePath = (objPathParts, fileExt, opts = {})=>{
+    const { unnestDirsBy =0 , nestIntoDirs =[] } = opts;
+    // 1. Unnest the obj path parts by the given depths, if the option is given.
+    const maxUnnestableDepth = Math.min(Math.max(objPathParts.length - 1, 0), unnestDirsBy);
+    const unnestedObjPathParts = objPathParts.slice(maxUnnestableDepth, objPathParts.length);
+    // 2. Build the file path parts based on the object path parts.
+    const filePathParts = [];
+    let arrayIndexNums = [];
+    for (const part of unnestedObjPathParts){
+        if (typeof part === "string" && arrayIndexNums.length > 0) {
+            filePathParts.push([
+                ...arrayIndexNums,
+                part
+            ].join("."));
+            arrayIndexNums = [];
+            continue;
+        }
+        if (typeof part === "string") {
+            filePathParts.push(part);
+            continue;
+        }
+        if (typeof part === "number") {
+            arrayIndexNums.push(part + 1);
+            continue;
+        }
+    }
+    if (arrayIndexNums.length > 0) {
+        filePathParts.push(arrayIndexNums.join("."));
+    }
+    // 3. Format the final file path out based on the file path parts. Nest it
+    // under the directories if the option is given.
+    const fileName = filePathParts.pop();
+    const paths = [
+        ...nestIntoDirs,
+        ...filePathParts,
+        `${fileName}.${fileExt}`
+    ];
+    return _nodePath.join(...paths).toLowerCase();
+};
+const NON_RECURSIVELY_TRAVERSABLE_FIELDS_FOR_EXTRACTION = new Set([
+    "branches"
+]);
+const compileExtractionSettings = (node, objPathParts = [])=>{
+    const map = new Map();
+    const compileRecursively = (item, parts)=>{
+        if ((0, _lodash.isPlainObject)(item)) {
+            const extractableFields = (0, _lodash.get)(item, [
+                "__annotation",
+                "extractable_fields"
+            ], {});
+            for (const [key, val] of Object.entries(item)){
+                // If the field we are on is extractable, then add its extraction
+                // settings to the map with the current object path.
+                if (key in extractableFields) {
+                    map.set([
+                        ...parts,
+                        key
+                    ], extractableFields[key]);
+                }
+                // Recursively exam current field for any additionally extractable data
+                // within, except for disallowed fields
+                if (!NON_RECURSIVELY_TRAVERSABLE_FIELDS_FOR_EXTRACTION.has(key)) {
+                    compileRecursively(val, [
+                        ...parts,
+                        key
+                    ]);
+                }
+            }
+            return;
+        }
+        if (Array.isArray(item)) {
+            item.map((val, idx)=>compileRecursively(val, [
+                    ...parts,
+                    idx
+                ]));
+        }
+    };
+    // Walk the node tree and compile all extractable fields by object path.
+    compileRecursively(node, objPathParts);
+    // Sort the compiled entries in desc order by the object path length, so the
+    // deepest nested fields come first and the top most fields come last because
+    // this is the order we should be extracting and replacing field contents.
+    return new Map([
+        ...map
+    ].sort((a, b)=>{
+        const aLength = a[0].length;
+        const bLength = b[0].length;
+        if (aLength < bLength) return 1;
+        if (aLength > bLength) return -1;
+        return 0;
+    }));
+};
+const keyLocalWorkflowStepsByRef = (steps, result = {})=>{
+    if (!Array.isArray(steps)) return result;
+    for (const step of steps){
+        if (!(0, _lodash.isPlainObject)(step)) continue;
+        if (!step.ref) continue;
+        result[step.ref] = step;
+        if (step.type === _types.StepType.Branch && Array.isArray(step.branches)) {
+            for (const branch of step.branches){
+                if (!(0, _lodash.isPlainObject)(branch)) continue;
+                result = keyLocalWorkflowStepsByRef(branch.steps, result);
+            }
+        }
+    }
+    return result;
+};
+const recursivelyBuildWorkflowDirBundle = (bundle, steps, localWorkflowStepsByRef)=>{
+    for (const step of steps){
+        // A compiled map of extraction settings of every field in the step where
+        // we support content extraction, organized by each field's object path.
+        const compiledExtractionSettings = compileExtractionSettings(step);
+        // Iterate through each extractable field, determine whether we need to
+        // extract the field content in the remote workflow, and if so, perform the
+        // extraction. Note, this compiled map is ordered by the deepest nested to
+        // the top most fields, so that more than one extraction is possible.
+        for (const [objPathParts, extractionSettings] of compiledExtractionSettings){
+            // If this step doesn't have this object path, then it's not relevant so
+            // nothing more to do here.
+            if (!(0, _lodash.has)(step, objPathParts)) continue;
+            // If the field at this path is extracted in the local workflow, then
+            // always extract; otherwise extract based on the field settings default.
+            const objPathStr = _objectIsomorphic.ObjPath.stringify(objPathParts);
+            const extractedFilePath = (0, _lodash.get)(localWorkflowStepsByRef, `${step.ref}.${objPathStr}${_constIsomorphic.FILEPATH_MARKER}`);
+            const { default: extractByDefault , file_ext: fileExt } = extractionSettings;
+            if (!extractedFilePath && !extractByDefault) continue;
+            // By this point, we have a field where we need to extract its content.
+            // First figure out the relative file path (within the workflow directory)
+            // for the extracted file. If already extracted in the local workflow,
+            // then use that; otherwise format a new file path.
+            const relpath = typeof extractedFilePath === "string" ? extractedFilePath : formatExtractedFilePath(objPathParts, fileExt, {
+                unnestDirsBy: 1,
+                nestIntoDirs: [
+                    step.ref
+                ]
+            });
+            // In case we are about to extract a field that has children rather than
+            // string content (e.g. visual blocks), prepare the data to strip out any
+            // annotations.
+            let data = (0, _objectIsomorphic.omitDeep)((0, _lodash.get)(step, objPathParts), [
+                "__annotation"
+            ]);
+            // Also, if the extractable data contains extracted file paths in itself
+            // then rebase those file paths to be relative to its referenced file.
+            data = (0, _objectIsomorphic.mapValuesDeep)(data, (value, key)=>{
+                if (!_constIsomorphic.FILEPATH_MARKED_RE.test(key)) return value;
+                const rebaseRootDir = _nodePath.dirname(relpath);
+                const rebasedFilePath = _nodePath.relative(rebaseRootDir, value);
+                return rebasedFilePath;
+            });
+            const content = typeof data === "string" ? data : JSON.stringify(data, null, 2);
+            // Perform the extraction by adding the content and its file path to the
+            // bundle for writing to the file system later. Then replace the field
+            // content with the extracted file path and mark the field as extracted
+            // with @ suffix.
+            //
+            // TODO: Consider guarding against an edge case, and check if the relpath
+            // already exists in the bundle, and if so make the relpath unique.
+            (0, _lodash.set)(bundle, [
+                relpath
+            ], content);
+            (0, _lodash.set)(step, `${objPathStr}${_constIsomorphic.FILEPATH_MARKER}`, relpath);
+            (0, _lodash.unset)(step, objPathParts);
+        }
+        // Lastly, recurse thru any branches that exist in the workflow tree
+        if (step.type === _types.StepType.Branch) {
+            for (const branch of step.branches){
+                recursivelyBuildWorkflowDirBundle(bundle, branch.steps, localWorkflowStepsByRef);
+            }
+        }
+    }
+};
+const buildWorkflowDirBundle = (remoteWorkflow, localWorkflow = {})=>{
+    const bundle = {};
+    const mutWorkflow = (0, _lodash.cloneDeep)(remoteWorkflow);
+    const localWorkflowStepsByRef = keyLocalWorkflowStepsByRef(localWorkflow.steps);
+    // Recursively traverse the workflow step tree, mutating it and the bundle
+    // along the way
+    recursivelyBuildWorkflowDirBundle(bundle, mutWorkflow.steps, localWorkflowStepsByRef);
+    // Then, prepare the workflow data to be written into a workflow json file.
+    return (0, _lodash.set)(bundle, [
+        WORKFLOW_JSON
+    ], toWorkflowJson(mutWorkflow));
+};
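
To make the three path-building steps concrete, here is a worked trace of `formatExtractedFilePath` as defined above. The logic is condensed for illustration but follows the same steps; the inputs are hypothetical, and `unnestDirsBy: 1` with `nestIntoDirs: [step.ref]` mirrors how `recursivelyBuildWorkflowDirBundle` calls it:

```js
const path = require("node:path");

// Condensed copy of formatExtractedFilePath from the hunk above.
const formatExtractedFilePath = (objPathParts, fileExt, opts = {}) => {
  const { unnestDirsBy = 0, nestIntoDirs = [] } = opts;
  // 1. Drop up to `unnestDirsBy` leading parts, always keeping at least one.
  const depth = Math.min(Math.max(objPathParts.length - 1, 0), unnestDirsBy);
  const parts = objPathParts.slice(depth);
  // 2. Fold array indexes (as 1-based labels) into the next string segment.
  const filePathParts = [];
  let arrayIndexNums = [];
  for (const part of parts) {
    if (typeof part === "string" && arrayIndexNums.length > 0) {
      filePathParts.push([...arrayIndexNums, part].join("."));
      arrayIndexNums = [];
    } else if (typeof part === "string") {
      filePathParts.push(part);
    } else if (typeof part === "number") {
      arrayIndexNums.push(part + 1);
    }
  }
  if (arrayIndexNums.length > 0) filePathParts.push(arrayIndexNums.join("."));
  // 3. Last segment becomes the file name; optional dirs are prepended.
  const fileName = filePathParts.pop();
  return path.join(...nestIntoDirs, ...filePathParts, `${fileName}.${fileExt}`).toLowerCase();
};

// A template field one object level deep, unnested by one dir, nested under the step ref:
console.log(formatExtractedFilePath(["template", "html_body"], "html", {
  unnestDirsBy: 1,
  nestIntoDirs: ["email_1"],
})); // => 'email_1/html_body.html'

// Array indexes become 1-based prefixes on the following segment:
console.log(formatExtractedFilePath(["blocks", 0, "content"], "md", {}));
// => 'blocks/1.content.md'
```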

package/dist/lib/marshal/workflow/reader.js:
@@ -18,9 +18,11 @@ const _fsExtra = /*#__PURE__*/ _interopRequireWildcard(require("fs-extra"));
 const _lodash = require("lodash");
 const _error = require("../../helpers/error");
 const _json = require("../../helpers/json");
-const
+const _objectIsomorphic = require("../../helpers/object.isomorphic");
+const _constIsomorphic = require("../shared/const.isomorphic");
 const _helpers = require("../shared/helpers");
 const _helpers1 = require("./helpers");
+const _processorIsomorphic = require("./processor.isomorphic");
 function _getRequireWildcardCache(nodeInterop) {
     if (typeof WeakMap !== "function") return null;
     var cacheBabelInterop = new WeakMap();

@@ -80,21 +82,21 @@ const joinExtractedFiles = async (workflowDirCtx, workflowJson)=>{
     }).entries()){
        const currJoinedFilePaths = {};
        const prevJoinedFilePaths = joinedFilePathsPerLevel[idx - 1] || {};
-        (0,
+        (0, _objectIsomorphic.mapValuesDeep)(workflowJson, (value, key, parts)=>{
            // If not marked with the @ suffix, there's nothing to do.
-            if (!
-            const objPathToFieldStr =
-            const inlinObjPathStr = objPathToFieldStr.replace(
+            if (!_constIsomorphic.FILEPATH_MARKED_RE.test(key)) return;
+            const objPathToFieldStr = _objectIsomorphic.ObjPath.stringify(parts);
+            const inlinObjPathStr = objPathToFieldStr.replace(_constIsomorphic.FILEPATH_MARKED_RE, "");
            // If there is inlined content present already, then nothing more to do.
            if ((0, _lodash.hasIn)(workflowJson, inlinObjPathStr)) return;
            // Check if the extracted path found at the current field path belongs to
            // a node whose parent or grandparent has been previously joined earlier
            // in the tree. If so, rebase the extracted path to be a relative path to
            // the workflow json.
-            const lastFound = (0,
+            const lastFound = (0, _objectIsomorphic.getLastFound)(prevJoinedFilePaths, parts);
            const prevJoinedFilePath = typeof lastFound === "string" ? lastFound : undefined;
            const rebasedFilePath = prevJoinedFilePath ? _nodePath.join(_nodePath.dirname(prevJoinedFilePath), value) : value;
-            const invalidFilePathError = (0, _helpers.validateExtractedFilePath)(rebasedFilePath, _nodePath.resolve(workflowDirCtx.abspath,
+            const invalidFilePathError = (0, _helpers.validateExtractedFilePath)(rebasedFilePath, _nodePath.resolve(workflowDirCtx.abspath, _processorIsomorphic.WORKFLOW_JSON), uniqueFilePaths, objPathToFieldStr);
            if (invalidFilePathError) {
                errors.push(invalidFilePathError);
                // Wipe the invalid file path in the node so the final workflow json
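
The rebasing above is plain `node:path` arithmetic: a path that was recorded relative to a previously-joined file gets re-expressed relative to the workflow directory root. For example (file names are hypothetical):

```js
const path = require("node:path");

// A visual_blocks.json that was joined at an earlier level referenced a sibling file:
const prevJoinedFilePath = "email_1/visual_blocks.json";
const value = "block_1.md"; // as recorded inside visual_blocks.json

// Rebase it so it resolves relative to where workflow.json lives:
const rebasedFilePath = path.join(path.dirname(prevJoinedFilePath), value);
console.log(rebasedFilePath); // email_1/block_1.md (POSIX separators)
```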

@@ -143,7 +145,7 @@ const readWorkflowDir = async (workflowDirCtx, opts = {})=>{
     const result = await (0, _json.readJson)(workflowJsonPath);
     if (!result[0]) return result;
     let [workflowJson] = result;
-    workflowJson = withReadonlyField ? workflowJson : (0,
+    workflowJson = withReadonlyField ? workflowJson : (0, _objectIsomorphic.omitDeep)(workflowJson, [
        "__readonly"
     ]);
     return withExtractedFiles ? joinExtractedFiles(workflowDirCtx, workflowJson) : [

@@ -161,7 +163,7 @@ const readWorkflowDir = async (workflowDirCtx, opts = {})=>{
        // eslint-disable-next-line no-await-in-loop
        const [workflow, readErrors] = await readWorkflowDir(workflowDirCtx, opts);
        if (readErrors.length > 0) {
-            const workflowJsonPath = _nodePath.resolve(workflowDirCtx.abspath,
+            const workflowJsonPath = _nodePath.resolve(workflowDirCtx.abspath, _processorIsomorphic.WORKFLOW_JSON);
            const e = new _error.SourceError((0, _error.formatErrors)(readErrors), workflowJsonPath);
            errors.push(e);
            continue;