@knocklabs/cli 0.1.4 → 0.1.6

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

@@ -10,9 +10,7 @@ function _export(target, all) {
  }
  _export(exports, {
  readWorkflowDir: ()=>readWorkflowDir,
- readAllForCommandTarget: ()=>readAllForCommandTarget,
- checkIfValidExtractedFilePathFormat: ()=>checkIfValidExtractedFilePathFormat,
- readExtractedFileSync: ()=>readExtractedFileSync
+ readAllForCommandTarget: ()=>readAllForCommandTarget
  });
  const _nodePath = /*#__PURE__*/ _interopRequireWildcard(require("node:path"));
  const _core = require("@oclif/core");

@@ -20,9 +18,9 @@ const _fsExtra = /*#__PURE__*/ _interopRequireWildcard(require("fs-extra"));
  const _lodash = require("lodash");
  const _error = require("../../helpers/error");
  const _json = require("../../helpers/json");
- const _liquid = require("../../helpers/liquid");
  const _object = require("../../helpers/object");
- const _helpers = require("./helpers");
+ const _helpers = require("../shared/helpers");
+ const _helpers1 = require("./helpers");
  function _getRequireWildcardCache(nodeInterop) {
  if (typeof WeakMap !== "function") return null;
  var cacheBabelInterop = new WeakMap();

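Note on the import shuffle above: `_helpers` now resolves to a new `../shared/helpers` module, while the workflow-local `./helpers` is re-aliased to `_helpers1` throughout the hunks below. Judging strictly from the call sites later in this diff, the shared module appears to expose at least the following surface (an inference from this diff, not the actual file):

    // ../shared/helpers -- inferred surface, per the call sites below
    //   FILEPATH_MARKER                       // the "@" suffix marking extracted fields
    //   validateExtractedFilePath(val, sourceFileAbspath, uniqueFilePaths, objPathToFieldStr)
    //   readExtractedFileSync(relpath, dirCtx, objPathToFieldStr)
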
@@ -65,102 +63,6 @@ function _interopRequireWildcard(obj, nodeInterop) {
  // For now we support up to two levels of content extraction in workflow.json.
  // (e.g. workflow.json, then visual_blocks.json)
  const MAX_EXTRACTION_LEVEL = 2;
- // The following files are expected to have valid json content, and should be
- // decoded and joined into the main workflow.json.
- const DECODABLE_JSON_FILES = new Set([
- _helpers.VISUAL_BLOCKS_JSON
- ]);
- /*
- * Validate the file path format of an extracted field. The file path must be:
- *
- * 1) Expressed as a relative path.
- *
- * For example:
- * subject@: "email_1/subject.html" // GOOD
- * subject@: "./email_1/subject.html" // GOOD
- * subject@: "/workflow-x/email_1/subject.html" // BAD
- *
- * 2) The resolved path must be contained inside the workflow directory
- *
- * For example (workflow-y is a different workflow dir in this example):
- * subject@: "./email_1/subject.html" // GOOD
- * subject@: "../workflow-y/email_1/subject.html" // BAD
- *
- * Note: does not validate the presence of the file nor the uniqueness of the
- * file path.
- */ const checkIfValidExtractedFilePathFormat = (relpath, sourceFileAbspath)=>{
- if (typeof relpath !== "string") return false;
- if (_nodePath.isAbsolute(relpath)) return false;
- const extractedFileAbspath = _nodePath.resolve(sourceFileAbspath, relpath);
- const pathDiff = _nodePath.relative(sourceFileAbspath, extractedFileAbspath);
- return !pathDiff.startsWith("..");
- };
- /*
- * Validate the extracted file path based on its format and uniqueness (but not
- * the presence).
- *
- * Note, the uniqueness check is based on reading from and writing to
- * uniqueFilePaths, which is MUTATED in place.
- */ const validateExtractedFilePath = (val, workflowDirCtx, uniqueFilePaths, objPathToFieldStr)=>{
- const workflowJsonPath = _nodePath.resolve(workflowDirCtx.abspath, _helpers.WORKFLOW_JSON);
- // Validate the file path format, and that it is unique per workflow.
- if (!checkIfValidExtractedFilePathFormat(val, workflowJsonPath) || typeof val !== "string" || val in uniqueFilePaths) {
- const error = new _error.JsonDataError("must be a relative path string to a unique file within the directory", objPathToFieldStr);
- return error;
- }
- // Keep track of all the valid extracted file paths that have been seen, so
- // we can validate each file path's uniqueness as we traverse.
- uniqueFilePaths[val] = true;
- return undefined;
- };
- const readExtractedFileSync = (relpath, workflowDirCtx, objPathToFieldStr = "")=>{
- // Check if the file actually exists at the given file path.
- const abspath = _nodePath.resolve(workflowDirCtx.abspath, relpath);
- const exists = _fsExtra.pathExistsSync(abspath);
- if (!exists) {
- const error = new _error.JsonDataError("must be a relative path string to a file that exists", objPathToFieldStr);
- return [
- undefined,
- error
- ];
- }
- // Read the file and check for valid liquid syntax given it is supported
- // across all message templates and file extensions.
- const contentStr = _fsExtra.readFileSync(abspath, "utf8");
- const liquidParseError = (0, _liquid.validateLiquidSyntax)(contentStr);
- if (liquidParseError) {
- const error = new _error.JsonDataError(`points to a file that contains invalid liquid syntax (${relpath})\n\n` + (0, _error.formatErrors)([
- liquidParseError
- ], {
- indentBy: 2
- }), objPathToFieldStr);
- return [
- undefined,
- error
- ];
- }
- // If the file is expected to contain decodable json, then parse the contentStr
- // as such.
- const fileName = _nodePath.basename(abspath.toLowerCase());
- const decodable = DECODABLE_JSON_FILES.has(fileName);
- const [content, jsonParseErrors] = decodable ? (0, _json.parseJson)(contentStr) : [
- contentStr,
- []
- ];
- if (jsonParseErrors.length > 0) {
- const error = new _error.JsonDataError(`points to a file with invalid content (${relpath})\n\n` + (0, _error.formatErrors)(jsonParseErrors, {
- indentBy: 2
- }), objPathToFieldStr);
- return [
- undefined,
- error
- ];
- }
- return [
- content,
- undefined
- ];
- };
  const joinExtractedFiles = async (workflowDirCtx, workflowJson)=>{
  // Tracks any errors encountered during traversal. Mutated in place.
  const errors = [];

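The helpers removed above were not dropped from the package: the hunks below call the same `validateExtractedFilePath` and `readExtractedFileSync` via `_helpers`, i.e. they moved into the shared module. The heart of the path-format check is `path.relative`: a resolved path that escapes the source directory yields a relative diff starting with "..". A minimal standalone sketch of that technique (the `isContained` name and the /proj paths are illustrative):

    const path = require("node:path");

    // True only for relative paths that, once resolved, stay inside the
    // location they were referenced from.
    const isContained = (relpath, sourceAbspath) => {
      if (typeof relpath !== "string") return false;
      if (path.isAbsolute(relpath)) return false;
      const resolved = path.resolve(sourceAbspath, relpath);
      return !path.relative(sourceAbspath, resolved).startsWith("..");
    };

    isContained("./email_1/subject.html", "/proj/workflow-x");             // true
    isContained("../workflow-y/email_1/subject.html", "/proj/workflow-x"); // false
    isContained("/workflow-x/email_1/subject.html", "/proj/workflow-x");   // false (absolute)
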
@@ -192,7 +94,7 @@ const joinExtractedFiles = async (workflowDirCtx, workflowJson)=>{
  const lastFound = (0, _object.getLastFound)(prevJoinedFilePaths, parts);
  const prevJoinedFilePath = typeof lastFound === "string" ? lastFound : undefined;
  const rebasedFilePath = prevJoinedFilePath ? _nodePath.join(_nodePath.dirname(prevJoinedFilePath), value) : value;
- const invalidFilePathError = validateExtractedFilePath(rebasedFilePath, workflowDirCtx, uniqueFilePaths, objPathToFieldStr);
+ const invalidFilePathError = (0, _helpers.validateExtractedFilePath)(rebasedFilePath, _nodePath.resolve(workflowDirCtx.abspath, _helpers1.WORKFLOW_JSON), uniqueFilePaths, objPathToFieldStr);
  if (invalidFilePathError) {
  errors.push(invalidFilePathError);
  // Wipe the invalid file path in the node so the final workflow json

@@ -206,7 +108,7 @@ const joinExtractedFiles = async (workflowDirCtx, workflowJson)=>{
  }
  // By this point we have a valid extracted file path, so attempt to read
  // the file at the file path.
- const [content, readExtractedFileError] = readExtractedFileSync(rebasedFilePath, workflowDirCtx, objPathToFieldStr);
+ const [content, readExtractedFileError] = (0, _helpers.readExtractedFileSync)(rebasedFilePath, workflowDirCtx, objPathToFieldStr);
  if (readExtractedFileError) {
  errors.push(readExtractedFileError);
  // Replace the extracted file path with the rebased one, and set the

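The call sites keep the `[content, error]` tuple convention across the module move, letting the traversal collect errors and continue rather than throw. A hedged consumer sketch (the surrounding variables mirror the hunk above):

    const [content, readErr] = readExtractedFileSync(relpath, workflowDirCtx, objPathToFieldStr);
    if (readErr) {
      errors.push(readErr); // record and keep traversing, as joinExtractedFiles does
    }
    // otherwise `content` is the raw file string, or parsed JSON for decodable files
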
@@ -236,7 +138,7 @@ const readWorkflowDir = async (workflowDirCtx, opts = {})=>{
  const { withExtractedFiles =false , withReadonlyField =false } = opts;
  const dirExists = await _fsExtra.pathExists(abspath);
  if (!dirExists) throw new Error(`${abspath} does not exist`);
- const workflowJsonPath = await (0, _helpers.lsWorkflowJson)(abspath);
+ const workflowJsonPath = await (0, _helpers1.lsWorkflowJson)(abspath);
  if (!workflowJsonPath) throw new Error(`${abspath} is not a workflow directory`);
  const result = await (0, _json.readJson)(workflowJsonPath);
  if (!result[0]) return result;

@@ -259,7 +161,7 @@
  // eslint-disable-next-line no-await-in-loop
  const [workflow, readErrors] = await readWorkflowDir(workflowDirCtx, opts);
  if (readErrors.length > 0) {
- const workflowJsonPath = _nodePath.resolve(workflowDirCtx.abspath, _helpers.WORKFLOW_JSON);
+ const workflowJsonPath = _nodePath.resolve(workflowDirCtx.abspath, _helpers1.WORKFLOW_JSON);
  const e = new _error.SourceError((0, _error.formatErrors)(readErrors), workflowJsonPath);
  errors.push(e);
  continue;

@@ -298,7 +200,7 @@ const readAllForCommandTarget = async (target, opts = {})=>{
  type: "workflow",
  key: dirent.name,
  abspath,
- exists: await (0, _helpers.isWorkflowDir)(abspath)
+ exists: await (0, _helpers1.isWorkflowDir)(abspath)
  };
  return workflowDirCtx;
  });

@@ -12,5 +12,6 @@ var StepType;
  StepType["Batch"] = "batch";
  StepType["Delay"] = "delay";
  StepType["HttpFetch"] = "http_fetch";
+ StepType["Branch"] = "branch";
  StepType["Throttle"] = "throttle";
  })(StepType || (StepType = {}));

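The new `branch` step type is what motivates the recursion added in the writer hunks below, which read `step.branches` and each branch's nested `steps`. Based only on those accesses, a branch step is shaped roughly like this (field set inferred from the diff, not exhaustive):

    const branchStep = {
      ref: "branch_1",
      type: "branch",
      branches: [
        { steps: [{ ref: "delay_1", type: "delay" }] },
        { steps: [{ ref: "http_fetch_1", type: "http_fetch" }] }
      ]
    };
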
@@ -23,8 +23,10 @@ const _lodash = require("lodash");
  const _const = require("../../helpers/const");
  const _json = require("../../helpers/json");
  const _object = require("../../helpers/object");
- const _helpers = require("./helpers");
+ const _helpers = require("../shared/helpers");
+ const _helpers1 = require("./helpers");
  const _reader = require("./reader");
+ const _types = require("./types");
  function _getRequireWildcardCache(nodeInterop) {
  if (typeof WeakMap !== "function") return null;
  var cacheBabelInterop = new WeakMap();

@@ -121,6 +123,9 @@ const formatExtractedFilePath = (objPathParts, fileExt, opts = {})=>{
  ];
  return _nodePath.join(...paths).toLowerCase();
  };
+ const NON_RECURSIVELY_TRAVERSABLE_FIELDS_FOR_EXTRACTION = new Set([
+ "branches"
+ ]);
  const compileExtractionSettings = (node, objPathParts = [])=>{
  const map = new Map();
  const compileRecursively = (item, parts)=>{

@@ -138,10 +143,14 @@ const compileExtractionSettings = (node, objPathParts = [])=>{
  key
  ], extractableFields[key]);
  }
- compileRecursively(val, [
- ...parts,
- key
- ]);
+ // Recursively examine the current field for any additionally extractable
+ // data within, except for disallowed fields
+ if (!NON_RECURSIVELY_TRAVERSABLE_FIELDS_FOR_EXTRACTION.has(key)) {
+ compileRecursively(val, [
+ ...parts,
+ key
+ ]);
+ }
  }
  return;
  }

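The `branches` guard stops `compileExtractionSettings` from flattening a branch's nested steps into the parent step's extraction settings; those subtrees are instead visited step by step via the recursion added further below. The general pattern, as a runnable minimal sketch (not the package's code):

    // Collect object paths but stop descending at named keys, leaving those
    // subtrees for a separate traversal pass.
    const SKIP_KEYS = new Set(["branches"]);
    const collectPaths = (node, parts = [], out = []) => {
      if (node === null || typeof node !== "object") return out;
      for (const [key, val] of Object.entries(node)) {
        out.push([...parts, key].join("."));
        if (!SKIP_KEYS.has(key)) collectPaths(val, [...parts, key], out);
      }
      return out;
    };

    collectPaths({ name: "b1", branches: [{ steps: [] }] });
    // => ["name", "branches"] -- nothing under branches.*
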
@@ -167,35 +176,23 @@ const compileExtractionSettings = (node, objPathParts = [])=>{
  return 0;
  }));
  };
- /*
- * For a given workflow payload (and its local workflow reference), this function
- * builds a "workflow directory bundle", which is an obj made up of all the
- * relative file paths (within the workflow directory) and its file content to
- * write the workflow directory.
- *
- * Every workflow will always have a workflow.json file, so every bundle includes
- * it and its content at minimum. To the extent the workflow includes any
- * extractable fields, those fields content get extracted out and added to the
- * bundle.
- *
- * Important things to keep in mind re: content extraction:
- * 1. There can be multiple places in workflow json where content extraction
- * happens.
- * 2. There can be multiple levels of content extraction happening, currently
- * at a maximum of 2 levels.
- *
- * The way this function works and handles the content extraction is by:
- * 1. Traversing the given step node, and compiling all annotated extraction
- * settings by the object path in the node *ordered from leaf to root*.
- * 2. Iterate over compiled extraction settings from leaf to root, and start
- * extracting out the field as needed. In case the node that needs to be
- * extracted out contains extracted file paths, then those file paths get
- * rebased to relative to the referenced file.
- */ const buildWorkflowDirBundle = (remoteWorkflow, localWorkflow = {})=>{
- const bundle = {};
- const mutWorkflow = (0, _lodash.cloneDeep)(remoteWorkflow);
- const localWorkflowStepsByRef = (0, _lodash.keyBy)(localWorkflow.steps || [], "ref");
- for (const step of mutWorkflow.steps){
+ const keyLocalWorkflowStepsByRef = (steps, result = {})=>{
+ if (!Array.isArray(steps)) return result;
+ for (const step of steps){
+ if (!(0, _lodash.isPlainObject)(step)) continue;
+ if (!step.ref) continue;
+ result[step.ref] = step;
+ if (step.type === _types.StepType.Branch && Array.isArray(step.branches)) {
+ for (const branch of step.branches){
+ if (!(0, _lodash.isPlainObject)(branch)) continue;
+ result = keyLocalWorkflowStepsByRef(branch.steps, result);
+ }
+ }
+ }
+ return result;
+ };
+ const recursivelyBuildWorkflowDirBundle = (bundle, steps, localWorkflowStepsByRef)=>{
+ for (const step of steps){
  // A compiled map of extraction settings of every field in the step where
  // we support content extraction, organized by each field's object path.
  const compiledExtractionSettings = compileExtractionSettings(step);

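Where the old code used a flat `keyBy(localWorkflow.steps, "ref")`, the new helper also indexes steps nested under branch steps, so a nested step that was previously extracted locally can still be matched by its ref. A small worked example (data illustrative):

    const steps = [
      { ref: "delay_1", type: "delay" },
      {
        ref: "branch_1",
        type: "branch",
        branches: [{ steps: [{ ref: "http_fetch_1", type: "http_fetch" }] }]
      }
    ];
    // keyLocalWorkflowStepsByRef(steps)
    // => { delay_1: {...}, branch_1: {...}, http_fetch_1: {...} }
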
@@ -217,7 +214,7 @@ const compileExtractionSettings = (node, objPathParts = [])=>{
  // First figure out the relative file path (within the workflow directory)
  // for the extracted file. If already extracted in the local workflow,
  // then use that; otherwise format a new file path.
- const relpath = extractedFilePath || formatExtractedFilePath(objPathParts, fileExt, {
+ const relpath = typeof extractedFilePath === "string" ? extractedFilePath : formatExtractedFilePath(objPathParts, fileExt, {
  unnestDirsBy: 1,
  nestIntoDirs: [
  step.ref

@@ -242,6 +239,7 @@ const compileExtractionSettings = (node, objPathParts = [])=>{
  // bundle for writing to the file system later. Then replace the field
  // content with the extracted file path and mark the field as extracted
  // with @ suffix.
+ //
  // TODO: Consider guarding against an edge case, and check if the relpath
  // already exists in the bundle, and if so make the relpath unique.
  (0, _lodash.set)(bundle, [

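The "@ suffix" marking works the same way as in the doc comment removed earlier in this diff: the extracted field is replaced by a pointer field whose name carries `FILEPATH_MARKER`. Roughly (field name and path illustrative):

    // Before extraction, inline in the step:
    const before = { subject: "Hey {{ recipient.name }}" };
    // After extraction, in workflow.json -- and the bundle gains an entry
    // { "email_1/subject.html": "Hey {{ recipient.name }}" }:
    const after = { "subject@": "email_1/subject.html" };
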
@@ -250,10 +248,48 @@ const compileExtractionSettings = (node, objPathParts = [])=>{
  (0, _lodash.set)(step, `${objPathStr}${_helpers.FILEPATH_MARKER}`, relpath);
  (0, _lodash.unset)(step, objPathParts);
  }
+ // Lastly, recurse thru any branches that exist in the workflow tree
+ if (step.type === _types.StepType.Branch) {
+ for (const branch of step.branches){
+ recursivelyBuildWorkflowDirBundle(bundle, branch.steps, localWorkflowStepsByRef);
+ }
+ }
  }
- // Finally, prepare the workflow data to be written into a workflow json file.
+ };
+ /*
+ * For a given workflow payload (and its local workflow reference), this function
+ * builds a "workflow directory bundle", which is an obj made up of all the
+ * relative file paths (within the workflow directory) and its file content to
+ * write the workflow directory.
+ *
+ * Every workflow will always have a workflow.json file, so every bundle includes
+ * it and its content at minimum. To the extent the workflow includes any
+ * extractable fields, those fields content get extracted out and added to the
+ * bundle.
+ *
+ * Important things to keep in mind re: content extraction:
+ * 1. There can be multiple places in workflow json where content extraction
+ * happens.
+ * 2. There can be multiple levels of content extraction happening, currently
+ * at a maximum of 2 levels.
+ *
+ * The way this function works and handles the content extraction is by:
+ * 1. Traversing the given step node, and compiling all annotated extraction
+ * settings by the object path in the node *ordered from leaf to root*.
+ * 2. Iterate over compiled extraction settings from leaf to root, and start
+ * extracting out the field as needed. In case the node that needs to be
+ * extracted out contains extracted file paths, then those file paths get
+ * rebased to relative to the referenced file.
+ */ const buildWorkflowDirBundle = (remoteWorkflow, localWorkflow = {})=>{
+ const bundle = {};
+ const mutWorkflow = (0, _lodash.cloneDeep)(remoteWorkflow);
+ const localWorkflowStepsByRef = keyLocalWorkflowStepsByRef(localWorkflow.steps);
+ // Recursively traverse the workflow step tree, mutating it and the bundle
+ // along the way
+ recursivelyBuildWorkflowDirBundle(bundle, mutWorkflow.steps, localWorkflowStepsByRef);
+ // Then, prepare the workflow data to be written into a workflow json file.
  return (0, _lodash.set)(bundle, [
- _helpers.WORKFLOW_JSON
+ _helpers1.WORKFLOW_JSON
  ], toWorkflowJson(mutWorkflow));
  };
  const writeWorkflowDirFromData = async (workflowDirCtx, remoteWorkflow)=>{

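End to end, a bundle maps workflow-directory-relative paths to file contents, with `workflow.json` always present (written with `outputJson` in the hunk below, everything else with `outputFile`). For a workflow with one extracted field, the result is shaped roughly like (paths illustrative):

    const bundle = {
      "email_1/subject.html": "Hey {{ recipient.name }}",
      "workflow.json": { /* workflow data with a subject@ pointer inside */ }
    };
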
@@ -274,7 +310,7 @@ const writeWorkflowDirFromBundle = async (workflowDirCtx, workflowDirBundle)=>{
  }
  const promises = Object.entries(workflowDirBundle).map(([relpath, fileContent])=>{
  const filePath = _nodePath.resolve(workflowDirCtx.abspath, relpath);
- return relpath === _helpers.WORKFLOW_JSON ? _fsExtra.outputJson(filePath, fileContent, {
+ return relpath === _helpers1.WORKFLOW_JSON ? _fsExtra.outputJson(filePath, fileContent, {
  spaces: _json.DOUBLE_SPACES
  }) : _fsExtra.outputFile(filePath, fileContent);
  });

@@ -310,7 +346,7 @@ const writeWorkflowDirFromBundle = async (workflowDirCtx, workflowDirBundle)=>{
  const promises = dirents.map(async (dirent)=>{
  const direntName = dirent.name.toLowerCase();
  const direntPath = _nodePath.resolve(indexDirCtx.abspath, direntName);
- if (await (0, _helpers.isWorkflowDir)(direntPath) && workflowsByKey[direntName]) {
+ if (await (0, _helpers1.isWorkflowDir)(direntPath) && workflowsByKey[direntName]) {
  return;
  }
  await _fsExtra.remove(direntPath);

@@ -333,7 +369,7 @@ const writeWorkflowsIndexDir = async (indexDirCtx, remoteWorkflows)=>{
  type: "workflow",
  key: workflow.key,
  abspath: workflowDirPath,
- exists: indexDirCtx.exists ? await (0, _helpers.isWorkflowDir)(workflowDirPath) : false
+ exists: indexDirCtx.exists ? await (0, _helpers1.isWorkflowDir)(workflowDirPath) : false
  };
  return writeWorkflowDirFromData(workflowDirCtx, workflow);
  });

@@ -11,6 +11,7 @@ Object.defineProperty(exports, "load", {
  get: ()=>load
  });
  const _nodePath = /*#__PURE__*/ _interopRequireWildcard(require("node:path"));
+ const _emailLayout = /*#__PURE__*/ _interopRequireWildcard(require("../marshal/email-layout"));
  const _translation = /*#__PURE__*/ _interopRequireWildcard(require("../marshal/translation"));
  const _workflow = /*#__PURE__*/ _interopRequireWildcard(require("../marshal/workflow"));
  function _getRequireWildcardCache(nodeInterop) {

@@ -63,6 +64,16 @@ const evaluateRecursively = async (ctx, currDir)=>{
  exists: true
  };
  }
+ // Check if we are inside a layout directory, and if so update the context.
+ const isEmailLayoutDir = await _emailLayout.isEmailLayoutDir(currDir);
+ if (!ctx.resourceDir && isEmailLayoutDir) {
+ ctx.resourceDir = {
+ type: "email_layout",
+ key: _nodePath.basename(currDir),
+ abspath: currDir,
+ exists: true
+ };
+ }
  // NOTE: Must keep this check as last in the order of directory-type checks
  // since the `isTranslationDir` only checks that the directory name is a
  // valid locale name.

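With this addition the loader recognizes three resource directory types while walking up from the current directory, and ordering matters: the translation check runs last because it only matches on locale-shaped directory names. A minimal sketch of that precedence (the `resolveResourceDirType` wrapper is illustrative; the is*Dir checks are assumed to come from the marshal modules imported above):

    const resolveResourceDirType = async (dir) => {
      if (await _workflow.isWorkflowDir(dir)) return "workflow";
      if (await _emailLayout.isEmailLayoutDir(dir)) return "email_layout";
      if (await _translation.isTranslationDir(dir)) return "translation"; // name-based check only
      return undefined;
    };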