@knocklabs/cli 0.1.18 → 0.1.20
This diff shows the content of publicly available package versions released to a supported registry. It is provided for informational purposes only and reflects the changes between package versions as they appear in their public registries.
- package/README.md +132 -24
- package/dist/commands/message-type/get.js +124 -0
- package/dist/commands/message-type/list.js +101 -0
- package/dist/commands/message-type/pull.js +218 -0
- package/dist/commands/message-type/push.js +171 -0
- package/dist/commands/message-type/validate.js +148 -0
- package/dist/commands/partial/get.js +124 -0
- package/dist/commands/partial/list.js +103 -0
- package/dist/commands/partial/pull.js +209 -0
- package/dist/commands/partial/push.js +169 -0
- package/dist/commands/partial/validate.js +146 -0
- package/dist/lib/api-v1.js +94 -0
- package/dist/lib/base-command.js +1 -4
- package/dist/lib/helpers/const.js +3 -3
- package/dist/lib/helpers/error.js +3 -6
- package/dist/lib/helpers/liquid.js +0 -2
- package/dist/lib/helpers/page.js +3 -3
- package/dist/lib/marshal/guide/index.js +19 -0
- package/dist/lib/marshal/guide/processor.isomorphic.js +85 -0
- package/dist/lib/marshal/guide/types.js +4 -0
- package/dist/lib/marshal/index.isomorphic.js +12 -8
- package/dist/lib/marshal/message-type/helpers.js +135 -0
- package/dist/lib/marshal/message-type/index.js +3 -0
- package/dist/lib/marshal/message-type/reader.js +198 -0
- package/dist/lib/marshal/message-type/writer.js +175 -0
- package/dist/lib/marshal/partial/helpers.js +134 -0
- package/dist/lib/marshal/partial/index.js +3 -0
- package/dist/lib/marshal/partial/reader.js +198 -0
- package/dist/lib/marshal/partial/types.js +3 -3
- package/dist/lib/marshal/partial/writer.js +175 -0
- package/dist/lib/marshal/workflow/types.js +3 -3
- package/dist/lib/run-context/loader.js +35 -31
- package/oclif.manifest.json +776 -1
- package/package.json +13 -13
@@ -0,0 +1,175 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", {
+    value: true
+});
+function _export(target, all) {
+    for(var name in all)Object.defineProperty(target, name, {
+        enumerable: true,
+        get: all[name]
+    });
+}
+_export(exports, {
+    prunePartialsIndexDir: function() {
+        return prunePartialsIndexDir;
+    },
+    writePartialDirFromData: function() {
+        return writePartialDirFromData;
+    },
+    writePartialsIndexDir: function() {
+        return writePartialsIndexDir;
+    }
+});
+const _nodepath = /*#__PURE__*/ _interop_require_wildcard(require("node:path"));
+const _fsextra = /*#__PURE__*/ _interop_require_wildcard(require("fs-extra"));
+const _lodash = require("lodash");
+const _const = require("../../helpers/const");
+const _json = require("../../helpers/json");
+const _helpers = require("./helpers");
+const _processorisomorphic = require("./processor.isomorphic");
+const _reader = require("./reader");
+function _getRequireWildcardCache(nodeInterop) {
+    if (typeof WeakMap !== "function") return null;
+    var cacheBabelInterop = new WeakMap();
+    var cacheNodeInterop = new WeakMap();
+    return (_getRequireWildcardCache = function(nodeInterop) {
+        return nodeInterop ? cacheNodeInterop : cacheBabelInterop;
+    })(nodeInterop);
+}
+function _interop_require_wildcard(obj, nodeInterop) {
+    if (!nodeInterop && obj && obj.__esModule) {
+        return obj;
+    }
+    if (obj === null || typeof obj !== "object" && typeof obj !== "function") {
+        return {
+            default: obj
+        };
+    }
+    var cache = _getRequireWildcardCache(nodeInterop);
+    if (cache && cache.has(obj)) {
+        return cache.get(obj);
+    }
+    var newObj = {
+        __proto__: null
+    };
+    var hasPropertyDescriptor = Object.defineProperty && Object.getOwnPropertyDescriptor;
+    for(var key in obj){
+        if (key !== "default" && Object.prototype.hasOwnProperty.call(obj, key)) {
+            var desc = hasPropertyDescriptor ? Object.getOwnPropertyDescriptor(obj, key) : null;
+            if (desc && (desc.get || desc.set)) {
+                Object.defineProperty(newObj, key, desc);
+            } else {
+                newObj[key] = obj[key];
+            }
+        }
+    }
+    newObj.default = obj;
+    if (cache) {
+        cache.set(obj, newObj);
+    }
+    return newObj;
+}
+const writePartialDirFromData = async (partialDirCtx, remotePartial)=>{
+    // If the partial directory exists on the file system (i.e. previously
+    // pulled before), then read the partial file to use as a reference.
+    const [localPartial] = partialDirCtx.exists ? await (0, _reader.readPartialDir)(partialDirCtx, {
+        withExtractedFiles: true
+    }) : [];
+    const bundle = (0, _processorisomorphic.buildPartialDirBundle)(remotePartial, localPartial);
+    return writePartialDirFromBundle(partialDirCtx, bundle);
+};
+/*
+ * A lower level write function that takes a constructed partial dir bundle
+ * and writes it into a partial directory on a local file system.
+ *
+ * It does not make any assumptions about how the partial directory bundle was
+ * built; for example, it can be from parsing the partial data fetched from
+ * the Knock API, or built manually for scaffolding purposes.
+ */ const writePartialDirFromBundle = async (partialDirCtx, partialDirBundle)=>{
+    const backupDirPath = _nodepath.resolve(_const.sandboxDir, (0, _lodash.uniqueId)("backup"));
+    try {
+        if (partialDirCtx.exists) {
+            await _fsextra.copy(partialDirCtx.abspath, backupDirPath);
+            await _fsextra.emptyDir(partialDirCtx.abspath);
+        }
+        const promises = Object.entries(partialDirBundle).map(([relpath, fileContent])=>{
+            const filePath = _nodepath.resolve(partialDirCtx.abspath, relpath);
+            return relpath === _processorisomorphic.PARTIAL_JSON ? _fsextra.outputJson(filePath, fileContent, {
+                spaces: _json.DOUBLE_SPACES
+            }) : _fsextra.outputFile(filePath, fileContent !== null && fileContent !== void 0 ? fileContent : "");
+        });
+        await Promise.all(promises);
+    } catch (error) {
+        // In case of any error, wipe the target directory that is likely in a bad
+        // state then restore the backup if one existed before.
+        if (partialDirCtx.exists) {
+            await _fsextra.emptyDir(partialDirCtx.abspath);
+            await _fsextra.copy(backupDirPath, partialDirCtx.abspath);
+        } else {
+            await _fsextra.remove(partialDirCtx.abspath);
+        }
+        throw error;
+    } finally{
+        // Always clean up the backup directory in the temp sandbox.
+        await _fsextra.remove(backupDirPath);
+    }
+};
+/*
+ * Prunes the index directory by removing any files, or directories that aren't
+ * partial dirs found in fetched partials. We want to preserve any partial
+ * dirs that are going to be updated with remote partials, so extracted links
+ * can be respected.
+ */ const prunePartialsIndexDir = async (indexDirCtx, remotePartials)=>{
+    const partialsByKey = Object.fromEntries(remotePartials.map((w)=>[
+            w.key.toLowerCase(),
+            w
+        ]));
+    const dirents = await _fsextra.readdir(indexDirCtx.abspath, {
+        withFileTypes: true
+    });
+    const promises = dirents.map(async (dirent)=>{
+        const direntName = dirent.name.toLowerCase();
+        const direntPath = _nodepath.resolve(indexDirCtx.abspath, direntName);
+        if (await (0, _helpers.isPartialDir)(direntPath) && partialsByKey[direntName]) {
+            return;
+        }
+        await _fsextra.remove(direntPath);
+    });
+    await Promise.all(promises);
+};
+const writePartialsIndexDir = async (indexDirCtx, remotePartials)=>{
+    const backupDirPath = _nodepath.resolve(_const.sandboxDir, (0, _lodash.uniqueId)("backup"));
+    try {
+        // If the index directory already exists, back it up in the temp sandbox
+        // before wiping it clean.
+        if (indexDirCtx.exists) {
+            await _fsextra.copy(indexDirCtx.abspath, backupDirPath);
+            await prunePartialsIndexDir(indexDirCtx, remotePartials);
+        }
+        // Write given remote partials into the given partials directory path.
+        const writePartialDirPromises = remotePartials.map(async (partial)=>{
+            const partialDirPath = _nodepath.resolve(indexDirCtx.abspath, partial.key);
+            const partialDirCtx = {
+                type: "partial",
+                key: partial.key,
+                abspath: partialDirPath,
+                exists: indexDirCtx.exists ? await (0, _helpers.isPartialDir)(partialDirPath) : false
+            };
+            return writePartialDirFromData(partialDirCtx, partial);
+        });
+        await Promise.all(writePartialDirPromises);
+    } catch (error) {
+        console.log(error);
+        // In case of any error, wipe the index directory that is likely in a bad
+        // state then restore the backup if one existed before.
+        if (indexDirCtx.exists) {
+            await _fsextra.emptyDir(indexDirCtx.abspath);
+            await _fsextra.copy(backupDirPath, indexDirCtx.abspath);
+        } else {
+            await _fsextra.remove(indexDirCtx.abspath);
+        }
+        throw error;
+    } finally{
+        // Always clean up the backup directory in the temp sandbox.
+        await _fsextra.remove(backupDirPath);
+    }
+};
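The new partial writer above (and, per the file listing, its message-type counterpart) writes a directory with a backup-and-restore pattern: snapshot the existing directory into a temporary backup, wipe and rewrite it, roll back to the snapshot if any write fails, and always delete the backup afterwards. Below is a minimal standalone sketch of that pattern using the same fs-extra calls; `safeWriteDir` and the sandbox path are illustrative stand-ins, not names from the package.

```typescript
import * as path from "node:path";
import * as fse from "fs-extra";

// Hypothetical location for temporary backups (the CLI uses its own
// sandboxDir constant from helpers/const).
const sandboxDir = path.resolve(process.cwd(), ".backup-sandbox");

// Write a map of { relativePath: content } into targetDir, restoring the
// previous contents if anything fails part-way through.
const safeWriteDir = async (
  targetDir: string,
  files: Record<string, string>,
): Promise<void> => {
  const backupDir = path.resolve(sandboxDir, `backup-${Date.now()}`);
  const existed = await fse.pathExists(targetDir);

  try {
    if (existed) {
      await fse.copy(targetDir, backupDir); // snapshot the current state
      await fse.emptyDir(targetDir);        // start from a clean directory
    }
    await Promise.all(
      Object.entries(files).map(([relpath, content]) =>
        fse.outputFile(path.resolve(targetDir, relpath), content),
      ),
    );
  } catch (error) {
    // Roll back: restore the snapshot, or remove the half-written directory.
    if (existed) {
      await fse.emptyDir(targetDir);
      await fse.copy(backupDir, targetDir);
    } else {
      await fse.remove(targetDir);
    }
    throw error;
  } finally {
    // The backup is always temporary.
    await fse.remove(backupDir);
  }
};
```

The point of the pattern is that the target directory is never left half-written: either the new bundle lands completely, or the previous contents are restored.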
@@ -8,12 +8,12 @@ Object.defineProperty(exports, "StepType", {
         return StepType;
     }
 });
-var StepType
-(function(StepType) {
+var StepType = /*#__PURE__*/ function(StepType) {
     StepType["Channel"] = "channel";
     StepType["Batch"] = "batch";
    StepType["Delay"] = "delay";
     StepType["HttpFetch"] = "http_fetch";
     StepType["Branch"] = "branch";
     StepType["Throttle"] = "throttle";
- …
+    return StepType;
+}({});
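The removed and added lines above are two ways the compiler can emit the same TypeScript enum (a bare IIFE that mutates a declared variable versus an assigned IIFE that returns the object); the change is in the compiled output style, not in the enum itself. A plausible sketch of the source enum, with member names and string values taken directly from the diff:

```typescript
// Plausible source for the compiled output in the hunk above.
export enum StepType {
  Channel = "channel",
  Batch = "batch",
  Delay = "delay",
  HttpFetch = "http_fetch",
  Branch = "branch",
  Throttle = "throttle",
}
```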
@@ -14,6 +14,8 @@ Object.defineProperty(exports, "load", {
 });
 const _nodepath = /*#__PURE__*/ _interop_require_wildcard(require("node:path"));
 const _emaillayout = /*#__PURE__*/ _interop_require_wildcard(require("../marshal/email-layout"));
+const _messagetype = /*#__PURE__*/ _interop_require_wildcard(require("../marshal/message-type"));
+const _partial = /*#__PURE__*/ _interop_require_wildcard(require("../marshal/partial"));
 const _translation = /*#__PURE__*/ _interop_require_wildcard(require("../marshal/translation"));
 const _workflow = /*#__PURE__*/ _interop_require_wildcard(require("../marshal/workflow"));
 function _getRequireWildcardCache(nodeInterop) {
@@ -57,38 +59,40 @@ function _interop_require_wildcard(obj, nodeInterop) {
     }
     return newObj;
 }
+const buildResourceDirContext = (type, currDir)=>{
+    return {
+        type,
+        key: _nodepath.basename(currDir),
+        abspath: currDir,
+        exists: true
+    };
+};
 const evaluateRecursively = async (ctx, currDir)=>{
-    // Check if we are inside a
- …
-        ctx.resourceDir = {
-            type: "translation",
-            key: _nodepath.basename(currDir),
-            abspath: currDir,
-            exists: true
-        };
+    // Check if we are inside a resource directory and if so update the context.
+    if (!ctx.resourceDir) {
+        const isWorkflowDir = await _workflow.isWorkflowDir(currDir);
+        if (isWorkflowDir) {
+            ctx.resourceDir = buildResourceDirContext("workflow", currDir);
+        }
+        const isEmailLayoutDir = await _emaillayout.isEmailLayoutDir(currDir);
+        if (isEmailLayoutDir) {
+            ctx.resourceDir = buildResourceDirContext("email_layout", currDir);
+        }
+        const isPartialDir = await _partial.isPartialDir(currDir);
+        if (isPartialDir) {
+            ctx.resourceDir = buildResourceDirContext("partial", currDir);
+        }
+        const isMessageTypeDir = await _messagetype.isMessageTypeDir(currDir);
+        if (isMessageTypeDir) {
+            ctx.resourceDir = buildResourceDirContext("message_type", currDir);
+        }
+        // NOTE: Must keep this check as last in the order of directory-type checks
+        // since the `isTranslationDir` only checks that the directory name is a
+        // valid locale name.
+        const isTranslationDir = _translation.isTranslationDir(currDir);
+        if (isTranslationDir) {
+            ctx.resourceDir = buildResourceDirContext("translation", currDir);
+        }
     }
     // If we've identified the resource context, no need to go further.
     // TODO: In the future, consider supporting a knock project config file which