@knocklabs/cli 0.1.19 → 0.1.20
This diff shows the contents of publicly available package versions as released to their public registry. It is provided for informational purposes only and reflects the changes between those published versions.
- package/README.md +29 -29
- package/dist/commands/message-type/get.js +124 -0
- package/dist/commands/message-type/list.js +101 -0
- package/dist/commands/message-type/pull.js +218 -0
- package/dist/commands/message-type/push.js +171 -0
- package/dist/commands/message-type/validate.js +148 -0
- package/dist/commands/partial/pull.js +2 -2
- package/dist/lib/api-v1.js +47 -0
- package/dist/lib/base-command.js +1 -4
- package/dist/lib/helpers/const.js +3 -3
- package/dist/lib/helpers/error.js +3 -6
- package/dist/lib/helpers/page.js +3 -3
- package/dist/lib/marshal/guide/index.js +19 -0
- package/dist/lib/marshal/guide/processor.isomorphic.js +85 -0
- package/dist/lib/marshal/guide/types.js +4 -0
- package/dist/lib/marshal/index.isomorphic.js +12 -8
- package/dist/lib/marshal/message-type/helpers.js +135 -0
- package/dist/lib/marshal/message-type/index.js +3 -0
- package/dist/lib/marshal/message-type/reader.js +198 -0
- package/dist/lib/marshal/message-type/writer.js +175 -0
- package/dist/lib/marshal/partial/reader.js +3 -3
- package/dist/lib/marshal/partial/types.js +3 -3
- package/dist/lib/marshal/workflow/types.js +3 -3
- package/dist/lib/run-context/loader.js +35 -31
- package/oclif.manifest.json +391 -1
- package/package.json +11 -11

package/dist/lib/marshal/guide/index.js
@@ -0,0 +1,19 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", {
+    value: true
+});
+_export_star(require("./processor.isomorphic"), exports);
+_export_star(require("./types"), exports);
+function _export_star(from, to) {
+    Object.keys(from).forEach(function(k) {
+        if (k !== "default" && !Object.prototype.hasOwnProperty.call(to, k)) {
+            Object.defineProperty(to, k, {
+                enumerable: true,
+                get: function() {
+                    return from[k];
+                }
+            });
+        }
+    });
+    return from;
+}
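
The new guide barrel above is compiled CommonJS output, with `_export_star` copying every named export from the listed modules onto `exports`. As a rough sketch, and only an assumption about the pre-compilation source rather than anything shown in this diff, the original module likely reduces to two re-exports:

```js
// Hypothetical source for the guide barrel (assumed, not part of the diff):
// re-export every named export, skipping defaults, exactly as _export_star does.
export * from "./processor.isomorphic";
export * from "./types";
```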

package/dist/lib/marshal/guide/processor.isomorphic.js
@@ -0,0 +1,85 @@
+/*
+ * IMPORTANT:
+ *
+ * This file is suffixed with `.isomorphic` because the code in this file is
+ * meant to run not just in a nodejs environment but also in a browser. For this
+ * reason there are some restrictions for which nodejs imports are allowed in
+ * this module. See `.eslintrc.json` for more details.
+ */ "use strict";
+Object.defineProperty(exports, "__esModule", {
+    value: true
+});
+function _export(target, all) {
+    for(var name in all)Object.defineProperty(target, name, {
+        enumerable: true,
+        get: all[name]
+    });
+}
+_export(exports, {
+    GUIDE_JSON: function() {
+        return GUIDE_JSON;
+    },
+    buildGuideDirBundle: function() {
+        return buildGuideDirBundle;
+    }
+});
+const _lodash = require("lodash");
+const _objectisomorphic = require("../../helpers/object.isomorphic");
+const _constisomorphic = require("../shared/const.isomorphic");
+const _helpersisomorphic = require("../shared/helpers.isomorphic");
+const GUIDE_JSON = "guide.json";
+const compileExtractionSettings = (guide)=>{
+    const extractableFields = (0, _lodash.get)(guide, [
+        "__annotation",
+        "extractable_fields"
+    ], {});
+    const map = new Map();
+    for (const [key] of Object.entries(guide)){
+        // If the field we are on is extractable, then add its extraction settings
+        // to the map with the current object path.
+        if (key in extractableFields) {
+            map.set([
+                key
+            ], extractableFields[key]);
+        }
+    }
+    return map;
+};
+const buildGuideDirBundle = (remoteGuide, localGuide = {})=>{
+    const bundle = {};
+    const mutRemoteGuide = (0, _lodash.cloneDeep)(remoteGuide);
+    // A map of extraction settings of every field in the guide.
+    const compiledExtractionSettings = compileExtractionSettings(mutRemoteGuide);
+    // Iterate through each extractable field, determine whether we need to
+    // extract the field content, and if so, perform the extraction.
+    for (const [objPathParts, extractionSettings] of compiledExtractionSettings){
+        // If this field path does not exist, then we don't extract.
+        if (!(0, _lodash.has)(mutRemoteGuide, objPathParts)) continue;
+        // If the field at this path is extracted in the local guide, then always
+        // extract; otherwise extract based on the field settings default.
+        const objPathStr = _objectisomorphic.ObjPath.stringify(objPathParts);
+        const extractedFilePath = (0, _lodash.get)(localGuide, `${objPathStr}${_constisomorphic.FILEPATH_MARKER}`);
+        const { default: extractByDefault, file_ext: fileExt } = extractionSettings;
+        if (!extractedFilePath && !extractByDefault) continue;
+        // By this point, we have a field where we need to extract its content.
+        const data = (0, _lodash.get)(mutRemoteGuide, objPathParts);
+        const fileName = objPathParts.pop();
+        // If we have an extracted file path from the local guide, we use that;
+        // otherwise use the default path.
+        const relpath = typeof extractedFilePath === "string" ? extractedFilePath : `${fileName}.${fileExt}`;
+        // Perform the extraction by adding the content and its file path to the
+        // bundle for writing to the file system later. Then replace the field
+        // content with the extracted file path and mark the field as extracted
+        // with @ suffix.
+        (0, _lodash.set)(bundle, [
+            relpath
+        ], data);
+        (0, _lodash.set)(mutRemoteGuide, `${objPathStr}${_constisomorphic.FILEPATH_MARKER}`, relpath);
+        (0, _lodash.unset)(mutRemoteGuide, objPathStr);
+    }
+    // At this point the bundle contains all extractable files, so we finally add
+    // the guide JSON relative path + the file content.
+    return (0, _lodash.set)(bundle, [
+        GUIDE_JSON
+    ], (0, _helpersisomorphic.prepareResourceJson)(mutRemoteGuide));
+};
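
`buildGuideDirBundle` returns a flat bundle object keyed by relative file paths: each extractable field's content is moved into its own file entry, the field in the guide JSON is replaced by a path reference carrying the `@` suffix mentioned in the comments, and the prepared guide JSON is added under `guide.json`. A minimal sketch of that shape, using a hypothetical guide with a single extractable `content` field (the field name and values are invented for illustration):

```js
// Hypothetical input: a guide whose "content" field is annotated as
// extractable by default, with an "md" file extension.
const remoteGuide = {
  key: "onboarding",
  content: "# Welcome",
  __annotation: {
    extractable_fields: { content: { default: true, file_ext: "md" } },
  },
};

// buildGuideDirBundle(remoteGuide) should then produce a bundle roughly like:
// {
//   "content.md": "# Welcome",
//   "guide.json": { key: "onboarding", "content@": "content.md", /* ... */ }
// }
// where "content@" marks the field as extracted to a relative file path.
```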

package/dist/lib/marshal/index.isomorphic.js
@@ -14,21 +14,25 @@ _export(exports, {
     buildEmailLayoutDirBundle: function() {
         return _processorisomorphic.buildEmailLayoutDirBundle;
     },
+    buildGuideDirBundle: function() {
+        return _processorisomorphic1.buildGuideDirBundle;
+    },
     buildMessageTypeDirBundle: function() {
-        return
+        return _processorisomorphic2.buildMessageTypeDirBundle;
     },
     buildPartialDirBundle: function() {
-        return
+        return _processorisomorphic3.buildPartialDirBundle;
     },
     buildTranslationDirBundle: function() {
-        return
+        return _processorisomorphic4.buildTranslationDirBundle;
     },
     buildWorkflowDirBundle: function() {
-        return
+        return _processorisomorphic5.buildWorkflowDirBundle;
     }
 });
 const _processorisomorphic = require("./email-layout/processor.isomorphic");
-const _processorisomorphic1 = require("./
-const _processorisomorphic2 = require("./
-const _processorisomorphic3 = require("./
-const _processorisomorphic4 = require("./
+const _processorisomorphic1 = require("./guide/processor.isomorphic");
+const _processorisomorphic2 = require("./message-type/processor.isomorphic");
+const _processorisomorphic3 = require("./partial/processor.isomorphic");
+const _processorisomorphic4 = require("./translation/processor.isomorphic");
+const _processorisomorphic5 = require("./workflow/processor.isomorphic");

package/dist/lib/marshal/message-type/helpers.js
@@ -0,0 +1,135 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", {
+    value: true
+});
+function _export(target, all) {
+    for(var name in all)Object.defineProperty(target, name, {
+        enumerable: true,
+        get: all[name]
+    });
+}
+_export(exports, {
+    ensureValidCommandTarget: function() {
+        return ensureValidCommandTarget;
+    },
+    isMessageTypeDir: function() {
+        return isMessageTypeDir;
+    },
+    lsMessageTypeJson: function() {
+        return lsMessageTypeJson;
+    },
+    messageTypeJsonPath: function() {
+        return messageTypeJsonPath;
+    }
+});
+const _nodepath = /*#__PURE__*/ _interop_require_wildcard(require("node:path"));
+const _core = require("@oclif/core");
+const _fsextra = /*#__PURE__*/ _interop_require_wildcard(require("fs-extra"));
+const _processorisomorphic = require("./processor.isomorphic");
+function _getRequireWildcardCache(nodeInterop) {
+    if (typeof WeakMap !== "function") return null;
+    var cacheBabelInterop = new WeakMap();
+    var cacheNodeInterop = new WeakMap();
+    return (_getRequireWildcardCache = function(nodeInterop) {
+        return nodeInterop ? cacheNodeInterop : cacheBabelInterop;
+    })(nodeInterop);
+}
+function _interop_require_wildcard(obj, nodeInterop) {
+    if (!nodeInterop && obj && obj.__esModule) {
+        return obj;
+    }
+    if (obj === null || typeof obj !== "object" && typeof obj !== "function") {
+        return {
+            default: obj
+        };
+    }
+    var cache = _getRequireWildcardCache(nodeInterop);
+    if (cache && cache.has(obj)) {
+        return cache.get(obj);
+    }
+    var newObj = {
+        __proto__: null
+    };
+    var hasPropertyDescriptor = Object.defineProperty && Object.getOwnPropertyDescriptor;
+    for(var key in obj){
+        if (key !== "default" && Object.prototype.hasOwnProperty.call(obj, key)) {
+            var desc = hasPropertyDescriptor ? Object.getOwnPropertyDescriptor(obj, key) : null;
+            if (desc && (desc.get || desc.set)) {
+                Object.defineProperty(newObj, key, desc);
+            } else {
+                newObj[key] = obj[key];
+            }
+        }
+    }
+    newObj.default = obj;
+    if (cache) {
+        cache.set(obj, newObj);
+    }
+    return newObj;
+}
+const messageTypeJsonPath = (messageTypeDirCtx)=>_nodepath.resolve(messageTypeDirCtx.abspath, _processorisomorphic.MESSAGE_TYPE_JSON);
+const lsMessageTypeJson = async (dirPath)=>{
+    const messageTypeJsonPath = _nodepath.resolve(dirPath, _processorisomorphic.MESSAGE_TYPE_JSON);
+    const exists = await _fsextra.pathExists(messageTypeJsonPath);
+    return exists ? messageTypeJsonPath : undefined;
+};
+const isMessageTypeDir = async (dirPath)=>Boolean(await lsMessageTypeJson(dirPath));
+const ensureValidCommandTarget = async (props, runContext)=>{
+    const { args, flags } = props;
+    const { commandId, resourceDir: resourceDirCtx, cwd: runCwd } = runContext;
+    // If the target resource is a different type than the current resource dir
+    // type, error out.
+    if (resourceDirCtx && resourceDirCtx.type !== "message_type") {
+        return _core.ux.error(`Cannot run ${commandId} inside a ${resourceDirCtx.type} directory`);
+    }
+    // Cannot accept both message type key arg and --all flag.
+    if (flags.all && args.messageTypeKey) {
+        return _core.ux.error(`messageTypeKey arg \`${args.messageTypeKey}\` cannot also be provided when using --all`);
+    }
+    // --all flag is given, which means no message type key arg.
+    if (flags.all) {
+        // If --all flag used inside a message type directory, then require a message
+        // types dir path.
+        if (resourceDirCtx && !flags["message-types-dir"]) {
+            return _core.ux.error("Missing required flag message-types-dir");
+        }
+        // Targeting all message type dirs in the message types index dir.
+        // TODO: Default to the knock project config first if present before cwd.
+        const defaultToCwd = {
+            abspath: runCwd,
+            exists: true
+        };
+        const indexDirCtx = flags["message-types-dir"] || defaultToCwd;
+        return {
+            type: "messageTypesIndexDir",
+            context: indexDirCtx
+        };
+    }
+    // Message type key arg is given, which means no --all flag.
+    if (args.messageTypeKey) {
+        if (resourceDirCtx && resourceDirCtx.key !== args.messageTypeKey) {
+            return _core.ux.error(`Cannot run ${commandId} \`${args.messageTypeKey}\` inside another message type directory:\n${resourceDirCtx.key}`);
+        }
+        const targetDirPath = resourceDirCtx ? resourceDirCtx.abspath : _nodepath.resolve(runCwd, args.messageTypeKey);
+        const messageTypeDirCtx = {
+            type: "message_type",
+            key: args.messageTypeKey,
+            abspath: targetDirPath,
+            exists: await isMessageTypeDir(targetDirPath)
+        };
+        return {
+            type: "messageTypeDir",
+            context: messageTypeDirCtx
+        };
+    }
+    // From this point on, we have neither a message type key arg nor --all flag.
+    // If running inside a message type directory, then use that message type
+    // directory.
+    if (resourceDirCtx) {
+        return {
+            type: "messageTypeDir",
+            context: resourceDirCtx
+        };
+    }
+    return _core.ux.error("Missing 1 required arg:\nmessageTypeKey");
+};
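
`ensureValidCommandTarget` normalizes the command's arg/flag combinations into one of two tagged targets that the reader then consumes: an index directory (for `--all`) or a single message type directory. A sketch of the two shapes follows; the paths and key are hypothetical, but the fields mirror the return statements above.

```js
// Target returned when --all is used (or resolved from the current directory):
const indexTarget = {
  type: "messageTypesIndexDir",
  context: { abspath: "/project/message_types", exists: true },
};

// Target returned for a single messageTypeKey arg, e.g. `banner`:
const singleTarget = {
  type: "messageTypeDir",
  context: {
    type: "message_type",
    key: "banner",
    abspath: "/project/message_types/banner",
    exists: true,
  },
};
```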

package/dist/lib/marshal/message-type/index.js
@@ -2,8 +2,11 @@
 Object.defineProperty(exports, "__esModule", {
     value: true
 });
+_export_star(require("./helpers"), exports);
 _export_star(require("./processor.isomorphic"), exports);
+_export_star(require("./reader"), exports);
 _export_star(require("./types"), exports);
+_export_star(require("./writer"), exports);
 function _export_star(from, to) {
     Object.keys(from).forEach(function(k) {
         if (k !== "default" && !Object.prototype.hasOwnProperty.call(to, k)) {

package/dist/lib/marshal/message-type/reader.js
@@ -0,0 +1,198 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", {
+    value: true
+});
+function _export(target, all) {
+    for(var name in all)Object.defineProperty(target, name, {
+        enumerable: true,
+        get: all[name]
+    });
+}
+_export(exports, {
+    readAllForCommandTarget: function() {
+        return readAllForCommandTarget;
+    },
+    readMessageTypeDir: function() {
+        return readMessageTypeDir;
+    }
+});
+const _nodepath = /*#__PURE__*/ _interop_require_default(require("node:path"));
+const _core = require("@oclif/core");
+const _fsextra = /*#__PURE__*/ _interop_require_wildcard(require("fs-extra"));
+const _lodash = require("lodash");
+const _error = require("../../helpers/error");
+const _json = require("../../helpers/json");
+const _objectisomorphic = require("../../helpers/object.isomorphic");
+const _helpers = require("../shared/helpers");
+const _constisomorphic = require("../shared/const.isomorphic");
+const _helpers1 = require("./helpers");
+const _processorisomorphic = require("./processor.isomorphic");
+function _interop_require_default(obj) {
+    return obj && obj.__esModule ? obj : {
+        default: obj
+    };
+}
+function _getRequireWildcardCache(nodeInterop) {
+    if (typeof WeakMap !== "function") return null;
+    var cacheBabelInterop = new WeakMap();
+    var cacheNodeInterop = new WeakMap();
+    return (_getRequireWildcardCache = function(nodeInterop) {
+        return nodeInterop ? cacheNodeInterop : cacheBabelInterop;
+    })(nodeInterop);
+}
+function _interop_require_wildcard(obj, nodeInterop) {
+    if (!nodeInterop && obj && obj.__esModule) {
+        return obj;
+    }
+    if (obj === null || typeof obj !== "object" && typeof obj !== "function") {
+        return {
+            default: obj
+        };
+    }
+    var cache = _getRequireWildcardCache(nodeInterop);
+    if (cache && cache.has(obj)) {
+        return cache.get(obj);
+    }
+    var newObj = {
+        __proto__: null
+    };
+    var hasPropertyDescriptor = Object.defineProperty && Object.getOwnPropertyDescriptor;
+    for(var key in obj){
+        if (key !== "default" && Object.prototype.hasOwnProperty.call(obj, key)) {
+            var desc = hasPropertyDescriptor ? Object.getOwnPropertyDescriptor(obj, key) : null;
+            if (desc && (desc.get || desc.set)) {
+                Object.defineProperty(newObj, key, desc);
+            } else {
+                newObj[key] = obj[key];
+            }
+        }
+    }
+    newObj.default = obj;
+    if (cache) {
+        cache.set(obj, newObj);
+    }
+    return newObj;
+}
+/*
+ * For the given list of message type directory contexts, read each message type dir and
+ * return message type directory data.
+ */ const readMessageTypeDirs = async (messageTypeDirCtxs, opts = {})=>{
+    const messageTypes = [];
+    const errors = [];
+    for (const messageTypeDirCtx of messageTypeDirCtxs){
+        // eslint-disable-next-line no-await-in-loop
+        const [messageType, readErrors] = await readMessageTypeDir(messageTypeDirCtx, opts);
+        if (readErrors.length > 0) {
+            const messageTypeJsonPath = _nodepath.default.resolve(messageTypeDirCtx.abspath, _processorisomorphic.MESSAGE_TYPE_JSON);
+            const e = new _error.SourceError((0, _error.formatErrors)(readErrors), messageTypeJsonPath);
+            errors.push(e);
+            continue;
+        }
+        messageTypes.push({
+            ...messageTypeDirCtx,
+            content: messageType
+        });
+    }
+    return [
+        messageTypes,
+        errors
+    ];
+};
+const readMessageTypeDir = async (messageTypeDirCtx, opts = {})=>{
+    const { abspath } = messageTypeDirCtx;
+    const { withExtractedFiles = false, withReadonlyField = false } = opts;
+    const dirExists = await _fsextra.pathExists(abspath);
+    if (!dirExists) throw new Error(`${abspath} does not exist`);
+    const messageTypeJsonPath = await (0, _helpers1.lsMessageTypeJson)(abspath);
+    if (!messageTypeJsonPath) throw new Error(`${abspath} is not a message type directory`);
+    const result = await (0, _json.readJson)(messageTypeJsonPath);
+    if (!result[0]) return result;
+    let [messageTypeJson] = result;
+    messageTypeJson = withReadonlyField ? messageTypeJson : (0, _objectisomorphic.omitDeep)(messageTypeJson, [
+        "__readonly"
+    ]);
+    return withExtractedFiles ? joinExtractedFiles(messageTypeDirCtx, messageTypeJson) : [
+        messageTypeJson,
+        []
+    ];
+};
+const joinExtractedFiles = async (messageTypeDirCtx, messageTypeJson)=>{
+    // Tracks any errors encountered during traversal. Mutated in place.
+    const errors = [];
+    // Tracks each new valid extracted file path seen (rebased to be relative to
+    // message_type.json) in the message type json node. Mutated in place, and used
+    // to validate the uniqueness of an extracted path encountered.
+    const uniqueFilePaths = {};
+    (0, _objectisomorphic.mapValuesDeep)(messageTypeJson, (relpath, key, parts)=>{
+        // If not marked with the @suffix, there's nothing to do.
+        if (!_constisomorphic.FILEPATH_MARKED_RE.test(key)) return;
+        const objPathToFieldStr = _objectisomorphic.ObjPath.stringify(parts);
+        const inlinObjPathStr = objPathToFieldStr.replace(_constisomorphic.FILEPATH_MARKED_RE, "");
+        // If there is inlined content present already, then nothing more to do.
+        if ((0, _lodash.hasIn)(messageTypeJson, inlinObjPathStr)) return;
+        // Check if the extracted path found at the current field path is valid
+        const invalidFilePathError = (0, _helpers.validateExtractedFilePath)(relpath, _nodepath.default.resolve(messageTypeDirCtx.abspath, _processorisomorphic.MESSAGE_TYPE_JSON), uniqueFilePaths, objPathToFieldStr);
+        if (invalidFilePathError) {
+            errors.push(invalidFilePathError);
+            // Wipe the invalid file path in the node so the final message type json
+            // object ends up with only valid file paths, this way message type writer
+            // can see only valid file paths and use those when pulling.
+            (0, _lodash.set)(messageTypeJson, inlinObjPathStr, undefined);
+            (0, _lodash.set)(messageTypeJson, objPathToFieldStr, undefined);
+            return;
+        }
+        // By this point we have a valid extracted file path, so attempt to read the file.
+        const [content, readExtractedFileError] = (0, _helpers.readExtractedFileSync)(relpath, messageTypeDirCtx, key);
+        if (readExtractedFileError) {
+            errors.push(readExtractedFileError);
+            // If there's an error, replace the extracted file path with the original one, and set the
+            // inlined field path in message type object with empty content, so we know
+            // we do not need to try inlining again.
+            (0, _lodash.set)(messageTypeJson, objPathToFieldStr, relpath);
+            (0, _lodash.set)(messageTypeJson, inlinObjPathStr, undefined);
+            return;
+        }
+        // Inline the file content and remove the extracted file path.
+        (0, _lodash.set)(messageTypeJson, objPathToFieldStr, relpath);
+        (0, _lodash.set)(messageTypeJson, inlinObjPathStr, content);
+    });
+    return [
+        messageTypeJson,
+        errors
+    ];
+};
+const readAllForCommandTarget = async (target, opts = {})=>{
+    const { type: targetType, context: targetCtx } = target;
+    if (!targetCtx.exists) {
+        const subject = targetType === "messageTypeDir" ? "a message type directory at" : "message type directories in";
+        return _core.ux.error(`Cannot locate ${subject} \`${targetCtx.abspath}\``);
+    }
+    switch(targetType){
+        case "messageTypeDir":
+            {
+                return readMessageTypeDirs([
+                    targetCtx
+                ], opts);
+            }
+        case "messageTypesIndexDir":
+            {
+                const dirents = await _fsextra.readdir(targetCtx.abspath, {
+                    withFileTypes: true
+                });
+                const promises = dirents.map(async (dirent)=>{
+                    const abspath = _nodepath.default.resolve(targetCtx.abspath, dirent.name);
+                    const messageTypeDirCtx = {
+                        type: "message_type",
+                        key: dirent.name,
+                        abspath,
+                        exists: await (0, _helpers1.isMessageTypeDir)(abspath)
+                    };
+                    return messageTypeDirCtx;
+                });
+                const messageTypeDirCtxs = (await Promise.all(promises)).filter((messageTypeDirCtx)=>messageTypeDirCtx.exists);
+                return readMessageTypeDirs(messageTypeDirCtxs, opts);
+            }
+        default:
+            throw new Error(`Invalid message type command target: ${target}`);
+    }
+};
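
`joinExtractedFiles` is the inverse of the extraction performed on pull: it walks the parsed `message_type.json`, finds keys carrying the file path marker, reads each referenced file, and inlines its content next to the marker while keeping the path. A before/after sketch, assuming `@` is the marker referenced in the comments and using a hypothetical `preview` field:

```js
// message_type.json as written on disk (field name and content are invented):
const parsed = { key: "banner", "preview@": "preview.html" };

// After joinExtractedFiles reads ./preview.html relative to the directory,
// the in-memory object keeps the marker path and gains the inlined content:
const joined = {
  key: "banner",
  "preview@": "preview.html",
  preview: "<div>{{ content }}</div>",
};
```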

package/dist/lib/marshal/message-type/writer.js
@@ -0,0 +1,175 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", {
+    value: true
+});
+function _export(target, all) {
+    for(var name in all)Object.defineProperty(target, name, {
+        enumerable: true,
+        get: all[name]
+    });
+}
+_export(exports, {
+    pruneMessageTypesIndexDir: function() {
+        return pruneMessageTypesIndexDir;
+    },
+    writeMessageTypeDirFromData: function() {
+        return writeMessageTypeDirFromData;
+    },
+    writeMessageTypesIndexDir: function() {
+        return writeMessageTypesIndexDir;
+    }
+});
+const _nodepath = /*#__PURE__*/ _interop_require_wildcard(require("node:path"));
+const _fsextra = /*#__PURE__*/ _interop_require_wildcard(require("fs-extra"));
+const _lodash = require("lodash");
+const _const = require("../../helpers/const");
+const _json = require("../../helpers/json");
+const _helpers = require("./helpers");
+const _processorisomorphic = require("./processor.isomorphic");
+const _reader = require("./reader");
+function _getRequireWildcardCache(nodeInterop) {
+    if (typeof WeakMap !== "function") return null;
+    var cacheBabelInterop = new WeakMap();
+    var cacheNodeInterop = new WeakMap();
+    return (_getRequireWildcardCache = function(nodeInterop) {
+        return nodeInterop ? cacheNodeInterop : cacheBabelInterop;
+    })(nodeInterop);
+}
+function _interop_require_wildcard(obj, nodeInterop) {
+    if (!nodeInterop && obj && obj.__esModule) {
+        return obj;
+    }
+    if (obj === null || typeof obj !== "object" && typeof obj !== "function") {
+        return {
+            default: obj
+        };
+    }
+    var cache = _getRequireWildcardCache(nodeInterop);
+    if (cache && cache.has(obj)) {
+        return cache.get(obj);
+    }
+    var newObj = {
+        __proto__: null
+    };
+    var hasPropertyDescriptor = Object.defineProperty && Object.getOwnPropertyDescriptor;
+    for(var key in obj){
+        if (key !== "default" && Object.prototype.hasOwnProperty.call(obj, key)) {
+            var desc = hasPropertyDescriptor ? Object.getOwnPropertyDescriptor(obj, key) : null;
+            if (desc && (desc.get || desc.set)) {
+                Object.defineProperty(newObj, key, desc);
+            } else {
+                newObj[key] = obj[key];
+            }
+        }
+    }
+    newObj.default = obj;
+    if (cache) {
+        cache.set(obj, newObj);
+    }
+    return newObj;
+}
+const writeMessageTypeDirFromData = async (messageTypeDirCtx, remoteMessageType)=>{
+    // If the message type directory exists on the file system (i.e. previously
+    // pulled before), then read the message type file to use as a reference.
+    const [localMessageType] = messageTypeDirCtx.exists ? await (0, _reader.readMessageTypeDir)(messageTypeDirCtx, {
+        withExtractedFiles: true
+    }) : [];
+    const bundle = (0, _processorisomorphic.buildMessageTypeDirBundle)(remoteMessageType, localMessageType);
+    return writeMessageTypeDirFromBundle(messageTypeDirCtx, bundle);
+};
+/*
+ * A lower level write function that takes a constructed message type dir bundle
+ * and writes it into a message type directory on a local file system.
+ *
+ * It does not make any assumptions about how the message type directory bundle
+ * was built; for example, it can be from parsing the message type data fetched
+ * from the Knock API, or built manually for scaffolding purposes.
+ */ const writeMessageTypeDirFromBundle = async (messageTypeDirCtx, messageTypeDirBundle)=>{
+    const backupDirPath = _nodepath.resolve(_const.sandboxDir, (0, _lodash.uniqueId)("backup"));
+    try {
+        if (messageTypeDirCtx.exists) {
+            await _fsextra.copy(messageTypeDirCtx.abspath, backupDirPath);
+            await _fsextra.emptyDir(messageTypeDirCtx.abspath);
+        }
+        const promises = Object.entries(messageTypeDirBundle).map(([relpath, fileContent])=>{
+            const filePath = _nodepath.resolve(messageTypeDirCtx.abspath, relpath);
+            return relpath === _processorisomorphic.MESSAGE_TYPE_JSON ? _fsextra.outputJson(filePath, fileContent, {
+                spaces: _json.DOUBLE_SPACES
+            }) : _fsextra.outputFile(filePath, fileContent !== null && fileContent !== void 0 ? fileContent : "");
+        });
+        await Promise.all(promises);
+    } catch (error) {
+        // In case of any error, wipe the target directory that is likely in a bad
+        // state then restore the backup if one existed before.
+        if (messageTypeDirCtx.exists) {
+            await _fsextra.emptyDir(messageTypeDirCtx.abspath);
+            await _fsextra.copy(backupDirPath, messageTypeDirCtx.abspath);
+        } else {
+            await _fsextra.remove(messageTypeDirCtx.abspath);
+        }
+        throw error;
+    } finally{
+        // Always clean up the backup directory in the temp sandbox.
+        await _fsextra.remove(backupDirPath);
+    }
+};
+/*
+ * Prunes the index directory by removing any files, or directories that aren't
+ * message type dirs found in fetched message types. We want to preserve any
+ * message type dirs that are going to be updated with remote message types, so
+ * extracted links can be respected.
+ */ const pruneMessageTypesIndexDir = async (indexDirCtx, remoteMessageTypes)=>{
+    const messageTypesByKey = Object.fromEntries(remoteMessageTypes.map((w)=>[
+        w.key.toLowerCase(),
+        w
+    ]));
+    const dirents = await _fsextra.readdir(indexDirCtx.abspath, {
+        withFileTypes: true
+    });
+    const promises = dirents.map(async (dirent)=>{
+        const direntName = dirent.name.toLowerCase();
+        const direntPath = _nodepath.resolve(indexDirCtx.abspath, direntName);
+        if (await (0, _helpers.isMessageTypeDir)(direntPath) && messageTypesByKey[direntName]) {
+            return;
+        }
+        await _fsextra.remove(direntPath);
+    });
+    await Promise.all(promises);
+};
+const writeMessageTypesIndexDir = async (indexDirCtx, remoteMessageTypes)=>{
+    const backupDirPath = _nodepath.resolve(_const.sandboxDir, (0, _lodash.uniqueId)("backup"));
+    try {
+        // If the index directory already exists, back it up in the temp sandbox
+        // before wiping it clean.
+        if (indexDirCtx.exists) {
+            await _fsextra.copy(indexDirCtx.abspath, backupDirPath);
+            await pruneMessageTypesIndexDir(indexDirCtx, remoteMessageTypes);
+        }
+        // Write given remote message types into the given message types dir path.
+        const promises = remoteMessageTypes.map(async (messageType)=>{
+            const messageTypeDirPath = _nodepath.resolve(indexDirCtx.abspath, messageType.key);
+            const messageTypeDirCtx = {
+                type: "message_type",
+                key: messageType.key,
+                abspath: messageTypeDirPath,
+                exists: indexDirCtx.exists ? await (0, _helpers.isMessageTypeDir)(messageTypeDirPath) : false
+            };
+            return writeMessageTypeDirFromData(messageTypeDirCtx, messageType);
+        });
+        await Promise.all(promises);
+    } catch (error) {
+        console.log(error);
+        // In case of any error, wipe the index directory that is likely in a bad
+        // state then restore the backup if one existed before.
+        if (indexDirCtx.exists) {
+            await _fsextra.emptyDir(indexDirCtx.abspath);
+            await _fsextra.copy(backupDirPath, indexDirCtx.abspath);
+        } else {
+            await _fsextra.remove(indexDirCtx.abspath);
+        }
+        throw error;
+    } finally{
+        // Always clean up the backup directory in the temp sandbox.
+        await _fsextra.remove(backupDirPath);
+    }
+};
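
Both `writeMessageTypeDirFromBundle` and `writeMessageTypesIndexDir` follow the same safety pattern: snapshot the existing directory into a temporary backup, wipe and rewrite the target, restore the snapshot if anything throws, and always delete the backup. Below is a minimal standalone sketch of that pattern with `fs-extra`; the helper name and the tmpdir-based backup location are assumptions, not the CLI's actual `sandboxDir`.

```js
const os = require("node:os");
const path = require("node:path");
const fse = require("fs-extra");

// Write into targetDir via the writeFiles callback, rolling back on failure.
async function writeDirSafely(targetDir, writeFiles) {
  const backupDir = path.join(os.tmpdir(), `backup-${Date.now()}`);
  const existed = await fse.pathExists(targetDir);
  try {
    if (existed) {
      await fse.copy(targetDir, backupDir); // snapshot the current state
      await fse.emptyDir(targetDir); // start from a clean directory
    }
    await writeFiles(targetDir); // caller writes the new contents
  } catch (error) {
    // Restore the snapshot, or remove the half-written directory entirely.
    if (existed) {
      await fse.emptyDir(targetDir);
      await fse.copy(backupDir, targetDir);
    } else {
      await fse.remove(targetDir);
    }
    throw error;
  } finally {
    await fse.remove(backupDir); // always clean up the snapshot
  }
}
```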