@knocklabs/cli 0.1.19 → 0.1.21
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +51 -29
- package/dist/commands/message-type/get.js +124 -0
- package/dist/commands/message-type/list.js +101 -0
- package/dist/commands/message-type/pull.js +218 -0
- package/dist/commands/message-type/push.js +171 -0
- package/dist/commands/message-type/validate.js +148 -0
- package/dist/commands/partial/pull.js +2 -2
- package/dist/commands/workflow/generate-types.js +138 -0
- package/dist/lib/api-v1.js +47 -0
- package/dist/lib/base-command.js +1 -4
- package/dist/lib/helpers/const.js +3 -3
- package/dist/lib/helpers/error.js +3 -6
- package/dist/lib/helpers/flag.js +16 -0
- package/dist/lib/helpers/page.js +3 -3
- package/dist/lib/marshal/guide/index.js +19 -0
- package/dist/lib/marshal/guide/processor.isomorphic.js +85 -0
- package/dist/lib/marshal/guide/types.js +4 -0
- package/dist/lib/marshal/index.isomorphic.js +12 -8
- package/dist/lib/marshal/message-type/helpers.js +135 -0
- package/dist/lib/marshal/message-type/index.js +3 -0
- package/dist/lib/marshal/message-type/reader.js +198 -0
- package/dist/lib/marshal/message-type/writer.js +175 -0
- package/dist/lib/marshal/partial/reader.js +3 -3
- package/dist/lib/marshal/partial/types.js +3 -3
- package/dist/lib/marshal/workflow/types.js +3 -3
- package/dist/lib/run-context/loader.js +35 -31
- package/dist/lib/type-generator.js +100 -0
- package/oclif.manifest.json +446 -1
- package/package.json +16 -15
|
@@ -0,0 +1,175 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
Object.defineProperty(exports, "__esModule", {
|
|
3
|
+
value: true
|
|
4
|
+
});
|
|
5
|
+
function _export(target, all) {
|
|
6
|
+
for(var name in all)Object.defineProperty(target, name, {
|
|
7
|
+
enumerable: true,
|
|
8
|
+
get: all[name]
|
|
9
|
+
});
|
|
10
|
+
}
|
|
11
|
+
_export(exports, {
|
|
12
|
+
pruneMessageTypesIndexDir: function() {
|
|
13
|
+
return pruneMessageTypesIndexDir;
|
|
14
|
+
},
|
|
15
|
+
writeMessageTypeDirFromData: function() {
|
|
16
|
+
return writeMessageTypeDirFromData;
|
|
17
|
+
},
|
|
18
|
+
writeMessageTypesIndexDir: function() {
|
|
19
|
+
return writeMessageTypesIndexDir;
|
|
20
|
+
}
|
|
21
|
+
});
|
|
22
|
+
const _nodepath = /*#__PURE__*/ _interop_require_wildcard(require("node:path"));
|
|
23
|
+
const _fsextra = /*#__PURE__*/ _interop_require_wildcard(require("fs-extra"));
|
|
24
|
+
const _lodash = require("lodash");
|
|
25
|
+
const _const = require("../../helpers/const");
|
|
26
|
+
const _json = require("../../helpers/json");
|
|
27
|
+
const _helpers = require("./helpers");
|
|
28
|
+
const _processorisomorphic = require("./processor.isomorphic");
|
|
29
|
+
const _reader = require("./reader");
|
|
30
|
+
/*
 * Lazily creates (exactly once) the two WeakMap caches used by the wildcard
 * interop helper, then rebinds itself so later calls skip the setup entirely.
 * Returns null in environments without WeakMap support.
 */
function _getRequireWildcardCache(nodeInterop) {
  if (typeof WeakMap !== "function") {
    return null;
  }
  const babelCache = new WeakMap();
  const nodeCache = new WeakMap();
  _getRequireWildcardCache = function(flag) {
    return flag ? nodeCache : babelCache;
  };
  return _getRequireWildcardCache(nodeInterop);
}

/*
 * CommonJS <-> ES module namespace interop (swc helper). Genuine ES modules
 * pass straight through; primitives and null are wrapped as `{ default: obj }`;
 * anything else gets a prototype-less namespace object with every own
 * property copied across plus a `default` pointing at the original. Results
 * are memoized per source object via WeakMap so repeated interop of the same
 * module yields the same namespace.
 */
function _interop_require_wildcard(obj, nodeInterop) {
  // Babel-interop mode: a real ES module is returned unchanged.
  if (!nodeInterop && obj && obj.__esModule) {
    return obj;
  }
  // Primitives (and null) cannot carry namespace properties; wrap them.
  if (obj === null || (typeof obj !== "object" && typeof obj !== "function")) {
    return { default: obj };
  }
  const cache = _getRequireWildcardCache(nodeInterop);
  if (cache && cache.has(obj)) {
    return cache.get(obj);
  }
  const namespace = { __proto__: null };
  const canCopyDescriptors = Object.defineProperty && Object.getOwnPropertyDescriptor;
  for (const key in obj) {
    if (key === "default" || !Object.prototype.hasOwnProperty.call(obj, key)) {
      continue;
    }
    const descriptor = canCopyDescriptors ? Object.getOwnPropertyDescriptor(obj, key) : null;
    if (descriptor && (descriptor.get || descriptor.set)) {
      // Preserve live accessor bindings rather than snapshotting their value.
      Object.defineProperty(namespace, key, descriptor);
    } else {
      namespace[key] = obj[key];
    }
  }
  namespace.default = obj;
  if (cache) {
    cache.set(obj, namespace);
  }
  return namespace;
}
|
|
71
|
+
/*
 * Writes a remote message type into its local directory. If the directory
 * already exists on the file system (i.e. it was pulled before), the local
 * copy is read first (with extracted file contents) so the new bundle can be
 * built with the local state as a reference.
 */
const writeMessageTypeDirFromData = async (messageTypeDirCtx, remoteMessageType) => {
  let localMessageType;
  if (messageTypeDirCtx.exists) {
    const readResult = await (0, _reader.readMessageTypeDir)(messageTypeDirCtx, {
      withExtractedFiles: true
    });
    localMessageType = readResult[0];
  }
  const bundle = (0, _processorisomorphic.buildMessageTypeDirBundle)(remoteMessageType, localMessageType);
  return writeMessageTypeDirFromBundle(messageTypeDirCtx, bundle);
};
|
|
80
|
+
/*
 * A lower level write function that takes a constructed message type dir
 * bundle and writes it into a message type directory on a local file system.
 *
 * It does not make any assumptions about how the message type directory
 * bundle was built; for example, it can be from parsing the message type data
 * fetched from the Knock API, or built manually for scaffolding purposes.
 *
 * On failure the directory is rolled back: restored from a backup taken in
 * the temp sandbox if it existed before, or removed entirely otherwise.
 */
const writeMessageTypeDirFromBundle = async (messageTypeDirCtx, messageTypeDirBundle) => {
  const backupDirPath = _nodepath.resolve(_const.sandboxDir, (0, _lodash.uniqueId)("backup"));
  try {
    // Snapshot the existing directory before wiping so a failed write can be
    // rolled back to the pre-write state.
    if (messageTypeDirCtx.exists) {
      await _fsextra.copy(messageTypeDirCtx.abspath, backupDirPath);
      await _fsextra.emptyDir(messageTypeDirCtx.abspath);
    }
    const writes = Object.entries(messageTypeDirBundle).map(([relpath, fileContent]) => {
      const filePath = _nodepath.resolve(messageTypeDirCtx.abspath, relpath);
      // The message type JSON file is pretty-printed; everything else is
      // written verbatim (nullish content becomes an empty file).
      if (relpath === _processorisomorphic.MESSAGE_TYPE_JSON) {
        return _fsextra.outputJson(filePath, fileContent, {
          spaces: _json.DOUBLE_SPACES
        });
      }
      return _fsextra.outputFile(filePath, fileContent !== null && fileContent !== void 0 ? fileContent : "");
    });
    await Promise.all(writes);
  } catch (error) {
    // In case of any error, wipe the target directory that is likely in a bad
    // state then restore the backup if one existed before.
    if (messageTypeDirCtx.exists) {
      await _fsextra.emptyDir(messageTypeDirCtx.abspath);
      await _fsextra.copy(backupDirPath, messageTypeDirCtx.abspath);
    } else {
      await _fsextra.remove(messageTypeDirCtx.abspath);
    }
    throw error;
  } finally {
    // Always clean up the backup directory in the temp sandbox.
    await _fsextra.remove(backupDirPath);
  }
};
|
|
116
|
+
/*
 * Prunes the index directory by removing any files or directories that are
 * not message type dirs matching one of the fetched message types. Message
 * type dirs that are about to be updated with remote message types are kept,
 * so extracted links can be respected.
 */
const pruneMessageTypesIndexDir = async (indexDirCtx, remoteMessageTypes) => {
  // Index remote message types by lowercased key for case-insensitive lookup
  // against directory entry names.
  const messageTypesByKey = Object.fromEntries(remoteMessageTypes.map((messageType) => [
    messageType.key.toLowerCase(),
    messageType
  ]));
  const dirents = await _fsextra.readdir(indexDirCtx.abspath, {
    withFileTypes: true
  });
  await Promise.all(dirents.map(async (dirent) => {
    const direntName = dirent.name.toLowerCase();
    const direntPath = _nodepath.resolve(indexDirCtx.abspath, direntName);
    const keep = await (0, _helpers.isMessageTypeDir)(direntPath) && messageTypesByKey[direntName];
    if (!keep) {
      await _fsextra.remove(direntPath);
    }
  }));
};
|
|
139
|
+
/*
 * Writes the given remote message types into the index directory, one message
 * type directory per key. If the index directory already exists, it is backed
 * up into the temp sandbox and pruned (rather than wiped) so extracted links
 * in preserved message type dirs are respected. On failure the directory is
 * restored from the backup (or removed if it did not previously exist) and
 * the original error is rethrown.
 */
const writeMessageTypesIndexDir = async (indexDirCtx, remoteMessageTypes) => {
  const backupDirPath = _nodepath.resolve(_const.sandboxDir, (0, _lodash.uniqueId)("backup"));
  try {
    // If the index directory already exists, back it up in the temp sandbox
    // before pruning out entries that no longer exist remotely.
    if (indexDirCtx.exists) {
      await _fsextra.copy(indexDirCtx.abspath, backupDirPath);
      await pruneMessageTypesIndexDir(indexDirCtx, remoteMessageTypes);
    }
    // Write given remote message types into the given message types dir path.
    const promises = remoteMessageTypes.map(async (messageType) => {
      const messageTypeDirPath = _nodepath.resolve(indexDirCtx.abspath, messageType.key);
      const messageTypeDirCtx = {
        type: "message_type",
        key: messageType.key,
        abspath: messageTypeDirPath,
        exists: indexDirCtx.exists ? await (0, _helpers.isMessageTypeDir)(messageTypeDirPath) : false
      };
      return writeMessageTypeDirFromData(messageTypeDirCtx, messageType);
    });
    await Promise.all(promises);
  } catch (error) {
    // Fix: removed a stray `console.log(error)` debug statement that leaked
    // raw errors to stdout; the error is rethrown below, matching the error
    // handling of the sibling message-type dir write helper.
    // In case of any error, wipe the index directory that is likely in a bad
    // state then restore the backup if one existed before.
    if (indexDirCtx.exists) {
      await _fsextra.emptyDir(indexDirCtx.abspath);
      await _fsextra.copy(backupDirPath, indexDirCtx.abspath);
    } else {
      await _fsextra.remove(indexDirCtx.abspath);
    }
    throw error;
  } finally {
    // Always clean up the backup directory in the temp sandbox.
    await _fsextra.remove(backupDirPath);
  }
};
|
|
@@ -76,7 +76,7 @@ function _interop_require_wildcard(obj, nodeInterop) {
|
|
|
76
76
|
/*
|
|
77
77
|
* For the given list of partial directory contexts, read each partial dir and
|
|
78
78
|
* return partial directory data.
|
|
79
|
-
*/ const
|
|
79
|
+
*/ const readPartialDirs = async (partialDirCtxs, opts = {})=>{
|
|
80
80
|
const partials = [];
|
|
81
81
|
const errors = [];
|
|
82
82
|
for (const partialDirCtx of partialDirCtxs){
|
|
@@ -170,7 +170,7 @@ const readAllForCommandTarget = async (target, opts = {})=>{
|
|
|
170
170
|
switch(targetType){
|
|
171
171
|
case "partialDir":
|
|
172
172
|
{
|
|
173
|
-
return
|
|
173
|
+
return readPartialDirs([
|
|
174
174
|
targetCtx
|
|
175
175
|
], opts);
|
|
176
176
|
}
|
|
@@ -190,7 +190,7 @@ const readAllForCommandTarget = async (target, opts = {})=>{
|
|
|
190
190
|
return partialDirCtx;
|
|
191
191
|
});
|
|
192
192
|
const partialDirCtxs = (await Promise.all(promises)).filter((partialDirCtx)=>partialDirCtx.exists);
|
|
193
|
-
return
|
|
193
|
+
return readPartialDirs(partialDirCtxs, opts);
|
|
194
194
|
}
|
|
195
195
|
default:
|
|
196
196
|
throw new Error(`Invalid partial command target: ${target}`);
|
|
@@ -8,10 +8,10 @@ Object.defineProperty(exports, "PartialType", {
|
|
|
8
8
|
return PartialType;
|
|
9
9
|
}
|
|
10
10
|
});
|
|
11
|
-
var PartialType
|
|
12
|
-
(function(PartialType) {
|
|
11
|
+
var PartialType = /*#__PURE__*/ function(PartialType) {
|
|
13
12
|
PartialType["Html"] = "html";
|
|
14
13
|
PartialType["Json"] = "json";
|
|
15
14
|
PartialType["Markdown"] = "markdown";
|
|
16
15
|
PartialType["Text"] = "text";
|
|
17
|
-
|
|
16
|
+
return PartialType;
|
|
17
|
+
}({});
|
|
@@ -8,12 +8,12 @@ Object.defineProperty(exports, "StepType", {
|
|
|
8
8
|
return StepType;
|
|
9
9
|
}
|
|
10
10
|
});
|
|
11
|
-
var StepType
|
|
12
|
-
(function(StepType) {
|
|
11
|
+
var StepType = /*#__PURE__*/ function(StepType) {
|
|
13
12
|
StepType["Channel"] = "channel";
|
|
14
13
|
StepType["Batch"] = "batch";
|
|
15
14
|
StepType["Delay"] = "delay";
|
|
16
15
|
StepType["HttpFetch"] = "http_fetch";
|
|
17
16
|
StepType["Branch"] = "branch";
|
|
18
17
|
StepType["Throttle"] = "throttle";
|
|
19
|
-
|
|
18
|
+
return StepType;
|
|
19
|
+
}({});
|
|
@@ -14,6 +14,8 @@ Object.defineProperty(exports, "load", {
|
|
|
14
14
|
});
|
|
15
15
|
const _nodepath = /*#__PURE__*/ _interop_require_wildcard(require("node:path"));
|
|
16
16
|
const _emaillayout = /*#__PURE__*/ _interop_require_wildcard(require("../marshal/email-layout"));
|
|
17
|
+
const _messagetype = /*#__PURE__*/ _interop_require_wildcard(require("../marshal/message-type"));
|
|
18
|
+
const _partial = /*#__PURE__*/ _interop_require_wildcard(require("../marshal/partial"));
|
|
17
19
|
const _translation = /*#__PURE__*/ _interop_require_wildcard(require("../marshal/translation"));
|
|
18
20
|
const _workflow = /*#__PURE__*/ _interop_require_wildcard(require("../marshal/workflow"));
|
|
19
21
|
function _getRequireWildcardCache(nodeInterop) {
|
|
@@ -57,38 +59,40 @@ function _interop_require_wildcard(obj, nodeInterop) {
|
|
|
57
59
|
}
|
|
58
60
|
return newObj;
|
|
59
61
|
}
|
|
62
|
+
/*
 * Builds a resource directory context for the directory currently being
 * evaluated: keyed by the directory's basename and marked as existing,
 * since it was just found on disk.
 */
const buildResourceDirContext = (type, currDir) => ({
  type,
  key: _nodepath.basename(currDir),
  abspath: currDir,
  exists: true
});
|
|
60
70
|
const evaluateRecursively = async (ctx, currDir)=>{
|
|
61
|
-
// Check if we are inside a
|
|
62
|
-
|
|
63
|
-
|
|
64
|
-
|
|
65
|
-
|
|
66
|
-
|
|
67
|
-
|
|
68
|
-
|
|
69
|
-
|
|
70
|
-
|
|
71
|
-
|
|
72
|
-
|
|
73
|
-
|
|
74
|
-
|
|
75
|
-
|
|
76
|
-
|
|
77
|
-
|
|
78
|
-
|
|
79
|
-
|
|
80
|
-
|
|
81
|
-
|
|
82
|
-
|
|
83
|
-
|
|
84
|
-
|
|
85
|
-
|
|
86
|
-
ctx.resourceDir = {
|
|
87
|
-
type: "translation",
|
|
88
|
-
key: _nodepath.basename(currDir),
|
|
89
|
-
abspath: currDir,
|
|
90
|
-
exists: true
|
|
91
|
-
};
|
|
71
|
+
// Check if we are inside a resource directory and if so update the context.
|
|
72
|
+
if (!ctx.resourceDir) {
|
|
73
|
+
const isWorkflowDir = await _workflow.isWorkflowDir(currDir);
|
|
74
|
+
if (isWorkflowDir) {
|
|
75
|
+
ctx.resourceDir = buildResourceDirContext("workflow", currDir);
|
|
76
|
+
}
|
|
77
|
+
const isEmailLayoutDir = await _emaillayout.isEmailLayoutDir(currDir);
|
|
78
|
+
if (isEmailLayoutDir) {
|
|
79
|
+
ctx.resourceDir = buildResourceDirContext("email_layout", currDir);
|
|
80
|
+
}
|
|
81
|
+
const isPartialDir = await _partial.isPartialDir(currDir);
|
|
82
|
+
if (isPartialDir) {
|
|
83
|
+
ctx.resourceDir = buildResourceDirContext("partial", currDir);
|
|
84
|
+
}
|
|
85
|
+
const isMessageTypeDir = await _messagetype.isMessageTypeDir(currDir);
|
|
86
|
+
if (isMessageTypeDir) {
|
|
87
|
+
ctx.resourceDir = buildResourceDirContext("message_type", currDir);
|
|
88
|
+
}
|
|
89
|
+
// NOTE: Must keep this check as last in the order of directory-type checks
|
|
90
|
+
// since the `isTranslationDir` only checks that the directory name is a
|
|
91
|
+
// valid locale name.
|
|
92
|
+
const isTranslationDir = _translation.isTranslationDir(currDir);
|
|
93
|
+
if (isTranslationDir) {
|
|
94
|
+
ctx.resourceDir = buildResourceDirContext("translation", currDir);
|
|
95
|
+
}
|
|
92
96
|
}
|
|
93
97
|
// If we've identified the resource context, no need to go further.
|
|
94
98
|
// TODO: In the future, consider supporting a knock project config file which
|
|
@@ -0,0 +1,100 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
Object.defineProperty(exports, "__esModule", {
|
|
3
|
+
value: true
|
|
4
|
+
});
|
|
5
|
+
function _export(target, all) {
|
|
6
|
+
for(var name in all)Object.defineProperty(target, name, {
|
|
7
|
+
enumerable: true,
|
|
8
|
+
get: all[name]
|
|
9
|
+
});
|
|
10
|
+
}
|
|
11
|
+
_export(exports, {
|
|
12
|
+
generateWorkflowTypes: function() {
|
|
13
|
+
return generateWorkflowTypes;
|
|
14
|
+
},
|
|
15
|
+
getLanguageFromExtension: function() {
|
|
16
|
+
return getLanguageFromExtension;
|
|
17
|
+
}
|
|
18
|
+
});
|
|
19
|
+
const _quicktypecore = require("quicktype-core");
|
|
20
|
+
/*
 * Maps a file extension (without the leading dot) to the corresponding
 * quicktype target language name. Returns undefined for any extension that
 * type generation does not support.
 */
function getLanguageFromExtension(extension) {
  const languageByExtension = new Map([
    ["ts", "typescript"],
    ["py", "python"],
    ["go", "go"],
    ["rb", "ruby"]
  ]);
  // Map.get uses strict key equality and has no prototype fallout, so
  // unsupported extensions yield undefined just like the original switch.
  return languageByExtension.get(extension);
}
|
|
34
|
+
/**
 * Transforms the schema to add `additionalProperties: false` to all plain
 * object schemas that do not already set it, recursing into nested object
 * properties.
 *
 * NOTE: the schema is mutated in place and also returned for convenience.
 *
 * TODO: handle refs, union types, and more.
 *
 * @param schema The schema to transform
 * @returns The transformed schema (same object reference)
 */ function transformSchema(schema) {
  // Close off bare object schemas; an explicit additionalProperties value
  // (true or a sub-schema) is left untouched.
  if (schema.type === "object" && !schema.additionalProperties) {
    schema.additionalProperties = false;
  }
  const properties = schema.properties ?? {};
  for (const [key, property] of Object.entries(properties)) {
    if (property.type === "object") {
      properties[key] = transformSchema(property);
    }
  }
  return schema;
}
|
|
55
|
+
/**
 * Given a set of workflows, generates type definitions for each workflow that
 * declares a trigger data JSON schema.
 *
 * If no workflow has a trigger data JSON schema, returns an undefined result
 * and an empty workflow list.
 *
 * @param workflows List of workflows to generate types for
 * @param targetLanguage Target programming language for type generation
 * @returns Generated type definitions plus the workflows they cover
 */ async function generateWorkflowTypes(workflows, targetLanguage) {
  const validWorkflows = workflows.filter((workflow) => workflow.trigger_data_json_schema);
  if (validWorkflows.length === 0) {
    return {
      result: undefined,
      workflows: []
    };
  }
  const schemaInput = new _quicktypecore.JSONSchemaInput(new _quicktypecore.FetchingJSONSchemaStore());
  for (const workflow of validWorkflows) {
    // e.g. "comment-created" / "comment_created" -> "CommentCreatedData".
    const typeName = `${workflow.key
      .split(/[_-]/)
      .map((part) => part.charAt(0).toUpperCase() + part.slice(1))
      .join("")}Data`;
    // Shallow-copy before transforming so the title override stays local;
    // nested schema objects are still shared with the source workflow.
    const schema = transformSchema({
      ...workflow.trigger_data_json_schema,
      title: typeName
    });
    schemaInput.addSource({
      name: typeName,
      schema: JSON.stringify(schema)
    });
  }
  const inputData = new _quicktypecore.InputData();
  inputData.addInput(schemaInput);
  const result = await (0, _quicktypecore.quicktype)({
    inputData,
    lang: targetLanguage,
    allPropertiesOptional: false,
    alphabetizeProperties: true,
    rendererOptions: {
      "just-types": true,
      "no-extra-properties": true,
      "no-optional-null": true
    }
  });
  return {
    result,
    workflows: validWorkflows
  };
}
|