@knocklabs/cli 0.1.0-rc.2 → 0.1.0-rc.4
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +82 -6
- package/dist/commands/commit/index.js +4 -18
- package/dist/commands/commit/promote.js +4 -17
- package/dist/commands/translation/list.js +82 -0
- package/dist/commands/translation/pull.js +124 -0
- package/dist/commands/translation/push.js +130 -0
- package/dist/commands/translation/validate.js +122 -0
- package/dist/commands/workflow/activate.js +5 -18
- package/dist/commands/workflow/new.js +3 -3
- package/dist/commands/workflow/pull.js +70 -17
- package/dist/commands/workflow/push.js +3 -3
- package/dist/commands/workflow/validate.js +3 -3
- package/dist/lib/api-v1.js +38 -2
- package/dist/lib/base-command.js +2 -2
- package/dist/lib/helpers/error.js +16 -8
- package/dist/lib/helpers/flag.js +63 -3
- package/dist/lib/helpers/fs.js +52 -0
- package/dist/lib/helpers/json.js +6 -2
- package/dist/lib/helpers/object.js +43 -0
- package/dist/lib/helpers/page.js +3 -1
- package/dist/lib/helpers/request.js +17 -49
- package/dist/lib/helpers/ux.js +42 -0
- package/dist/lib/marshal/translation/helpers.js +185 -0
- package/dist/lib/marshal/translation/index.js +19 -0
- package/dist/lib/marshal/translation/reader.js +118 -0
- package/dist/lib/marshal/translation/types.js +4 -0
- package/dist/lib/marshal/translation/writer.js +86 -0
- package/dist/lib/marshal/workflow/generator.js +46 -5
- package/dist/lib/marshal/workflow/helpers.js +2 -0
- package/dist/lib/marshal/workflow/reader.js +136 -117
- package/dist/lib/marshal/workflow/writer.js +235 -98
- package/dist/lib/{helpers/dir-context.js → run-context/helpers.js} +1 -1
- package/dist/lib/run-context/index.js +22 -0
- package/dist/lib/{run-context.js → run-context/loader.js} +22 -7
- package/dist/lib/run-context/types.js +4 -0
- package/oclif.manifest.json +253 -1
- package/package.json +11 -10
- package/dist/lib/helpers/spinner.js +0 -20

package/dist/commands/workflow/activate.js
CHANGED
@@ -7,36 +7,23 @@ Object.defineProperty(exports, "default", {
 get: ()=>WorkflowActivate
 });
 const _core = require("@oclif/core");
-const _enquirer = /*#__PURE__*/ _interopRequireDefault(require("enquirer"));
 const _baseCommand = /*#__PURE__*/ _interopRequireDefault(require("../../lib/base-command"));
 const _flag = require("../../lib/helpers/flag");
 const _request = require("../../lib/helpers/request");
+const _ux = require("../../lib/helpers/ux");
 function _interopRequireDefault(obj) {
 return obj && obj.__esModule ? obj : {
 default: obj
 };
 }
-const promptToConfirm = async ({ flags , args })=>{
-const action = flags.status ? "Activate" : "Deactivate";
-try {
-const { input } = await _enquirer.default.prompt({
-type: "confirm",
-name: "input",
-message: `${action} \`${args.workflowKey}\` workflow in \`${flags.environment}\` environment?`
-});
-return input;
-} catch (error) {
-console.log(error);
-}
-};
 class WorkflowActivate extends _baseCommand.default {
 async run() {
 const { args , flags } = this.props;
 // 1. Confirm before activating or deactivating the workflow, unless forced.
-
-
-
-
+const action = flags.status ? "Activate" : "Deactivate";
+const prompt = `${action} \`${args.workflowKey}\` workflow in \`${flags.environment}\` environment?`;
+const input = flags.force || await (0, _ux.promptToConfirm)(prompt);
+if (!input) return;
 // 2. Proceed to make a request to set the workflow status.
 const actioning = flags.status ? "Activating" : "Deactivating";
 await (0, _request.withSpinner)(()=>{
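
The workflow commands in this release drop their per-command enquirer prompts in favor of a shared confirm helper from the new lib/helpers/ux.js (+42 lines, not expanded in this diff). Based on the implementation removed above, that helper plausibly wraps the same enquirer confirm prompt behind a single promptToConfirm(message) function; the sketch below is an inference, not the published source:

    // Hypothetical sketch of promptToConfirm in lib/helpers/ux.js, inferred
    // from the per-command prompt removed above; the actual file is not shown.
    const enquirer = require("enquirer");

    const promptToConfirm = async (message) => {
      try {
        // enquirer resolves a confirm prompt to a boolean keyed by its name.
        const { input } = await enquirer.prompt({
          type: "confirm",
          name: "input",
          message
        });
        return input;
      } catch {
        // A cancelled prompt rejects; treat that as "not confirmed".
        return undefined;
      }
    };

    module.exports = { promptToConfirm };
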

package/dist/commands/workflow/new.js
CHANGED
@@ -11,7 +11,7 @@ const _core = require("@oclif/core");
 const _fsExtra = /*#__PURE__*/ _interopRequireWildcard(require("fs-extra"));
 const _baseCommand = /*#__PURE__*/ _interopRequireDefault(require("../../lib/base-command"));
 const _object = require("../../lib/helpers/object");
-const
+const _ux = require("../../lib/helpers/ux");
 const _workflow = /*#__PURE__*/ _interopRequireWildcard(require("../../lib/marshal/workflow"));
 function _interopRequireDefault(obj) {
 return obj && obj.__esModule ? obj : {
@@ -61,7 +61,7 @@ class WorkflowNew extends _baseCommand.default {
 async run() {
 const { args , flags } = this.props;
 const { cwd , resourceDir } = this.runContext;
-
+_ux.spinner.start("‣ Validating");
 // 1. Ensure we aren't in any existing resource directory already.
 // TODO: In the future, maybe check for the project context and if we are in
 // /workflows directory.
@@ -84,7 +84,7 @@ class WorkflowNew extends _baseCommand.default {
 if (pathExists && !flags.force) {
 return this.error(`Cannot overwrite an existing path at ${newWorkflowDirPath}` + " (use --force to overwrite)");
 }
-
+_ux.spinner.stop();
 // 5-A. We are good to generate a new workflow directory.
 const dirContext = {
 type: "workflow",

package/dist/commands/workflow/pull.js
CHANGED
@@ -8,12 +8,15 @@ Object.defineProperty(exports, "default", {
 });
 const _nodePath = /*#__PURE__*/ _interopRequireWildcard(require("node:path"));
 const _core = require("@oclif/core");
-const _enquirer = /*#__PURE__*/ _interopRequireDefault(require("enquirer"));
 const _baseCommand = /*#__PURE__*/ _interopRequireDefault(require("../../lib/base-command"));
-const
+const _error = require("../../lib/helpers/error");
+const _flag = /*#__PURE__*/ _interopRequireWildcard(require("../../lib/helpers/flag"));
 const _object = require("../../lib/helpers/object");
+const _page = require("../../lib/helpers/page");
 const _request = require("../../lib/helpers/request");
+const _ux = require("../../lib/helpers/ux");
 const _workflow = /*#__PURE__*/ _interopRequireWildcard(require("../../lib/marshal/workflow"));
+const _runContext = require("../../lib/run-context");
 function _interopRequireDefault(obj) {
 return obj && obj.__esModule ? obj : {
 default: obj
@@ -58,26 +61,25 @@ function _interopRequireWildcard(obj, nodeInterop) {
 }
 return newObj;
 }
-const promptToConfirm = async ({ key })=>{
-try {
-const { input } = await _enquirer.default.prompt({
-type: "confirm",
-name: "input",
-message: `Create a new workflow directory \`${_nodePath.join(key, "/")}\`?`
-});
-return input;
-} catch (error) {
-console.log(error);
-}
-};
 class WorkflowPull extends _baseCommand.default {
 async run() {
+const { args , flags } = this.props;
+if (flags.all && args.workflowKey) {
+return this.error(`workflowKey arg \`${args.workflowKey}\` cannot also be provided when using --all`);
+}
+return flags.all ? this.pullAllWorkflows() : this.pullOneWorkflow();
+}
+/*
+* Pull one workflow
+*/ async pullOneWorkflow() {
+const { flags } = this.props;
 // 1. Retrieve or build a new workflow directory context.
 const dirContext = await this.getWorkflowDirContext();
 if (dirContext.exists) {
 this.log(`‣ Found \`${dirContext.key}\` at ${dirContext.abspath}`);
 } else {
-const
+const prompt = `Create a new workflow directory \`${dirContext.key}\`?`;
+const input = flags.force || await (0, _ux.promptToConfirm)(prompt);
 if (!input) return;
 }
 // 2. Fetch the workflow with annotations.
@@ -107,7 +109,7 @@ class WorkflowPull extends _baseCommand.default {
 type: "workflow",
 key: workflowKey
 };
-return (0,
+return (0, _runContext.ensureResourceDirForTarget)(resourceDir, target);
 }
 // Not inside any existing workflow directory, which means either create a
 // new worfklow directory in the cwd, or update it if there is one already.
@@ -124,12 +126,63 @@ class WorkflowPull extends _baseCommand.default {
 // Not in any workflow directory, nor a workflow key arg was given so error.
 return this.error("Missing 1 required arg:\nworkflowKey");
 }
+/*
+* Pull all workflows
+*/ async pullAllWorkflows() {
+const { flags } = this.props;
+// TODO: In the future we should default to the knock project config first
+// if present, before defaulting to the cwd.
+const defaultToCwd = {
+abspath: this.runContext.cwd,
+exists: true
+};
+const targetDirCtx = flags["workflows-dir"] || defaultToCwd;
+const prompt = targetDirCtx.exists ? `Pull latest workflows into ${targetDirCtx.abspath}?\n This will overwrite the contents of this directory.` : `Create a new workflows directory at ${targetDirCtx.abspath}?`;
+const input = flags.force || await (0, _ux.promptToConfirm)(prompt);
+if (!input) return;
+// Fetch all workflows then write them to the local file system.
+_ux.spinner.start(`‣ Loading`);
+const workflows = await this.listAllWorkflows();
+await _workflow.writeWorkflowsIndexDir(targetDirCtx, workflows);
+_ux.spinner.stop();
+const action = targetDirCtx.exists ? "updated" : "created";
+this.log(`‣ Successfully ${action} the workflows directory at ${targetDirCtx.abspath}`);
+}
+async listAllWorkflows(pageParams = {}, workflowsFetchedSoFar = []) {
+const props = (0, _object.merge)(this.props, {
+flags: {
+...pageParams,
+annotate: true,
+limit: _page.MAX_PAGINATION_LIMIT
+}
+});
+const resp = await this.apiV1.listWorkflows(props);
+if (!(0, _request.isSuccessResp)(resp)) {
+const message = (0, _request.formatErrorRespMessage)(resp);
+this.error(new _error.ApiError(message));
+}
+const { entries , page_info: pageInfo } = resp.data;
+const workflows = [
+...workflowsFetchedSoFar,
+...entries
+];
+return pageInfo.after ? this.listAllWorkflows({
+after: pageInfo.after
+}, workflows) : workflows;
+}
 }
 WorkflowPull.flags = {
 environment: _core.Flags.string({
 default: "development"
 }),
-
+all: _core.Flags.boolean(),
+"workflows-dir": _flag.dirPath({
+dependsOn: [
+"all"
+]
+}),
+"hide-uncommitted-changes": _core.Flags.boolean(),
+force: _core.Flags.boolean()
 };
 WorkflowPull.args = [
 {
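
The new --all path above pages through the workflow listing recursively, feeding the after cursor from page_info back into listWorkflows until no cursor remains, with the page size pinned to MAX_PAGINATION_LIMIT. The same traversal can be written as a loop; this is an illustration of the cursor pattern using only the response shape visible in the diff (data.entries, data.page_info.after), not code from the package:

    // Illustration: iterative equivalent of the recursive cursor pagination
    // used by listAllWorkflows above (error handling omitted).
    async function fetchAllPages(listFn, baseFlags) {
      const entries = [];
      let after;
      do {
        const resp = await listFn({ flags: { ...baseFlags, after, limit: 100 } });
        entries.push(...resp.data.entries);
        after = resp.data.page_info.after;
      } while (after);
      return entries;
    }
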

package/dist/commands/workflow/push.js
CHANGED
@@ -10,11 +10,11 @@ const _nodePath = /*#__PURE__*/ _interopRequireWildcard(require("node:path"));
 const _core = require("@oclif/core");
 const _baseCommand = /*#__PURE__*/ _interopRequireDefault(require("../../lib/base-command"));
 const _const = require("../../lib/helpers/const");
-const _dirContext = require("../../lib/helpers/dir-context");
 const _error = require("../../lib/helpers/error");
 const _object = require("../../lib/helpers/object");
 const _request = require("../../lib/helpers/request");
 const _workflow = /*#__PURE__*/ _interopRequireWildcard(require("../../lib/marshal/workflow"));
+const _runContext = require("../../lib/run-context");
 function _interopRequireDefault(obj) {
 return obj && obj.__esModule ? obj : {
 default: obj
@@ -66,7 +66,7 @@ class WorkflowPush extends _baseCommand.default {
 this.log(`‣ Reading \`${dirContext.key}\` at ${dirContext.abspath}`);
 // 2. Read the workflow.json with its template files.
 const [workflow, errors] = await _workflow.readWorkflowDir(dirContext, {
-
+withExtractedFiles: true
 });
 if (errors.length > 0) {
 this.error(`Found the following errors in \`${dirContext.key}\` ${_workflow.WORKFLOW_JSON}\n\n` + (0, _error.formatErrors)(errors));
@@ -97,7 +97,7 @@ class WorkflowPush extends _baseCommand.default {
 type: "workflow",
 key: workflowKey
 };
-return (0,
+return (0, _runContext.ensureResourceDirForTarget)(resourceDir, target);
 }
 if (workflowKey) {
 const dirPath = _nodePath.resolve(runCwd, workflowKey);

package/dist/commands/workflow/validate.js
CHANGED
@@ -10,11 +10,11 @@ const _nodePath = /*#__PURE__*/ _interopRequireWildcard(require("node:path"));
 const _core = require("@oclif/core");
 const _baseCommand = /*#__PURE__*/ _interopRequireDefault(require("../../lib/base-command"));
 const _const = require("../../lib/helpers/const");
-const _dirContext = require("../../lib/helpers/dir-context");
 const _error = require("../../lib/helpers/error");
 const _object = require("../../lib/helpers/object");
 const _request = require("../../lib/helpers/request");
 const _workflow = /*#__PURE__*/ _interopRequireWildcard(require("../../lib/marshal/workflow"));
+const _runContext = require("../../lib/run-context");
 function _interopRequireDefault(obj) {
 return obj && obj.__esModule ? obj : {
 default: obj
@@ -66,7 +66,7 @@ class WorkflowValidate extends _baseCommand.default {
 this.log(`‣ Reading \`${dirContext.key}\` at ${dirContext.abspath}`);
 // 2. Read the workflow.json with its template files.
 const [workflow, errors] = await _workflow.readWorkflowDir(dirContext, {
-
+withExtractedFiles: true
 });
 if (errors.length > 0) {
 this.error(`Found the following errors in \`${dirContext.key}\` ${_workflow.WORKFLOW_JSON}\n\n` + (0, _error.formatErrors)(errors));
@@ -93,7 +93,7 @@ class WorkflowValidate extends _baseCommand.default {
 type: "workflow",
 key: workflowKey
 };
-return (0,
+return (0, _runContext.ensureResourceDirForTarget)(resourceDir, target);
 }
 if (workflowKey) {
 const dirPath = _nodePath.resolve(runCwd, workflowKey);

package/dist/lib/api-v1.js
CHANGED
@@ -17,10 +17,10 @@ function _interopRequireDefault(obj) {
 const DEFAULT_ORIGIN = "https://control.knock.app";
 const API_VERSION = "v1";
 class ApiV1 {
-// By resources: Workflows
 async ping() {
 return this.get("/ping");
 }
+// By resources: Workflows
 async listWorkflows({ flags }) {
 const params = (0, _object.prune)({
 environment: flags.environment,
@@ -80,7 +80,7 @@
 async commitAllChanges({ flags }) {
 const params = (0, _object.prune)({
 environment: flags.environment,
-commit_message: flags
+commit_message: flags["commit-message"]
 });
 return this.put(`/commits`, {}, {
 params
@@ -94,6 +94,42 @@
 params
 });
 }
+// By resources: Translations
+async listTranslations({ flags }) {
+const params = (0, _object.prune)({
+environment: flags.environment,
+hide_uncommitted_changes: flags["hide-uncommitted-changes"],
+...(0, _page.toPageParams)(flags)
+});
+return this.get("/translations", {
+params
+});
+}
+async upsertTranslation({ flags }, translation) {
+const params = (0, _object.prune)({
+environment: flags.environment,
+commit: flags.commit,
+commit_message: flags["commit-message"],
+namespace: translation.namespace
+});
+return this.put(`/translations/${translation.locale_code}`, {
+translation
+}, {
+params
+});
+}
+async validateTranslation({ flags }, translation) {
+const params = (0, _object.prune)({
+environment: flags.environment,
+namespace: translation.namespace
+});
+const data = {
+translation
+};
+return this.put(`/translations/${translation.locale_code}/validate`, data, {
+params
+});
+}
 // By methods:
 async get(subpath, config) {
 return this.client.get(`/${API_VERSION}` + subpath, config);
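
The translation endpoints added above mirror the existing workflow methods: a paginated list that honors hide_uncommitted_changes, an upsert keyed by locale_code with an optional namespace, and a validate variant that PUTs to /validate without committing. A hedged sketch of how a command might call them, limited to the flag and field names visible in this diff (the full shape of a translation payload is not shown here):

    // Sketch only; `apiV1` is the client built in base-command.js, and the
    // translation object is limited to fields visible in this diff.
    const translation = { locale_code: "en", namespace: "admin" /* , ...content not shown */ };

    await apiV1.validateTranslation({ flags: { environment: "development" } }, translation);
    await apiV1.upsertTranslation(
      { flags: { environment: "development", commit: true, "commit-message": "Add en translations" } },
      translation
    );
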
package/dist/lib/base-command.js
CHANGED
@@ -8,7 +8,7 @@ Object.defineProperty(exports, "default", {
 });
 const _core = require("@oclif/core");
 const _apiV1 = /*#__PURE__*/ _interopRequireDefault(require("./api-v1"));
-const _runContext = /*#__PURE__*/ _interopRequireWildcard(require("./run-context"));
+const _runContext = /*#__PURE__*/ _interopRequireWildcard(require("./run-context/index"));
 const _userConfig = /*#__PURE__*/ _interopRequireDefault(require("./user-config"));
 function _interopRequireDefault(obj) {
 return obj && obj.__esModule ? obj : {
@@ -64,7 +64,7 @@ class BaseCommand extends _core.Command {
 // 3. Instantiate a knock api client.
 this.apiV1 = new _apiV1.default(this.props.flags, this.config);
 // 4. Load the run context of the invoked command.
-this.runContext = await _runContext.load();
+this.runContext = await _runContext.load(this.id);
 }
 }
 // Global flags are inherited by any command that extends BaseCommand.

package/dist/lib/helpers/error.js
CHANGED
@@ -13,6 +13,8 @@ _export(exports, {
 JsonSyntaxError: ()=>JsonSyntaxError,
 JsonDataError: ()=>JsonDataError,
 LiquidParseError: ()=>LiquidParseError,
+SourceError: ()=>SourceError,
+formatError: ()=>formatError,
 formatErrors: ()=>formatErrors
 });
 const _string = require("./string");
@@ -44,12 +46,13 @@ class LiquidParseError extends CustomError {
 this.context = context;
 }
 }
-
-
-
-
-
-
+class SourceError extends CustomError {
+constructor(message, source, tag){
+super(tag ? `${tag}: ${message}` : message);
+this.source = source;
+}
+}
+const formatError = (error)=>{
 switch(true){
 case error instanceof ApiError:
 case error instanceof JsonSyntaxError:
@@ -64,12 +67,17 @@ class LiquidParseError extends CustomError {
 const e = error;
 return `${e.name}: ${e.message + "\n" + e.context}`;
 }
+case error instanceof SourceError:
+{
+const e = error;
+return `${e.source}\n` + (0, _string.indentString)(e.message, 2);
+}
 default:
 throw new Error(`Unhandled error type: ${error}`);
 }
 };
 const formatErrors = (errors, opts = {})=>{
-const { joinBy ="\n\n" , indentBy =0 } = opts;
+const { prependBy ="" , joinBy ="\n\n" , indentBy =0 } = opts;
 const formatted = errors.map((e)=>formatError(e)).join(joinBy);
-return (0, _string.indentString)(formatted, indentBy);
+return (0, _string.indentString)(prependBy + formatted, indentBy);
 };
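
The new SourceError carries a source label (for example, a file path) next to the message, and formatError renders the source on its own line with the message indented beneath it; formatErrors additionally gains a prependBy option that is prefixed before indenting. A small usage sketch with hypothetical values, assuming indentString pads each line by the given number of spaces:

    // Usage sketch for SourceError plus the new formatErrors options
    // (the file path, tag, and message here are made up for illustration).
    const { SourceError, formatErrors } = require("./dist/lib/helpers/error");

    const errors = [
      new SourceError("locale is not supported", "translations/xx/xx.json", "TranslationError")
    ];

    console.log(formatErrors(errors, { prependBy: "‣ " }));
    // Roughly:
    // ‣ translations/xx/xx.json
    //   TranslationError: locale is not supported
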
package/dist/lib/helpers/flag.js
CHANGED
@@ -2,11 +2,58 @@
 Object.defineProperty(exports, "__esModule", {
 value: true
 });
-
-
-
+function _export(target, all) {
+for(var name in all)Object.defineProperty(target, name, {
+enumerable: true,
+get: all[name]
+});
+}
+_export(exports, {
+booleanStr: ()=>booleanStr,
+dirPath: ()=>dirPath
 });
+const _nodePath = /*#__PURE__*/ _interopRequireWildcard(require("node:path"));
 const _core = require("@oclif/core");
+const _fsExtra = /*#__PURE__*/ _interopRequireWildcard(require("fs-extra"));
+function _getRequireWildcardCache(nodeInterop) {
+if (typeof WeakMap !== "function") return null;
+var cacheBabelInterop = new WeakMap();
+var cacheNodeInterop = new WeakMap();
+return (_getRequireWildcardCache = function(nodeInterop) {
+return nodeInterop ? cacheNodeInterop : cacheBabelInterop;
+})(nodeInterop);
+}
+function _interopRequireWildcard(obj, nodeInterop) {
+if (!nodeInterop && obj && obj.__esModule) {
+return obj;
+}
+if (obj === null || typeof obj !== "object" && typeof obj !== "function") {
+return {
+default: obj
+};
+}
+var cache = _getRequireWildcardCache(nodeInterop);
+if (cache && cache.has(obj)) {
+return cache.get(obj);
+}
+var newObj = {};
+var hasPropertyDescriptor = Object.defineProperty && Object.getOwnPropertyDescriptor;
+for(var key in obj){
+if (key !== "default" && Object.prototype.hasOwnProperty.call(obj, key)) {
+var desc = hasPropertyDescriptor ? Object.getOwnPropertyDescriptor(obj, key) : null;
+if (desc && (desc.get || desc.set)) {
+Object.defineProperty(newObj, key, desc);
+} else {
+newObj[key] = obj[key];
+}
+}
+}
+newObj.default = obj;
+if (cache) {
+cache.set(obj, newObj);
+}
+return newObj;
+}
 const booleanStr = _core.Flags.custom({
 options: [
 "true",
@@ -14,3 +61,16 @@ const booleanStr = _core.Flags.custom({
 ],
 parse: async (input)=>input === "true"
 });
+const dirPath = _core.Flags.custom({
+parse: async (input)=>{
+const abspath = _nodePath.isAbsolute(input) ? input : _nodePath.resolve(process.cwd(), input);
+const exists = await _fsExtra.pathExists(abspath);
+if (exists && !(await _fsExtra.lstat(abspath)).isDirectory()) {
+throw new Error(`${input} exists but is not a directory`);
+}
+return {
+abspath,
+exists
+};
+}
+});
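
The new dirPath custom flag resolves its input to an absolute path, rejects paths that exist but are not directories, and parses to a small { abspath, exists } context object; workflow pull uses it for --workflows-dir above. A minimal sketch of how a command would declare and consume it (hypothetical command, same calls as in the diff):

    // Hypothetical oclif command using the dirPath flag helper from this package's dist output.
    const { Command, Flags } = require("@oclif/core");
    const { dirPath } = require("./dist/lib/helpers/flag");

    class ExampleCommand extends Command {
      static flags = {
        all: Flags.boolean(),
        // "--workflows-dir ./some/dir" parses into { abspath, exists }.
        "workflows-dir": dirPath({ dependsOn: ["all"] })
      };

      async run() {
        const { flags } = await this.parse(ExampleCommand);
        const dirCtx = flags["workflows-dir"];
        if (dirCtx) this.log(`${dirCtx.abspath} exists: ${dirCtx.exists}`);
      }
    }
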

package/dist/lib/helpers/fs.js
ADDED
@@ -0,0 +1,52 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", {
+value: true
+});
+Object.defineProperty(exports, "isDirectory", {
+enumerable: true,
+get: ()=>isDirectory
+});
+const _fsExtra = /*#__PURE__*/ _interopRequireWildcard(require("fs-extra"));
+function _getRequireWildcardCache(nodeInterop) {
+if (typeof WeakMap !== "function") return null;
+var cacheBabelInterop = new WeakMap();
+var cacheNodeInterop = new WeakMap();
+return (_getRequireWildcardCache = function(nodeInterop) {
+return nodeInterop ? cacheNodeInterop : cacheBabelInterop;
+})(nodeInterop);
+}
+function _interopRequireWildcard(obj, nodeInterop) {
+if (!nodeInterop && obj && obj.__esModule) {
+return obj;
+}
+if (obj === null || typeof obj !== "object" && typeof obj !== "function") {
+return {
+default: obj
+};
+}
+var cache = _getRequireWildcardCache(nodeInterop);
+if (cache && cache.has(obj)) {
+return cache.get(obj);
+}
+var newObj = {};
+var hasPropertyDescriptor = Object.defineProperty && Object.getOwnPropertyDescriptor;
+for(var key in obj){
+if (key !== "default" && Object.prototype.hasOwnProperty.call(obj, key)) {
+var desc = hasPropertyDescriptor ? Object.getOwnPropertyDescriptor(obj, key) : null;
+if (desc && (desc.get || desc.set)) {
+Object.defineProperty(newObj, key, desc);
+} else {
+newObj[key] = obj[key];
+}
+}
+}
+newObj.default = obj;
+if (cache) {
+cache.set(obj, newObj);
+}
+return newObj;
+}
+const isDirectory = async (abspath)=>{
+const exists = await _fsExtra.pathExists(abspath);
+return exists && (await _fsExtra.lstat(abspath)).isDirectory();
+};

package/dist/lib/helpers/json.js
CHANGED
@@ -10,6 +10,7 @@ function _export(target, all) {
 }
 _export(exports, {
 DOUBLE_SPACES: ()=>DOUBLE_SPACES,
+parseJson: ()=>parseJson,
 readJson: ()=>readJson
 });
 const _jsonlint = /*#__PURE__*/ _interopRequireWildcard(require("@prantlf/jsonlint"));
@@ -55,8 +56,7 @@
 return newObj;
 }
 const DOUBLE_SPACES = "  ";
-const
-const json = await _fsExtra.readFile(filePath, "utf8");
+const parseJson = (json)=>{
 let payload;
 const errors = [];
 try {
@@ -71,3 +71,7 @@ const readJson = async (filePath)=>{
 errors
 ];
 };
+const readJson = async (filePath)=>{
+const json = await _fsExtra.readFile(filePath, "utf8");
+return parseJson(json);
+};
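
readJson is now a thin wrapper that reads the file and delegates to the new parseJson, so JSON from sources other than the filesystem can reuse the same lint-backed parsing. Both appear to return a [payload, errors] pair (the destructuring in the workflow commands suggests this, but the full return statement is not shown); a small usage sketch under that assumption:

    // Usage sketch; assumes parseJson/readJson return a [payload, errors] pair.
    const { parseJson, readJson } = require("./dist/lib/helpers/json");

    const [payload, errors] = parseJson('{"locale_code": "en"}');
    if (errors.length > 0) {
      // Parse problems are collected as error objects rather than thrown.
      console.error(errors);
    } else {
      console.log(payload);
    }

    // readJson(filePath) reads the file with fs-extra and calls parseJson on it.
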

package/dist/lib/helpers/object.js
CHANGED
@@ -11,6 +11,8 @@ function _export(target, all) {
 _export(exports, {
 split: ()=>split,
 omitDeep: ()=>omitDeep,
+mapValuesDeep: ()=>mapValuesDeep,
+getLastFound: ()=>getLastFound,
 prune: ()=>prune,
 merge: ()=>merge,
 ObjPath: ()=>ObjPath
@@ -40,6 +42,47 @@ const omitDeep = (input, paths)=>{
 };
 return Array.isArray(input) ? input.map((i)=>omitDeepOnOwnProps(i)) : omitDeepOnOwnProps(input);
 };
+const mapValuesDeep = (input, fn, parts = [])=>{
+if ((0, _lodash.isPlainObject)(input)) {
+const entries = Object.entries(input).map(([k, v])=>{
+return Array.isArray(v) || (0, _lodash.isPlainObject)(v) ? [
+k,
+mapValuesDeep(v, fn, [
+...parts,
+k
+])
+] : [
+k,
+fn(v, k, [
+...parts,
+k
+])
+];
+});
+return Object.fromEntries(entries);
+}
+if (Array.isArray(input)) {
+return input.map((item, idx)=>mapValuesDeep(item, fn, [
+...parts,
+idx
+]));
+}
+return input;
+};
+const getLastFound = (obj, parts)=>{
+let current = obj;
+let found;
+for (const part of parts){
+const lookupable = Array.isArray(current) || (0, _lodash.isPlainObject)(current);
+if (lookupable && part in current) {
+found = current[part];
+current = found;
+continue;
+}
+break;
+}
+return found;
+};
 const prune = (obj)=>(0, _lodash.omitBy)(obj, _lodash.isNil);
 const merge = (obj, ...sources)=>(0, _lodash.merge)({}, obj, ...sources);
 class ObjPath {
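
mapValuesDeep walks plain objects and arrays, calling fn(value, key, parts) on every leaf with the path parts accumulated so far, while getLastFound follows a parts path only as far as it exists and returns the deepest value it reached. A small illustration with hypothetical data, based on the implementation shown above:

    // Illustration of the new object helpers (hypothetical data).
    const { mapValuesDeep, getLastFound } = require("./dist/lib/helpers/object");

    const input = { steps: [{ ref: "email_1", settings: { delay: 5 } }] };

    // Leaf values are replaced by whatever fn returns; containers are recursed into.
    const stringified = mapValuesDeep(input, (value, key, parts) => `${parts.join(".")}=${value}`);
    // => { steps: [{ ref: "steps.0.ref=email_1", settings: { delay: "steps.0.settings.delay=5" } }] }

    // Follows the path while each part exists; returns the last value it could reach.
    getLastFound(input, ["steps", 0, "settings", "delay"]); // => 5
    getLastFound(input, ["steps", 0, "missing", "delay"]);  // => { ref: "email_1", settings: { delay: 5 } }
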
package/dist/lib/helpers/page.js
CHANGED
@@ -10,6 +10,7 @@ function _export(target, all) {
 }
 _export(exports, {
 PageAction: ()=>PageAction,
+MAX_PAGINATION_LIMIT: ()=>MAX_PAGINATION_LIMIT,
 pageFlags: ()=>pageFlags,
 toPageParams: ()=>toPageParams,
 maybePromptPageAction: ()=>maybePromptPageAction,
@@ -25,11 +26,12 @@ function _interopRequireDefault(obj) {
 default: obj
 };
 }
+const MAX_PAGINATION_LIMIT = 100;
 const pageFlags = {
 after: _core.Flags.string(),
 before: _core.Flags.string(),
 limit: _core.Flags.integer({
-max:
+max: MAX_PAGINATION_LIMIT
 })
 };
 const toPageParams = (flags)=>{
|