@openfn/cli 1.23.0 → 1.24.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/index.js +45 -9
- package/dist/process/runner.js +999 -804
- package/package.json +6 -6
package/dist/process/runner.js
CHANGED
@@ -21,13 +21,13 @@ var urlMap = {
   ["local"]: LOCAL_URL
 };
 var DEFAULT_ENV = "staging";
-var getURL = (options7) => {
-  if (options7.apolloUrl) {
-    if (options7.apolloUrl in urlMap) {
-      return urlMap[options7.apolloUrl];
+var getURL = (options8) => {
+  if (options8.apolloUrl) {
+    if (options8.apolloUrl in urlMap) {
+      return urlMap[options8.apolloUrl];
     }
-    if (options7.apolloUrl.startsWith("http")) {
-      return options7.apolloUrl;
+    if (options8.apolloUrl.startsWith("http")) {
+      return options8.apolloUrl;
     }
     throw new Error(`Unrecognised apollo URL`);
   }
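
For context on the hunk above: getURL resolves the CLI's apollo URL option either as a named key of urlMap or as a literal http(s) URL. A minimal sketch of that resolution, assuming made-up map entries and a fallback to DEFAULT_ENV that is not shown in this diff:

    // Sketch only - the urlMap values and the final fallback are assumptions.
    const LOCAL_URL = "http://localhost:3000";
    const urlMap = { ["local"]: LOCAL_URL, staging: "https://staging.apollo.example" };
    const DEFAULT_ENV = "staging";
    const getURL = (options) => {
      if (options.apolloUrl) {
        if (options.apolloUrl in urlMap) {
          return urlMap[options.apolloUrl]; // named environment
        }
        if (options.apolloUrl.startsWith("http")) {
          return options.apolloUrl; // literal URL passes straight through
        }
        throw new Error(`Unrecognised apollo URL`);
      }
      return urlMap[DEFAULT_ENV]; // assumed fallback when no option is given
    };
    console.log(getURL({ apolloUrl: "local" })); // -> http://localhost:3000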
@@ -52,14 +52,14 @@ var outputFiles = (files, logger) => {
 };
 
 // src/apollo/handler.ts
-var apolloHandler = async (options7, logger) => {
-  logger.always(`Calling Apollo service: ${options7.service}`);
-  const json = await loadPayload(logger, options7.payload);
-  const url2 = getURL(options7);
+var apolloHandler = async (options8, logger) => {
+  logger.always(`Calling Apollo service: ${options8.service}`);
+  const json = await loadPayload(logger, options8.payload);
+  const url2 = getURL(options8);
   logger.success(`Using apollo server at`, url2);
-  const result = await callApollo(url2, options7.service, json, logger);
+  const result = await callApollo(url2, options8.service, json, logger);
   if (result) {
-    await serializeOutput(options7, result, logger);
+    await serializeOutput(options8, result, logger);
   } else {
     logger.warn("No output returned from Apollo");
   }
@@ -79,15 +79,15 @@ var write = async (basePath, filePath, content, logger) => {
   await writeFile(dest, content);
   logger.success(`Wrote content to ${dest}`);
 };
-var serializeOutput = async (options7, result, logger) => {
-  if (options7.outputPath) {
-    if (result.files && !options7.outputPath.endsWith(".json")) {
+var serializeOutput = async (options8, result, logger) => {
+  if (options8.outputPath) {
+    if (result.files && !options8.outputPath.endsWith(".json")) {
       for (const p in result.files) {
-        await write(options7.outputPath, p, result.files[p], logger);
+        await write(options8.outputPath, p, result.files[p], logger);
       }
     } else {
       await write(
-        options7.outputPath,
+        options8.outputPath,
         "",
         JSON.stringify(result, null, 2),
         logger
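
The serializeOutput branch above is easiest to read in isolation: when the Apollo result carries a files map and the output path does not end in .json, each file is written separately under that path; otherwise the whole result is written as one pretty-printed JSON document. A sketch with the write helper stubbed out (not the real helper):

    // Sketch - `write(base, file, content)` is a stub standing in for the CLI's helper.
    const write = async (base, file, content) => console.log(`would write ${base}/${file}`);
    const serializeOutput = async (options, result) => {
      if (!options.outputPath) return;
      if (result.files && !options.outputPath.endsWith(".json")) {
        for (const p in result.files) {
          await write(options.outputPath, p, result.files[p]); // one file per entry
        }
      } else {
        await write(options.outputPath, "", JSON.stringify(result, null, 2));
      }
    };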
@@ -169,17 +169,17 @@ var namespaces = {
   [COMPILER]: "CMP",
   [JOB]: "JOB"
 };
-var createLogger2 = (name = "", options7) => {
-  const logOptions = options7.log || {};
+var createLogger2 = (name = "", options8) => {
+  const logOptions = options8.log || {};
   let json = false;
   let level = logOptions[name] || logOptions.default || "default";
-  if (options7.logJson) {
+  if (options8.logJson) {
     json = true;
   }
   return actualCreateLogger(namespaces[name] || name, {
     level,
     json,
-    sanitize: options7.sanitize || "none",
+    sanitize: options8.sanitize || "none",
     ...logOptions
   });
 };
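
createLogger2 picks a level per component: an entry keyed by the logger's name wins, then the configured default, then the literal "default" level. A two-line illustration with assumed option values:

    // Assumed log options: compiler at debug, everything else at info.
    const logOptions = { CMP: "debug", default: "info" };
    const levelFor = (name) => logOptions[name] || logOptions.default || "default";
    console.log(levelFor("CMP")); // "debug" - component-specific entry wins
    console.log(levelFor("RT"));  // "info"  - falls back to the default entry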
@@ -190,8 +190,8 @@ var createNullLogger = () => createLogger2(void 0, { log: { default: "none" } })
 import fs from "node:fs";
 import path2 from "node:path";
 import { rmdir } from "node:fs/promises";
-var getCachePath = async (plan, options7, stepId) => {
-  const { baseDir } = options7;
+var getCachePath = async (plan, options8, stepId) => {
+  const { baseDir } = options8;
   const { name } = plan.workflow;
   const basePath = `${baseDir}/.cli-cache/${name}`;
   if (stepId) {
@@ -199,10 +199,10 @@ var getCachePath = async (plan, options7, stepId) => {
   }
   return path2.resolve(basePath);
 };
-var ensureGitIgnore = (options7) => {
-  if (!options7._hasGitIgnore) {
+var ensureGitIgnore = (options8) => {
+  if (!options8._hasGitIgnore) {
     const ignorePath = path2.resolve(
-      options7.baseDir,
+      options8.baseDir,
       ".cli-cache",
       ".gitignore"
     );
@@ -212,19 +212,19 @@ var ensureGitIgnore = (options7) => {
       fs.writeFileSync(ignorePath, "*");
     }
   }
-  options7._hasGitIgnore = true;
+  options8._hasGitIgnore = true;
 };
-var saveToCache = async (plan, stepId, output, options7, logger) => {
-  if (options7.cacheSteps) {
-    const cachePath = await getCachePath(plan, options7, stepId);
+var saveToCache = async (plan, stepId, output, options8, logger) => {
+  if (options8.cacheSteps) {
+    const cachePath = await getCachePath(plan, options8, stepId);
     fs.mkdirSync(path2.dirname(cachePath), { recursive: true });
-    ensureGitIgnore(options7);
+    ensureGitIgnore(options8);
     logger.info(`Writing ${stepId} output to ${cachePath}`);
     fs.writeFileSync(cachePath, JSON.stringify(output));
   }
 };
-var clearCache = async (plan, options7, logger) => {
-  const cacheDir = await getCachePath(plan, options7);
+var clearCache = async (plan, options8, logger) => {
+  const cacheDir = await getCachePath(plan, options8);
   try {
     await rmdir(cacheDir, { recursive: true });
     logger.info(`Cleared cache at ${cacheDir}`);
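
The cache helpers above key everything off `${baseDir}/.cli-cache/${workflow name}`: saveToCache only runs when cacheSteps is set, and a .gitignore containing "*" is dropped into the cache directory so it never gets committed. Roughly, the layout looks like this (the per-step filename is an assumption; the stepId branch of getCachePath falls outside this hunk):

    // Assumed layout for a workflow at ./my-project/workflow.json named
    // "sync-patients", caching a step with id "upload":
    const baseDir = "./my-project";
    const name = "sync-patients";
    const stepId = "upload";
    const basePath = `${baseDir}/.cli-cache/${name}`;
    const cachePath = `${basePath}/${stepId}.json`; // assumed filename scheme
    // ./my-project/.cli-cache/.gitignore contains "*" so the cache stays untracked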
@@ -294,7 +294,7 @@ function parseAdaptors(plan) {
 // src/execute/serialize-output.ts
 import { mkdir as mkdir2, writeFile as writeFile2 } from "node:fs/promises";
 import { dirname } from "node:path";
-var serializeOutput2 = async (options7, result, logger) => {
+var serializeOutput2 = async (options8, result, logger) => {
   let output = result;
   if (output && (output.configuration || output.data)) {
     const { configuration, ...rest } = result;
@@ -305,14 +305,14 @@ var serializeOutput2 = async (options7, result, logger) => {
   } else {
     output = JSON.stringify(output, void 0, 2);
   }
-  if (options7.outputStdout) {
+  if (options8.outputStdout) {
     logger.success(`Result: `);
     logger.always(output);
-  } else if (options7.outputPath) {
-    await mkdir2(dirname(options7.outputPath), { recursive: true });
-    logger.debug(`Writing output to ${options7.outputPath}`);
-    await writeFile2(options7.outputPath, output);
-    logger.success(`State written to ${options7.outputPath}`);
+  } else if (options8.outputPath) {
+    await mkdir2(dirname(options8.outputPath), { recursive: true });
+    logger.debug(`Writing output to ${options8.outputPath}`);
+    await writeFile2(options8.outputPath, output);
+    logger.success(`State written to ${options8.outputPath}`);
   }
   return output;
 };
@@ -423,16 +423,16 @@ var removePackage = async (packageSpecifier, repoDir, logger) => {
     logger.warn(`Failed to remove ${aliasedName}: ${error.message}`);
   }
 };
-var clean = async (options7, logger) => {
-  if (options7.repoDir) {
+var clean = async (options8, logger) => {
+  if (options8.repoDir) {
     const doIt = await logger.confirm(
-      `This will remove everything at ${options7.repoDir}. Do you wish to proceed?`,
-      options7.force
+      `This will remove everything at ${options8.repoDir}. Do you wish to proceed?`,
+      options8.force
     );
     if (doIt) {
       return new Promise((resolve) => {
-        logger.info(`Cleaning repo at ${options7.repoDir} `);
-        exec(`npm exec rimraf ${options7.repoDir}`, () => {
+        logger.info(`Cleaning repo at ${options8.repoDir} `);
+        exec(`npm exec rimraf ${options8.repoDir}`, () => {
          logger.success("Repo cleaned");
          resolve();
        });
@@ -443,12 +443,12 @@ var clean = async (options7, logger) => {
     logger.error("No repoDir path detected");
   }
 };
-var pwd = async (options7, logger) => {
+var pwd = async (options8, logger) => {
   logger.info(`OPENFN_REPO_DIR is set to ${process.env.OPENFN_REPO_DIR}`);
-  logger.success(`Repo working directory is: ${options7.repoDir}`);
+  logger.success(`Repo working directory is: ${options8.repoDir}`);
 };
-var getDependencyList = async (options7, _logger) => {
-  const pkg = await loadRepoPkg(options7.repoDir);
+var getDependencyList = async (options8, _logger) => {
+  const pkg = await loadRepoPkg(options8.repoDir);
   const result = {};
   if (pkg) {
     Object.keys(pkg.dependencies).forEach((key) => {
@@ -461,9 +461,9 @@ var getDependencyList = async (options7, _logger) => {
   }
   return result;
 };
-var list = async (options7, logger) => {
-  const tree = await getDependencyList(options7, logger);
-  await pwd(options7, logger);
+var list = async (options8, logger) => {
+  const tree = await getDependencyList(options8, logger);
+  await pwd(options8, logger);
   const output = {};
   Object.keys(tree).forEach((key) => {
     const versions = tree[key];
@@ -595,7 +595,7 @@ var resolveSpecifierPath = async (pattern, repoDir, log2) => {
   return null;
 };
 var loadTransformOptions = async (opts, log2) => {
-  const options7 = {
+  const options8 = {
     logger: log2 || logger_default(COMPILER, opts),
     trace: opts.trace
   };
@@ -623,12 +623,12 @@ var loadTransformOptions = async (opts, log2) => {
         exportAll: true
       });
     }
-    options7["add-imports"] = {
+    options8["add-imports"] = {
       ignore: opts.ignoreImports,
       adaptors: adaptorsConfig
     };
   }
-  return options7;
+  return options8;
 };
 
 // src/util/load-state.ts
@@ -721,19 +721,19 @@ var load_state_default = async (plan, opts, log2, start) => {
 };
 
 // src/util/validate-adaptors.ts
-var validateAdaptors = async (options7, logger) => {
-  if (options7.skipAdaptorValidation) {
+var validateAdaptors = async (options8, logger) => {
+  if (options8.skipAdaptorValidation) {
     return;
   }
-  const hasDeclaredAdaptors = options7.adaptors && options7.adaptors.length > 0;
-  if (!options7.expressionPath && hasDeclaredAdaptors) {
+  const hasDeclaredAdaptors = options8.adaptors && options8.adaptors.length > 0;
+  if (!options8.expressionPath && hasDeclaredAdaptors) {
     logger.error("ERROR: adaptor and workflow provided");
     logger.error(
       "This is probably not what you meant to do. A workflow should declare an adaptor for each job."
     );
     throw new Error("adaptor and workflow provided");
   }
-  if (options7.expressionPath && !hasDeclaredAdaptors) {
+  if (options8.expressionPath && !hasDeclaredAdaptors) {
     logger.warn("WARNING: No adaptor provided!");
     logger.warn(
       "This job will probably fail. Pass an adaptor with the -a flag, eg:"
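
validateAdaptors enforces two rules: passing an explicit adaptor alongside a workflow is a hard error (workflows declare adaptors per job), while running a bare expression without an adaptor is only a warning. Condensed, with an assumed options shape:

    // Condensed restatement of the two guards; the `options` values are assumed.
    const options = { expressionPath: "job.js", adaptors: [] };
    const hasDeclaredAdaptors = options.adaptors && options.adaptors.length > 0;
    if (!options.expressionPath && hasDeclaredAdaptors) {
      throw new Error("adaptor and workflow provided"); // hard error
    }
    if (options.expressionPath && !hasDeclaredAdaptors) {
      console.warn("No adaptor provided! Pass one with the -a flag"); // warning only
    }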
@@ -835,17 +835,17 @@ var resolve_path_default = (path17, root) => {
 };
 
 // src/util/load-plan.ts
-var loadPlan = async (options7, logger) => {
-  const { workflowPath, planPath, expressionPath, workflowName } = options7;
+var loadPlan = async (options8, logger) => {
+  const { workflowPath, planPath, expressionPath, workflowName } = options8;
   let workflowObj;
-  if (workflowName || options7.workflow) {
+  if (workflowName || options8.workflow) {
     logger.debug(
       "Loading workflow from active project in workspace at ",
-      options7.workspace
+      options8.workspace
     );
-    const workspace2 = new Workspace(options7.workspace);
+    const workspace2 = new Workspace(options8.workspace);
     const proj = await workspace2.getCheckedOutProject();
-    const name = workflowName || options7.workflow;
+    const name = workflowName || options8.workflow;
     const workflow2 = proj?.getWorkflow(name);
     if (!workflow2) {
       const e = new Error(`Could not find Workflow "${name}"`);
@@ -855,12 +855,12 @@ var loadPlan = async (options7, logger) => {
     workflowObj = {
       workflow: workflow2.toJSON()
     };
-    options7.credentials ??= workspace2.getConfig().credentials;
-    options7.collectionsEndpoint ??= proj.openfn?.endpoint;
+    options8.credentials ??= workspace2.getConfig().credentials;
+    options8.collectionsEndpoint ??= proj.openfn?.endpoint;
   }
-  if (options7.path && /ya?ml$/.test(options7.path)) {
-    const content = await fs3.readFile(path4.resolve(options7.path), "utf-8");
-    options7.baseDir = dirname2(options7.path);
+  if (options8.path && /ya?ml$/.test(options8.path)) {
+    const content = await fs3.readFile(path4.resolve(options8.path), "utf-8");
+    options8.baseDir = dirname2(options8.path);
     workflowObj = yamlToJson(content);
     const { options: o, ...rest } = workflowObj;
     if (!workflowObj.workflow && workflowObj.options) {
@@ -868,25 +868,25 @@ var loadPlan = async (options7, logger) => {
     }
   }
   if (!workflowObj && expressionPath) {
-    return loadExpression(options7, logger);
+    return loadExpression(options8, logger);
   }
   const jsonPath = planPath || workflowPath;
-  if (jsonPath && !options7.baseDir) {
-    options7.baseDir = path4.dirname(jsonPath);
+  if (jsonPath && !options8.baseDir) {
+    options8.baseDir = path4.dirname(jsonPath);
   }
   workflowObj = workflowObj ?? await loadJson(jsonPath, logger);
   const defaultName = workflowObj.name || path4.parse(jsonPath ?? "").name;
   if (workflowObj.jobs) {
-    return loadOldWorkflow(workflowObj, options7, logger, defaultName);
+    return loadOldWorkflow(workflowObj, options8, logger, defaultName);
   } else if (workflowObj.workflow) {
     return loadXPlan(
       workflowObj,
-      Object.assign({}, workflowObj.options, options7),
+      Object.assign({}, workflowObj.options, options8),
       logger,
       defaultName
     );
   } else {
-    return loadXPlan({ workflow: workflowObj }, options7, logger, defaultName);
+    return loadXPlan({ workflow: workflowObj }, options8, logger, defaultName);
   }
 };
 var load_plan_default = loadPlan;
@@ -923,8 +923,8 @@ var maybeAssign = (a, b, keys) => {
     }
   });
 };
-var loadExpression = async (options7, logger) => {
-  const expressionPath = options7.expressionPath;
+var loadExpression = async (options8, logger) => {
+  const expressionPath = options8.expressionPath;
   logger.debug(`Loading expression from ${expressionPath}`);
   try {
     const expression = await fs3.readFile(expressionPath, "utf8");
@@ -932,19 +932,19 @@ var loadExpression = async (options7, logger) => {
     const step = {
       expression,
       // The adaptor should have been expanded nicely already, so we don't need intervene here
-      adaptors: options7.adaptors ?? []
+      adaptors: options8.adaptors ?? []
     };
     const wfOptions = {};
-    maybeAssign(options7, wfOptions, ["timeout"]);
+    maybeAssign(options8, wfOptions, ["timeout"]);
     const plan = {
       workflow: {
         name,
         steps: [step],
-        globals: options7.globals
+        globals: options8.globals
       },
       options: wfOptions
     };
-    return loadXPlan(plan, options7, logger);
+    return loadXPlan(plan, options8, logger);
   } catch (e) {
     abort_default(
       logger,
@@ -955,7 +955,7 @@ var loadExpression = async (options7, logger) => {
     return {};
   }
 };
-var loadOldWorkflow = async (workflow2, options7, logger, defaultName = "") => {
+var loadOldWorkflow = async (workflow2, options8, logger, defaultName = "") => {
   const plan = {
     workflow: {
       steps: workflow2.jobs
@@ -967,7 +967,7 @@ var loadOldWorkflow = async (workflow2, options7, logger, defaultName = "") => {
   if (workflow2.id) {
     plan.id = workflow2.id;
   }
-  const final = await loadXPlan(plan, options7, logger, defaultName);
+  const final = await loadXPlan(plan, options8, logger, defaultName);
   logger.warn("Converted workflow into new format:");
   logger.warn(final);
   return final;
@@ -1100,7 +1100,7 @@ var ensureCollections = (plan, {
     );
   }
 };
-var loadXPlan = async (plan, options7, logger, defaultName = "") => {
+var loadXPlan = async (plan, options8, logger, defaultName = "") => {
   if (!plan.options) {
     plan.options = {};
   }
@@ -1111,21 +1111,21 @@ var loadXPlan = async (plan, options7, logger, defaultName = "") => {
   ensureCollections(
     plan,
     {
-      version: options7.collectionsVersion,
-      apiKey: options7.apiKey,
-      endpoint: options7.collectionsEndpoint
+      version: options8.collectionsVersion,
+      apiKey: options8.apiKey,
+      endpoint: options8.collectionsEndpoint
     },
     logger
   );
-  if (options7.globals)
-    plan.workflow.globals = options7.globals;
-  await importGlobals(plan, options7.baseDir, logger);
-  await importExpressions(plan, options7.baseDir, logger);
-  if (options7.expandAdaptors) {
+  if (options8.globals)
+    plan.workflow.globals = options8.globals;
+  await importGlobals(plan, options8.baseDir, logger);
+  await importExpressions(plan, options8.baseDir, logger);
+  if (options8.expandAdaptors) {
     expand_adaptors_default(plan);
   }
-  await map_adaptors_to_monorepo_default(options7.monorepoPath, plan, logger);
-  maybeAssign(options7, plan.options, ["timeout", "start"]);
+  await map_adaptors_to_monorepo_default(options8.monorepoPath, plan, logger);
+  maybeAssign(options8, plan.options, ["timeout", "start"]);
   logger.info(`Loaded workflow ${plan.workflow.name ?? ""}`);
   return plan;
 };
@@ -1168,7 +1168,7 @@ var fuzzy_match_step_default = (plan, stepPattern) => {
 
 // src/util/validate-plan.ts
 var assertWorkflowStructure = (plan, logger) => {
-  const { workflow: workflow2, options: options7 } = plan;
+  const { workflow: workflow2, options: options8 } = plan;
   if (!workflow2 || typeof workflow2 !== "object") {
     throw new Error(`Missing or invalid "workflow" key in execution plan`);
   }
@@ -1181,7 +1181,7 @@ var assertWorkflowStructure = (plan, logger) => {
   workflow2.steps.forEach((step, index) => {
     assertStepStructure(step, index);
   });
-  assertOptionsStructure(options7, logger);
+  assertOptionsStructure(options8, logger);
 };
 var assertStepStructure = (step, index) => {
   const allowedKeys = [
@@ -1208,9 +1208,9 @@ var assertStepStructure = (step, index) => {
     );
   }
 };
-var assertOptionsStructure = (options7 = {}, logger) => {
+var assertOptionsStructure = (options8 = {}, logger) => {
   const allowedKeys = ["timeout", "stepTimeout", "start", "end", "sanitize"];
-  for (const key in options7) {
+  for (const key in options8) {
     if (!allowedKeys.includes(key)) {
       logger.warn(`Unrecognized option "${key}" in options object`);
     }
@@ -1266,15 +1266,15 @@ var matchStep = (plan, stepPattern, stepName, logger) => {
   }
   return "";
 };
-var loadAndApplyCredentialMap = async (plan, options7, logger) => {
+var loadAndApplyCredentialMap = async (plan, options8, logger) => {
   let creds = {};
-  if (options7.credentials) {
+  if (options8.credentials) {
     try {
       const credsRaw = await readFile3(
-        path5.resolve(options7.workspace, options7.credentials),
+        path5.resolve(options8.workspace, options8.credentials),
         "utf8"
       );
-      if (options7.credentials.endsWith(".json")) {
+      if (options8.credentials.endsWith(".json")) {
         creds = JSON.parse(credsRaw);
       } else {
         creds = yamlToJson2(credsRaw);
@@ -1282,7 +1282,7 @@ var loadAndApplyCredentialMap = async (plan, options7, logger) => {
       logger.info("Credential map loaded ");
     } catch (e) {
       if (e?.message?.match(/ENOENT/)) {
-        logger.debug("Credential map not found at", options7.credentials);
+        logger.debug("Credential map not found at", options8.credentials);
       } else {
         logger.error("Error processing credential map:");
         process.exitCode = 1;
@@ -1292,18 +1292,18 @@ var loadAndApplyCredentialMap = async (plan, options7, logger) => {
   }
   return apply_credential_map_default(plan, creds, logger);
 };
-var executeHandler = async (options7, logger) => {
+var executeHandler = async (options8, logger) => {
   const start = (/* @__PURE__ */ new Date()).getTime();
-  assert_path_default(options7.path);
-  await validate_adaptors_default(options7, logger);
-  let plan = await load_plan_default(options7, logger);
+  assert_path_default(options8.path);
+  await validate_adaptors_default(options8, logger);
+  let plan = await load_plan_default(options8, logger);
   validate_plan_default(plan, logger);
-  await loadAndApplyCredentialMap(plan, options7, logger);
-  if (options7.cacheSteps) {
-    await clearCache(plan, options7, logger);
+  await loadAndApplyCredentialMap(plan, options8, logger);
+  if (options8.cacheSteps) {
+    await clearCache(plan, options8, logger);
   }
   const moduleResolutions = {};
-  const { repoDir, monorepoPath, autoinstall } = options7;
+  const { repoDir, monorepoPath, autoinstall } = options8;
   if (autoinstall) {
     if (monorepoPath) {
       logger.warn("Skipping auto-install as monorepo is being used");
@@ -1311,13 +1311,13 @@ var executeHandler = async (options7, logger) => {
     const autoInstallTargets = get_autoinstall_targets_default(plan);
     if (autoInstallTargets.length) {
       logger.info("Auto-installing language adaptors");
-      options7.adaptors = await install(
+      options8.adaptors = await install(
         { packages: autoInstallTargets, repoDir },
         logger
       );
-      if (autoInstallTargets.length === options7.adaptors.length) {
+      if (autoInstallTargets.length === options8.adaptors.length) {
         for (let i = 0; i < autoInstallTargets.length; i++) {
-          moduleResolutions[autoInstallTargets[i]] = options7.adaptors[i];
+          moduleResolutions[autoInstallTargets[i]] = options8.adaptors[i];
         }
       }
     }
@@ -1325,35 +1325,35 @@ var executeHandler = async (options7, logger) => {
   }
   let customStart;
   let customEnd;
-  if (options7.only) {
-    const step = matchStep(plan, options7.only, "only", logger);
+  if (options8.only) {
+    const step = matchStep(plan, options8.only, "only", logger);
     customStart = step;
     customEnd = step;
-    logger.always(`Only running workflow step "${options7.start}"`);
+    logger.always(`Only running workflow step "${options8.start}"`);
   } else {
-    if (options7.start) {
+    if (options8.start) {
       customStart = matchStep(
         plan,
-        options7.start ?? plan.options.start,
+        options8.start ?? plan.options.start,
         "start",
         logger
       );
-      logger.info(`Starting workflow from step "${options7.start}"`);
+      logger.info(`Starting workflow from step "${options8.start}"`);
     }
-    if (options7.end) {
+    if (options8.end) {
       customEnd = matchStep(
         plan,
-        options7.end ?? plan.options.end,
+        options8.end ?? plan.options.end,
         "end",
         logger
      );
-      logger.always(`Ending workflow at step "${options7.end}"`);
+      logger.always(`Ending workflow at step "${options8.end}"`);
     }
   }
-  const state = await load_state_default(plan, options7, logger, customStart);
+  const state = await load_state_default(plan, options8, logger, customStart);
   plan = override_plan_adaptors_default(plan, moduleResolutions);
-  if (options7.compile) {
-    plan = await compile_default(plan, options7, logger);
+  if (options8.compile) {
+    plan = await compile_default(plan, options8, logger);
   } else {
     logger.info("Skipping compilation as noCompile is set");
   }
@@ -1367,13 +1367,13 @@ var executeHandler = async (options7, logger) => {
     workflow: plan.workflow
   };
   try {
-    const result = await execute_default(finalPlan, state, options7, logger);
-    if (options7.cacheSteps) {
+    const result = await execute_default(finalPlan, state, options8, logger);
+    if (options8.cacheSteps) {
       logger.success(
         "Cached output written to ./cli-cache (see info logs for details)"
       );
     }
-    await serialize_output_default(options7, result, logger);
+    await serialize_output_default(options8, result, logger);
     const duration = printDuration((/* @__PURE__ */ new Date()).getTime() - start);
     if (result?.errors) {
       logger.warn(
@@ -1396,22 +1396,22 @@ var handler_default2 = executeHandler;
 
 // src/compile/handler.ts
 import { writeFile as writeFile3 } from "node:fs/promises";
-var compileHandler = async (options7, logger) => {
-  assert_path_default(options7.path);
+var compileHandler = async (options8, logger) => {
+  assert_path_default(options8.path);
   let result;
-  if (options7.expressionPath) {
-    const { code } = await compile_default(options7.expressionPath, options7, logger);
+  if (options8.expressionPath) {
+    const { code } = await compile_default(options8.expressionPath, options8, logger);
     result = code;
   } else {
-    const plan = await load_plan_default(options7, logger);
-    const compiledPlan = await compile_default(plan, options7, logger);
+    const plan = await load_plan_default(options8, logger);
+    const compiledPlan = await compile_default(plan, options8, logger);
     result = JSON.stringify(compiledPlan, null, 2);
   }
-  if (options7.outputStdout) {
+  if (options8.outputStdout) {
     logger.success("Result:\n\n" + result);
   } else {
-    await writeFile3(options7.outputPath, result);
-    logger.success(`Compiled to ${options7.outputPath}`);
+    await writeFile3(options8.outputPath, result);
+    logger.success(`Compiled to ${options8.outputPath}`);
   }
 };
 var handler_default3 = compileHandler;
@@ -1424,27 +1424,27 @@ import { readFile as readFile4, writeFile as writeFile4 } from "node:fs/promises
 import path6 from "node:path";
 import { request } from "undici";
 var DEFAULT_PAGE_SIZE = 1e3;
-var request_default = async (method, options7, logger) => {
-  const base = options7.lightning || process.env.OPENFN_ENDPOINT || "https://app.openfn.org";
-  const url2 = path6.join(base, "/collections", options7.collectionName);
+var request_default = async (method, options8, logger) => {
+  const base = options8.lightning || process.env.OPENFN_ENDPOINT || "https://app.openfn.org";
+  const url2 = path6.join(base, "/collections", options8.collectionName);
   logger.debug("Calling Collections server at ", url2);
   const headers = {
-    Authorization: `Bearer ${options7.token}`
+    Authorization: `Bearer ${options8.token}`
   };
   const query = Object.assign(
     {
-      key: options7.key,
-      limit: options7.pageSize || DEFAULT_PAGE_SIZE
+      key: options8.key,
+      limit: options8.pageSize || DEFAULT_PAGE_SIZE
     },
-    options7.query
+    options8.query
   );
   const args = {
     headers,
     method,
     query
   };
-  if (options7.data) {
-    args.body = JSON.stringify(options7.data);
+  if (options8.data) {
+    args.body = JSON.stringify(options8.data);
     headers["content-type"] = "application/json";
   }
   let result = {};
@@ -1455,11 +1455,11 @@ var request_default = async (method, options7, logger) => {
     if (cursor) {
       query.cursor = cursor;
     }
-    if (options7.limit) {
-      limit = options7.limit;
+    if (options8.limit) {
+      limit = options8.limit;
       query.limit = Math.min(
-        options7.pageSize || DEFAULT_PAGE_SIZE,
-        options7.limit - count
+        options8.pageSize || DEFAULT_PAGE_SIZE,
+        options8.limit - count
      );
    }
    try {
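
The pagination arithmetic above is worth spelling out: each request asks for at most one page, and the final page shrinks to whatever remains of the overall limit. Worked through with a limit of 2500 and the default page size of 1000:

    // Worked example of the Math.min clamp in the paging loop.
    const DEFAULT_PAGE_SIZE = 1e3;
    const limit = 2500;
    let count = 0;
    const pages = [];
    while (count < limit) {
      const pageLimit = Math.min(DEFAULT_PAGE_SIZE, limit - count);
      pages.push(pageLimit);
      count += pageLimit; // assume each page comes back full
    }
    console.log(pages); // [1000, 1000, 500]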
@@ -1553,7 +1553,7 @@ var ensureToken = (opts, logger) => {
     }
   }
 };
-var buildQuery = (options7) => {
+var buildQuery = (options8) => {
   const map = {
     createdBefore: "created_before",
     createdAfter: "created_after",
@@ -1562,34 +1562,34 @@ var buildQuery = (options7) => {
   };
   const query = {};
   Object.keys(map).forEach((key) => {
-    if (options7[key]) {
-      query[map[key]] = options7[key];
+    if (options8[key]) {
+      query[map[key]] = options8[key];
     }
   });
   return query;
 };
-var get = async (options7, logger) => {
-  ensureToken(options7, logger);
-  const multiMode = options7.key.includes("*");
+var get = async (options8, logger) => {
+  ensureToken(options8, logger);
+  const multiMode = options8.key.includes("*");
   if (multiMode) {
     logger.info(
-      `Fetching multiple items from collection "${options7.collectionName}" with pattern ${options7.key}`
+      `Fetching multiple items from collection "${options8.collectionName}" with pattern ${options8.key}`
     );
   } else {
     logger.info(
-      `Fetching "${options7.key}" from collection "${options7.collectionName}"`
+      `Fetching "${options8.key}" from collection "${options8.collectionName}"`
     );
   }
   let result = await request_default(
     "GET",
     {
-      lightning: options7.endpoint,
-      token: options7.token,
-      pageSize: options7.pageSize,
-      limit: options7.limit,
-      key: options7.key,
-      collectionName: options7.collectionName,
-      query: buildQuery(options7)
+      lightning: options8.endpoint,
+      token: options8.token,
+      pageSize: options8.pageSize,
+      limit: options8.limit,
+      key: options8.key,
+      collectionName: options8.collectionName,
+      query: buildQuery(options8)
     },
     logger
   );
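
buildQuery simply maps camelCase CLI options onto the snake_case query parameters the Collections API expects. A sketch restricted to the two mappings visible in this diff (the map has further entries that fall between hunks):

    const map = { createdBefore: "created_before", createdAfter: "created_after" };
    const buildQuery = (options) => {
      const query = {};
      Object.keys(map).forEach((key) => {
        if (options[key]) {
          query[map[key]] = options[key];
        }
      });
      return query;
    };
    console.log(buildQuery({ createdAfter: "2024-01-01" })); // { created_after: "2024-01-01" }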
@@ -1597,32 +1597,32 @@ var get = async (options7, logger) => {
     logger.success(`Fetched ${Object.keys(result).length} items!`);
   } else {
     result = Object.values(result)[0];
-    logger.success(`Fetched ${options7.key}`);
+    logger.success(`Fetched ${options8.key}`);
   }
-  if (options7.outputPath) {
+  if (options8.outputPath) {
     const content = JSON.stringify(
       result,
       null,
-      options7.pretty ? 2 : void 0
+      options8.pretty ? 2 : void 0
     );
-    await writeFile4(options7.outputPath, content);
-    logger.always(`Wrote items to ${options7.outputPath}`);
+    await writeFile4(options8.outputPath, content);
+    logger.always(`Wrote items to ${options8.outputPath}`);
   } else {
     logger.print(result);
   }
 };
-var set = async (options7, logger) => {
-  if (options7.key && options7.items) {
+var set = async (options8, logger) => {
+  if (options8.key && options8.items) {
     throwAbortableError(
       "ARGUMENT_ERROR: arguments for key and items were provided",
       "If upserting multiple items with --items, do not pass a key"
     );
   }
-  ensureToken(options7, logger);
-  logger.info(`Upserting items to collection "${options7.collectionName}"`);
+  ensureToken(options8, logger);
+  logger.info(`Upserting items to collection "${options8.collectionName}"`);
   const items = [];
-  if (options7.items) {
-    const resolvedPath = path7.resolve(options7.items);
+  if (options8.items) {
+    const resolvedPath = path7.resolve(options8.items);
     logger.debug("Loading items from ", resolvedPath);
     const data = await readFile4(resolvedPath, "utf8");
     const obj = JSON.parse(data);
@@ -1630,43 +1630,43 @@ var set = async (options7, logger) => {
       items.push({ key, value: JSON.stringify(value) });
     });
     logger.info(`Upserting ${items.length} items`);
-  } else if (options7.key && options7.value) {
-    const resolvedPath = path7.resolve(options7.value);
+  } else if (options8.key && options8.value) {
+    const resolvedPath = path7.resolve(options8.value);
     logger.debug("Loading value from ", resolvedPath);
-    const data = await readFile4(path7.resolve(options7.value), "utf8");
+    const data = await readFile4(path7.resolve(options8.value), "utf8");
     const value = JSON.stringify(JSON.parse(data));
-    items.push({ key: options7.key, value });
-    logger.info(`Upserting data to "${options7.key}"`);
+    items.push({ key: options8.key, value });
+    logger.info(`Upserting data to "${options8.key}"`);
   } else {
     throw new Error("INVALID_ARGUMENTS");
   }
   const result = await request_default(
     "POST",
     {
-      lightning: options7.endpoint,
-      token: options7.token,
-      key: options7.key,
-      collectionName: options7.collectionName,
+      lightning: options8.endpoint,
+      token: options8.token,
+      key: options8.key,
+      collectionName: options8.collectionName,
       data: { items }
     },
     logger
   );
   logger.success(`Upserted ${result.upserted} items!`);
 };
-var remove = async (options7, logger) => {
-  ensureToken(options7, logger);
+var remove = async (options8, logger) => {
+  ensureToken(options8, logger);
   logger.info(
-    `Removing "${options7.key}" from collection "${options7.collectionName}"`
+    `Removing "${options8.key}" from collection "${options8.collectionName}"`
   );
-  if (options7.dryRun) {
+  if (options8.dryRun) {
     logger.info("--dry-run passed: fetching affected items");
     let result = await request_default(
       "GET",
       {
-        lightning: options7.endpoint,
-        token: options7.token,
-        key: options7.key,
-        collectionName: options7.collectionName
+        lightning: options8.endpoint,
+        token: options8.token,
+        key: options8.key,
+        collectionName: options8.collectionName
       },
       logger
     );
@@ -1678,11 +1678,11 @@ var remove = async (options7, logger) => {
   let result = await request_default(
     "DELETE",
     {
-      lightning: options7.endpoint,
-      token: options7.token,
-      key: options7.key,
-      collectionName: options7.collectionName,
-      query: buildQuery(options7)
+      lightning: options8.endpoint,
+      token: options8.token,
+      key: options8.key,
+      collectionName: options8.collectionName,
+      query: buildQuery(options8)
     },
     logger
   );
@@ -1696,9 +1696,9 @@ var handler_default4 = {
 };
 
 // src/test/handler.ts
-var testHandler = async (options7, logger) => {
+var testHandler = async (options8, logger) => {
   logger.log("Running test workflow...");
-  const opts = { ...options7 };
+  const opts = { ...options8 };
   opts.compile = true;
   opts.adaptors = [];
   const plan = {
@@ -1760,12 +1760,249 @@ import {
   validateConfig
 } from "@openfn/deploy";
 
-// src/deploy…
+// src/projects/deploy.ts
 import Project from "@openfn/project";
-import …
+import c2 from "chalk";
+
+// src/util/ensure-log-opts.ts
+var defaultLoggerOptions = {
+  default: "default",
+  // TODO fix to lower case
+  job: "debug"
+};
+var ERROR_MESSAGE_LOG_LEVEL = "Unknown log level. Valid levels are none, debug, info and default.";
+var ERROR_MESSAGE_LOG_COMPONENT = "Unknown log component. Valid components are cli, compiler, runtime and job.";
+var componentShorthands = {
+  cmp: "compiler",
+  rt: "runtime",
+  "r/t": "runtime"
+};
+var ensureLogOpts = (opts) => {
+  const components = {};
+  const outgoingOpts = opts;
+  if (!opts.log && /^(version|test)$/.test(opts.command)) {
+    outgoingOpts.log = { default: "info" };
+    return outgoingOpts;
+  }
+  if (opts.log) {
+    const parts = opts.log.split(",");
+    parts.forEach((l) => {
+      let component = "";
+      let level = "";
+      if (l.match(/=/)) {
+        const parts2 = l.split("=");
+        component = parts2[0].toLowerCase();
+        if (componentShorthands[component]) {
+          component = componentShorthands[component];
+        }
+        level = parts2[1].toLowerCase();
+      } else {
+        component = "default";
+        level = l.toLowerCase();
+        if (level === "none" && !parts.find((p) => p.startsWith("job"))) {
+          components["job"] = "none";
+        }
+      }
+      if (!/^(cli|runtime|compiler|job|default)$/i.test(component)) {
+        throw new Error(ERROR_MESSAGE_LOG_COMPONENT);
+      }
+      level = level.toLowerCase();
+      if (!isValidLogLevel(level)) {
+        throw new Error(ERROR_MESSAGE_LOG_LEVEL);
+      }
+      components[component] = level;
+    });
+  }
+  outgoingOpts.log = {
+    ...defaultLoggerOptions,
+    ...components
+  };
+  return outgoingOpts;
+};
+var ensure_log_opts_default = ensureLogOpts;
+
+// src/options.ts
+var setDefaultValue = (opts, key, value) => {
+  const v = opts[key];
+  if (isNaN(v) && !v) {
+    opts[key] = value;
+  }
+};
+var apiKey = {
+  name: "apikey",
+  yargs: {
+    alias: ["pat", "token", "api-key"],
+    description: "API Key, Personal Access Token (PAT), or other access token from Lightning"
+  },
+  ensure: (opts) => {
+    if (!opts.apikey) {
+      opts.apiKey = process.env.OPENFN_API_KEY;
+    }
+  }
+};
+var confirm = {
+  name: "confirm",
+  yargs: {
+    alias: ["y"],
+    boolean: true,
+    description: "Skip confirmation prompts (e.g. 'Are you sure?')"
+  },
+  ensure: (opts) => {
+    if (opts.y) {
+      opts.confirm = false;
+    }
+    setDefaultValue(opts, "confirm", true);
+  }
+};
+var endpoint = {
+  name: "endpoint",
+  yargs: {
+    alias: ["lightning"],
+    description: "[beta only] URL to Lightning endpoint"
+  }
+};
+var force = {
+  name: "force",
+  yargs: {
+    alias: ["f"],
+    boolean: true,
+    description: "Force metadata to be regenerated",
+    default: false
+  }
+};
+var log = {
+  name: "log",
+  yargs: {
+    alias: ["l"],
+    description: "Set the log level",
+    string: true
+  },
+  ensure: (opts) => {
+    ensure_log_opts_default(opts);
+  }
+};
+var logJson = {
+  name: "log-json",
+  yargs: {
+    description: "Output all logs as JSON objects",
+    boolean: true
+  }
+};
+var path8 = {
+  name: "path",
+  yargs: {
+    description: "Path"
+  }
+};
+var snapshots = {
+  name: "snapshots",
+  yargs: {
+    description: "List of snapshot ids to pull",
+    array: true
+  }
+};
+var timeout = {
+  name: "timeout",
+  yargs: {
+    alias: ["t"],
+    number: true,
+    description: "Set the timeout duration (ms). Defaults to 5 minutes.",
+    default: 5 * 60 * 1e3
+  }
+};
+var workflow = {
+  name: "workflow",
+  yargs: {
+    string: true,
+    description: "Name of the workflow to execute"
+  }
+};
+
+// src/util/get-cli-option-object.ts
+function getCLIOptionObject(arg) {
+  if (isObject(arg)) {
+    return arg;
+  } else if (typeof arg === "string") {
+    try {
+      const p = JSON.parse(arg);
+      if (isObject(p))
+        return p;
+    } catch (e) {
+    }
+    return Object.fromEntries(
+      arg.split(",").map((pair) => {
+        const [k, v] = pair.split("=");
+        return [k.trim(), v.trim()];
+      })
+    );
+  }
+}
+function isObject(arg) {
+  return typeof arg === "object" && arg !== null && !Array.isArray(arg);
+}
+
+// src/projects/options.ts
+var env = {
+  name: "env",
+  yargs: {
+    description: "Environment name (eg staging, prod, branch)",
+    hidden: true
+  }
+};
+var alias = {
+  name: "alias",
+  yargs: {
+    alias: ["env"],
+    description: "Environment name (eg staging, prod, branch)"
+  }
+};
+var dryRun = {
+  name: "dryRun",
+  yargs: {
+    description: "Runs the command but does not commit any changes to disk or app"
+  }
+};
+var removeUnmapped = {
+  name: "remove-unmapped",
+  yargs: {
+    boolean: true,
+    description: "Removes all workflows that didn't get mapped from the final project after merge"
+  }
+};
+var workflowMappings = {
+  name: "workflow-mappings",
+  yargs: {
+    type: "string",
+    coerce: getCLIOptionObject,
+    description: "A manual object mapping of which workflows in source and target should be matched for a merge."
+  }
+};
+var outputPath = {
+  name: "output-path",
+  yargs: {
+    alias: ["o", "output"],
+    type: "string",
+    description: "Path to output the fetched project to"
+  }
+};
+var workspace = {
+  name: "workspace",
+  yargs: {
+    alias: ["w"],
+    description: "Path to the project workspace (ie, path to openfn.yaml)"
+  },
+  ensure: (opts) => {
+    const ws = opts.workspace ?? process.env.OPENFN_WORKSPACE;
+    if (!ws) {
+      opts.workspace = process.cwd();
+    } else {
+      opts.workspace = resolve_path_default(ws);
+    }
+  }
+};
 
 // src/projects/util.ts
-import …
+import path9 from "node:path";
 import { mkdir as mkdir3, writeFile as writeFile5 } from "node:fs/promises";
 
 // src/errors.ts
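
The new ensureLogOpts turns a comma-separated --log value into a per-component level map, expanding shorthands like cmp and merging the result over defaultLoggerOptions. Tracing a typical value through the source above (the opts shape is inferred, not documented here):

    // For --log "info,cmp=debug", i.e. opts.log = "info,cmp=debug":
    //   "info"      has no "=", so it becomes the default level
    //   "cmp=debug" hits componentShorthands, so compiler logs at debug
    // Merged over defaultLoggerOptions, ensureLogOpts leaves:
    const parsedLog = {
      default: "info",   // from "info"
      job: "debug",      // carried over from defaultLoggerOptions
      compiler: "debug"  // from "cmp=debug"
    };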
@@ -1776,17 +2013,17 @@ var CLIError = class extends Error {
 };
 
 // src/projects/util.ts
-var loadAppAuthConfig = (options7, logger) => {
+var loadAppAuthConfig = (options8, logger) => {
   const { OPENFN_API_KEY, OPENFN_ENDPOINT } = process.env;
   const config2 = {
-    apiKey: options7.apiKey,
-    endpoint: options7.endpoint
+    apiKey: options8.apiKey,
+    endpoint: options8.endpoint
   };
-  if (!options7.apiKey && OPENFN_API_KEY) {
+  if (!options8.apiKey && OPENFN_API_KEY) {
     logger.info("Using OPENFN_API_KEY environment variable");
     config2.apiKey = OPENFN_API_KEY;
   }
-  if (!options7.endpoint && OPENFN_ENDPOINT) {
+  if (!options8.endpoint && OPENFN_ENDPOINT) {
     logger.info("Using OPENFN_ENDPOINT environment variable");
     config2.endpoint = OPENFN_ENDPOINT;
   }
@@ -1798,13 +2035,18 @@ var ensureExt = (filePath, ext) => {
   }
   return filePath;
 };
-var …
-const …
+var getSerializePath = (project, workspacePath, outputPath2) => {
+  const outputRoot = resolve_path_default(outputPath2 || workspacePath);
+  const projectsDir = project?.config.dirs.projects ?? ".projects";
+  return outputPath2 ?? `${outputRoot}/${projectsDir}/${project.qname}`;
+};
+var serialize = async (project, outputPath2, formatOverride, dryRun2 = false) => {
+  const root = path9.dirname(outputPath2);
   await mkdir3(root, { recursive: true });
   const format = formatOverride ?? project.config?.formats.project;
   const output = project?.serialize("project", { format });
   const maybeWriteFile = (filePath, output2) => {
-    if (!…
+    if (!dryRun2) {
       return writeFile5(filePath, output2);
     }
   };
@@ -1853,6 +2095,31 @@ async function fetchProject(endpoint2, apiKey2, projectId, logger, snapshots2) {
     throw error;
   }
 }
+async function deployProject(endpoint2, apiKey2, state, logger) {
+  try {
+    const url2 = getLightningUrl(endpoint2);
+    const response = await fetch(url2, {
+      method: "POST",
+      headers: {
+        Authorization: `Bearer ${apiKey2}`,
+        "Content-Type": "application/json"
+      },
+      body: JSON.stringify(state)
+    });
+    if (!response.ok) {
+      const body = await response.json();
+      logger?.error("Failed to deploy project:");
+      logger?.error(JSON.stringify(body, null, 2));
+      throw new CLIError(
+        `Failed to deploy project ${state.name}: ${response.status}`
+      );
+    }
+    return await response.json();
+  } catch (error) {
+    handleCommonErrors({ endpoint: endpoint2, apiKey: apiKey2 }, error);
+    throw error;
+  }
+}
 function handleCommonErrors(config2, error) {
   if (error.cause?.code === "ECONNREFUSED") {
     throw new DeployError(
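
deployProject above is a plain authenticated POST of the serialized project state. A hedged usage sketch (the endpoint, key, and state are placeholders, and the logger is a minimal stub, not the CLI's own):

    // Placeholders throughout - this only shows the call shape.
    const logger = { error: console.error };
    const state = { name: "my-project" }; // output of project.serialize("state", ...)
    const result = await deployProject(
      "https://app.openfn.org",
      process.env.OPENFN_API_KEY,
      state,
      logger
    );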
@@ -1866,33 +2133,225 @@ var DeployError = class extends Error {
|
|
|
1866
2133
|
}
|
|
1867
2134
|
};
|
|
1868
2135
|
|
|
1869
|
-
// src/
|
|
1870
|
-
|
|
1871
|
-
|
|
1872
|
-
|
|
1873
|
-
|
|
1874
|
-
});
|
|
1875
|
-
const state = project.serialize("state", { format: "json" });
|
|
1876
|
-
logger.debug("Converted local project to app state:");
|
|
1877
|
-
logger.debug(JSON.stringify(state, null, 2));
|
|
1878
|
-
config2.endpoint ??= project.openfn?.endpoint;
|
|
1879
|
-
logger.info("Sending project to app...");
|
|
1880
|
-
await deployProject(config2, state);
|
|
1881
|
-
logger.success("Updated project at", config2.endpoint);
|
|
1882
|
-
}
|
|
1883
|
-
|
|
1884
|
-
// src/deploy/handler.ts
|
|
1885
|
-
var actualDeploy = deploy;
|
|
1886
|
-
async function deployHandler(options7, logger, deployFn = actualDeploy) {
|
|
1887
|
-
if (options7.beta) {
|
|
1888
|
-
return handler(options7, logger);
|
|
2136
|
+
// src/util/command-builders.ts
|
|
2137
|
+
import c from "chalk";
|
|
2138
|
+
var expandYargs = (y) => {
|
|
2139
|
+
if (typeof y === "function") {
|
|
2140
|
+
return y();
|
|
1889
2141
|
}
|
|
1890
|
-
|
|
1891
|
-
|
|
1892
|
-
|
|
1893
|
-
|
|
1894
|
-
|
|
1895
|
-
|
|
2142
|
+
return y;
|
|
2143
|
+
};
|
|
2144
|
+
function build(opts, yargs) {
|
|
2145
|
+
return opts.reduce((_y, o) => {
|
|
2146
|
+
if (!o?.name) {
|
|
2147
|
+
console.error(`ERROR: INVALID COMMAND OPTION PASSED`, o);
|
|
2148
|
+
console.error("Check the options passed to the command builder");
|
|
2149
|
+
throw new Error("Invalid command");
|
|
2150
|
+
}
|
|
2151
|
+
return yargs.option(o.name, expandYargs(o.yargs));
|
|
2152
|
+
}, yargs);
|
|
2153
|
+
}
|
|
2154
|
+
var ensure = (command8, opts) => (yargs) => {
|
|
2155
|
+
yargs.command = command8;
|
|
2156
|
+
opts.filter((opt) => opt.ensure).forEach((opt) => {
|
|
2157
|
+
try {
|
|
2158
|
+
opt.ensure(yargs);
|
|
2159
|
+
} catch (e) {
|
|
2160
|
+
console.log(e);
|
|
2161
|
+
console.error(
|
|
2162
|
+
c.red(`
|
|
2163
|
+
Error parsing command arguments: ${command8}.${opt.name}
|
|
2164
|
+
`)
|
|
2165
|
+
);
|
|
2166
|
+
console.error(c.red("Aborting"));
|
|
2167
|
+
console.error();
|
|
2168
|
+
process.exit(9);
|
|
2169
|
+
}
|
|
2170
|
+
});
|
|
2171
|
+
};
|
|
2172
|
+
var override = (command8, yargs) => {
|
|
2173
|
+
return {
|
|
2174
|
+
...command8,
|
|
2175
|
+
yargs: {
|
|
2176
|
+
...command8.yargs || {},
|
|
2177
|
+
...yargs
|
|
2178
|
+
}
|
|
2179
|
+
};
|
|
2180
|
+
};
|
|
2181
|
+
|
|
2182
|
+
// src/projects/deploy.ts
|
|
2183
|
+
var options = [
|
|
2184
|
+
env,
|
|
2185
|
+
workspace,
|
|
2186
|
+
dryRun,
|
|
2187
|
+
apiKey,
|
|
2188
|
+
endpoint,
|
|
2189
|
+
log,
|
|
2190
|
+
logJson,
|
|
2191
|
+
snapshots,
|
|
2192
|
+
force,
|
|
2193
|
+
confirm
|
|
2194
|
+
];
|
|
2195
|
+
var printProjectName = (project) => `${project.id} (${project.openfn?.uuid || "<no UUID>"})`;
|
|
2196
|
+
var command = {
|
|
2197
|
+
command: "deploy",
|
|
2198
|
+
describe: `Deploy the checked out project to a Lightning Instance`,
|
|
2199
|
+
builder: (yargs) => build(options, yargs).positional("project", {
|
|
2200
|
+
describe: "The UUID, local id or local alias of the project to deploy to"
|
|
2201
|
+
}).example(
|
|
2202
|
+
"deploy",
|
|
2203
|
+
"Deploy the checkout project to the connected instance"
|
|
2204
|
+
),
|
|
2205
|
+
handler: ensure("project-deploy", options)
|
|
2206
|
+
};
|
|
2207
|
+
async function handler(options8, logger) {
|
|
2208
|
+
logger.warn(
|
|
2209
|
+
"WARNING: the project deploy command is in BETA and may not be stable. Use cautiously on production projects."
|
|
2210
|
+
);
|
|
2211
|
+
const config2 = loadAppAuthConfig(options8, logger);
|
|
2212
|
+
logger.info("Attempting to load checked-out project from workspace");
|
|
2213
|
+
const localProject = await Project.from("fs", {
|
|
2214
|
+
root: options8.workspace || "."
|
|
2215
|
+
});
|
|
2216
|
+
logger.success(`Loaded local project ${printProjectName(localProject)}`);
|
|
2217
|
+
let remoteProject;
|
|
2218
|
+
try {
|
|
2219
|
+
const { data } = await fetchProject(
|
|
2220
|
+
config2.endpoint,
|
|
2221
|
+
config2.apiKey,
|
|
2222
|
+
localProject.uuid ?? localProject.id,
|
|
2223
|
+
logger
|
|
2224
|
+
);
|
|
2225
|
+
remoteProject = await Project.from("state", data, {
|
|
2226
|
+
endpoint: config2.endpoint
|
|
2227
|
+
});
|
|
2228
|
+
logger.success("Downloaded latest version of project at ", config2.endpoint);
|
|
2229
|
+
} catch (e) {
|
|
2230
|
+
console.log(e);
|
|
2231
|
+
throw e;
|
|
2232
|
+
}
|
|
2233
|
+
if (!options8.force && localProject.uuid !== remoteProject.uuid) {
|
|
2234
|
+
+    logger.error(`UUID conflict!
+
+Your local project (${localProject.uuid}) has a different UUID to the remote project (${remoteProject.uuid}).
+
+Pass --force to override this error and deploy anyway.`);
+    return false;
+  }
+  const diffs = reportDiff(remoteProject, localProject, logger);
+  if (!diffs.length) {
+    logger.success("Nothing to deploy");
+    return;
+  }
+  if (!localProject.canMergeInto(remoteProject)) {
+    if (!options8.force) {
+      logger.error(`Error: Projects have diverged!
+
+The remote project has been edited since the local project was branched. Changes may be lost.
+
+Pass --force to override this error and deploy anyway.`);
+      return;
+    } else {
+      logger.warn(
+        "Remote project has not diverged from local project! Pushing anyway as -f passed"
+      );
+    }
+  } else {
+    logger.info(
+      "Remote project has not diverged from local project - it is safe to deploy \u{1F389}"
+    );
+  }
+  logger.info("Merging changes into remote project");
+  const merged = Project.merge(localProject, remoteProject, {
+    mode: "replace",
+    force: true
+  });
+  const state = merged.serialize("state", {
+    format: "json"
+  });
+  logger.debug("Converted merged local project to app state:");
+  logger.debug(JSON.stringify(state, null, 2));
+  config2.endpoint ??= localProject.openfn?.endpoint;
+  if (options8.dryRun) {
+    logger.always("dryRun option set: skipping upload step");
+  } else {
+    if (options8.confirm) {
+      if (!await logger.confirm(
+        `Ready to deploy changes to ${config2.endpoint}?`
+      )) {
+        logger.always("Cancelled deployment");
+        return false;
+      }
+    }
+    logger.info("Sending project to app...");
+    const { data: result } = await deployProject(
+      config2.endpoint,
+      config2.apiKey,
+      state,
+      logger
+    );
+    const finalProject = await Project.from(
+      "state",
+      result,
+      {
+        endpoint: config2.endpoint
+      },
+      merged.config
+    );
+    const finalOutputPath = getSerializePath(localProject, options8.workspace);
+    logger.debug("Updating local project at ", finalOutputPath);
+    await serialize(finalProject, finalOutputPath);
+  }
+  logger.success("Updated project at", config2.endpoint);
+}
+var reportDiff = (local, remote, logger) => {
+  const diffs = remote.diff(local);
+  if (diffs.length === 0) {
+    logger.info("No workflow changes detected");
+    return diffs;
+  }
+  const added = diffs.filter((d) => d.type === "added");
+  const changed = diffs.filter((d) => d.type === "changed");
+  const removed = diffs.filter((d) => d.type === "removed");
+  if (added.length > 0) {
+    logger.break();
+    logger.always(c2.green("Workflows added:"));
+    for (const diff of added) {
+      logger.always(c2.green(` - ${diff.id}`));
+    }
+    logger.break();
+  }
+  if (changed.length > 0) {
+    logger.break();
+    logger.always(c2.yellow("Workflows modified:"));
+    for (const diff of changed) {
+      logger.always(c2.yellow(` - ${diff.id}`));
+    }
+    logger.break();
+  }
+  if (removed.length > 0) {
+    logger.break();
+    logger.always(c2.red("Workflows removed:"));
+    for (const diff of removed) {
+      logger.always(c2.red(` - ${diff.id}`));
+    }
+    logger.break();
+  }
+  return diffs;
+};
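Editor's note: the new beta deploy path above classifies workflow diffs into three buckets (added, changed, removed) before printing a colour-coded summary. A minimal sketch of the same grouping, assuming diff objects shaped like { id, type } as consumed by reportDiff (sample data invented):

    // Hypothetical diff records, shaped like those reportDiff filters above
    const diffs = [
      { id: "wf-a", type: "added" },
      { id: "wf-b", type: "changed" },
    ];
    // Group by type, mirroring the three filter calls in reportDiff
    const byType = (type) => diffs.filter((d) => d.type === type);
    console.log({
      added: byType("added"),
      changed: byType("changed"),
      removed: byType("removed"),
    });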
+
+// src/deploy/handler.ts
+var actualDeploy = deploy;
+async function deployHandler(options8, logger, deployFn = actualDeploy) {
+  if (options8.beta) {
+    return handler(options8, logger);
+  }
+  try {
+    const config2 = mergeOverrides(await getConfig(options8.configPath), options8);
+    logger.debug("Deploying with config", JSON.stringify(config2, null, 2));
+    if (options8.confirm === false) {
+      config2.requireConfirmation = options8.confirm;
+    }
     if (process.env["OPENFN_API_KEY"]) {
       logger.info("Using OPENFN_API_KEY environment variable");
       config2.apiKey = process.env["OPENFN_API_KEY"];
@@ -1916,15 +2375,15 @@ async function deployHandler(options7, logger, deployFn = actualDeploy) {
     throw error;
   }
 }
-function mergeOverrides(config2,
+function mergeOverrides(config2, options8) {
   return {
     ...config2,
     apiKey: pickFirst(process.env["OPENFN_API_KEY"], config2.apiKey),
     endpoint: pickFirst(process.env["OPENFN_ENDPOINT"], config2.endpoint),
-    statePath: pickFirst(
-    specPath: pickFirst(
-    configPath:
-    requireConfirmation: pickFirst(
+    statePath: pickFirst(options8.statePath, config2.statePath),
+    specPath: pickFirst(options8.projectPath, config2.specPath),
+    configPath: options8.configPath,
+    requireConfirmation: pickFirst(options8.confirm, config2.requireConfirmation)
   };
 }
 function pickFirst(...args) {
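Editor's note: the body of pickFirst falls outside this hunk, but from its call sites in mergeOverrides it evidently returns the first defined argument, giving environment variables precedence over CLI options and the config file. A hedged sketch of that precedence (this pickFirst body is an assumption, not the shipped implementation):

    // Assumed implementation: return the first argument that is defined
    const pickFirst = (...args) => args.find((arg) => arg !== undefined && arg !== null);
    // Env var beats the config-file value, as in mergeOverrides above
    process.env.OPENFN_API_KEY = "key-from-env";
    console.log(pickFirst(process.env.OPENFN_API_KEY, "key-from-config")); // "key-from-env"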
@@ -1935,20 +2394,20 @@ var handler_default6 = deployHandler;
 // src/docgen/handler.ts
 import { writeFile as writeFile6 } from "node:fs/promises";
 import { readFileSync, writeFileSync, mkdirSync, rmSync } from "node:fs";
-import
+import path10 from "node:path";
 import { describePackage } from "@openfn/describe-package";
 import { getNameAndVersion as getNameAndVersion4 } from "@openfn/runtime";
 var RETRY_DURATION = 500;
 var RETRY_COUNT = 20;
 var TIMEOUT_MS = 1e3 * 60;
 var actualDocGen = (specifier) => describePackage(specifier, {});
-var ensurePath = (filePath) => mkdirSync(
+var ensurePath = (filePath) => mkdirSync(path10.dirname(filePath), { recursive: true });
 var generatePlaceholder = (path17) => {
   writeFileSync(path17, `{ "loading": true, "timestamp": ${Date.now()}}`);
 };
 var finish = (logger, resultPath) => {
   logger.success("Done! Docs can be found at:\n");
-  logger.print(` ${
+  logger.print(` ${path10.resolve(resultPath)}`);
 };
 var generateDocs = async (specifier, path17, docgen, logger) => {
   const result = await docgen(specifier);
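Editor's note: generatePlaceholder writes a `{ "loading": true, "timestamp": ... }` stub so that a concurrent docgen run can distinguish an in-progress build from a finished one; RETRY_DURATION, RETRY_COUNT and TIMEOUT_MS above bound how long a second process waits. A rough sketch of how such a placeholder might be checked for staleness (the isStale helper is illustrative, not part of the package):

    // Illustrative staleness check against the placeholder written above
    const TIMEOUT_MS = 1e3 * 60;
    const isStale = (placeholder) =>
      placeholder.loading && Date.now() - placeholder.timestamp > TIMEOUT_MS;
    console.log(isStale({ loading: true, timestamp: Date.now() - 2 * TIMEOUT_MS })); // true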
@@ -1987,8 +2446,8 @@ var waitForDocs = async (docs, path17, logger, retryDuration = RETRY_DURATION) =
     throw e;
   }
 };
-var docgenHandler = (
-  const { specifier, repoDir } =
+var docgenHandler = (options8, logger, docgen = actualDocGen, retryDuration = RETRY_DURATION) => {
+  const { specifier, repoDir } = options8;
   const { version } = getNameAndVersion4(specifier);
   if (!version) {
     logger.error("Error: No version number detected");
@@ -2029,14 +2488,14 @@ var handler_default7 = docgenHandler;
 
 // src/docs/handler.ts
 import { readFile as readFile5 } from "node:fs/promises";
-import
+import c3 from "chalk";
 import { getNameAndVersion as getNameAndVersion5, getLatestVersion } from "@openfn/runtime";
 var describeFn = (adaptorName, fn) => [
-
+  c3.green(
     `## ${fn.name}(${fn.parameters.map(({ name }) => name).join(",")})`
   ),
   `${fn.description}`,
-
+  c3.green("### Usage Examples"),
   fn.examples.length ? fn.examples.map(({ code, caption }) => {
     if (caption) {
       return `${caption}:
@@ -2044,21 +2503,21 @@ ${code}`;
     }
     return code;
   }).join("\n\n") : "None",
-
+  c3.green("### API Reference"),
   `https://docs.openfn.org/adaptors/packages/${adaptorName.replace(
     "@openfn/language-",
     ""
   )}-docs#${fn.name}
 `
 ].join("\n\n");
-var describeLib = (adaptorName, data) =>
+var describeLib = (adaptorName, data) => c3.green(`## ${adaptorName} ${data.version}`) + `
 
 ${data.functions.map(
-  (fn) => ` ${
+  (fn) => ` ${c3.yellow(fn.name)} (${fn.parameters.map((p) => p.name).join(", ")})`
 ).sort().join("\n")}
 `;
-var docsHandler = async (
-  const { adaptor, operation, repoDir } =
+var docsHandler = async (options8, logger) => {
+  const { adaptor, operation, repoDir } = options8;
   const adaptors = expand_adaptors_default([adaptor]);
   const [adaptorName] = adaptors;
   let { name, version } = getNameAndVersion5(adaptorName);
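Editor's note: describeFn above assembles a chalk-coloured help page from a function's parsed docs. Given a minimal fn record, the joined output looks roughly like the comment below (colour codes omitted; the sample data is invented):

    // Invented sample input for describeFn-style rendering
    const fn = {
      name: "get",
      parameters: [{ name: "path" }, { name: "params" }],
      description: "Make a GET request",
      examples: [{ code: "get('/patients')", caption: "Fetch patients" }],
    };
    // Rendered sections, roughly:
    //   ## get(path,params)
    //   Make a GET request
    //   ### Usage Examples
    //   Fetch patients:
    //   get('/patients')
    //   ### API Reference
    //   https://docs.openfn.org/adaptors/packages/<adaptor>-docs#get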
@@ -2118,12 +2577,12 @@ var handler_default8 = docsHandler;
 import { getNameAndVersion as getNameAndVersion6 } from "@openfn/runtime";
 import { createHash } from "node:crypto";
 import { mkdir as mkdir4, readFile as readFile6, writeFile as writeFile7, readdir, rm } from "node:fs/promises";
-import
+import path11 from "node:path";
 var UNSUPPORTED_FILE_NAME = "unsupported.json";
 var getCachePath2 = (repoDir, key) => {
-  const base =
+  const base = path11.join(repoDir, "meta");
   if (key) {
-    return
+    return path11.join(base, key.endsWith(".json") ? key : `${key}.json`);
   }
   return base;
 };
@@ -2165,7 +2624,7 @@ var get2 = async (repoPath, key) => {
 };
 var set2 = async (repoPath, key, result) => {
   const p = getCachePath2(repoPath, key);
-  await mkdir4(
+  await mkdir4(path11.dirname(p), { recursive: true });
   await writeFile7(p, JSON.stringify(result));
 };
 var getUnsupportedCachePath = (repoDir) => {
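Editor's note: getCachePath2 above resolves metadata cache entries under a meta/ folder inside the repo directory, appending .json when the key has no extension. A self-contained restatement with an illustrative path:

    import path from "node:path";

    // Same resolution logic as getCachePath2 above
    const getCachePath = (repoDir, key) => {
      const base = path.join(repoDir, "meta");
      return key ? path.join(base, key.endsWith(".json") ? key : `${key}.json`) : base;
    };
    console.log(getCachePath("/tmp/openfn/repo", "abc123")); // /tmp/openfn/repo/meta/abc123.json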
@@ -2186,447 +2645,180 @@ var compareVersions = (version1, version2) => {
   if (v1.major !== v2.major)
     return v1.major - v2.major;
   if (v1.minor !== v2.minor)
-    return v1.minor - v2.minor;
-  return v1.patch - v2.patch;
-};
-var isAdaptorUnsupported = async (adaptorSpecifier, repoDir) => {
-  const { name, version } = getNameAndVersion6(adaptorSpecifier);
-  if (!version)
-    return false;
-  const cache = await getUnsupportedCache(repoDir);
-  if (!cache || !cache[name]) {
-    return false;
-  }
-  const cached = cache[name];
-  const currentParsed = parseVersion(version);
-  const cachedParsed = parseVersion(cached.lastCheckedVersion);
-  if (currentParsed.major > cachedParsed.major || currentParsed.major === cachedParsed.major && currentParsed.minor > cachedParsed.minor) {
-    return false;
-  }
-  return true;
-};
-var markAdaptorAsUnsupported = async (adaptorSpecifier, repoDir) => {
-  const { name, version } = getNameAndVersion6(adaptorSpecifier);
-  if (!version)
-    return;
-  const cachePath = getUnsupportedCachePath(repoDir);
-  let cache = {};
-  try {
-    const cacheContent = await readFile6(cachePath, "utf8");
-    cache = JSON.parse(cacheContent);
-  } catch (error) {
-  }
-  const parsed = parseVersion(version);
-  const existing = cache[name];
-  if (!existing || compareVersions(version, existing.lastCheckedVersion) > 0) {
-    cache[name] = {
-      lastCheckedVersion: version,
-      majorMinor: parsed.majorMinor,
-      timestamp: Date.now()
-    };
-    await mkdir4(path10.dirname(cachePath), { recursive: true });
-    await writeFile7(cachePath, JSON.stringify(cache, null, 2));
-  }
-};
-
-// src/metadata/handler.ts
-import { getModuleEntryPoint } from "@openfn/runtime";
-var decorateMetadata = (metadata) => {
-  metadata.created = (/* @__PURE__ */ new Date()).toISOString();
-};
-var getAdaptorPath = async (adaptor, logger, repoDir) => {
-  let adaptorPath;
-  let adaptorSpecifier;
-  if (adaptor.match("=")) {
-    const parts = adaptor.split("=");
-    adaptorSpecifier = parts[0];
-    adaptorPath = parts[1];
-  } else {
-    if (adaptor.endsWith(".js")) {
-      return adaptor;
-    }
-    adaptorSpecifier = adaptor;
-    if (adaptor.startsWith("/")) {
-      adaptorPath = adaptor;
-    }
-  }
-  if (!adaptorPath || !adaptorPath.endsWith("js")) {
-    const entry = await getModuleEntryPoint(
-      adaptorSpecifier,
-      adaptorPath,
-      repoDir,
-      logger
-    );
-    adaptorPath = entry?.path;
-  }
-  logger.debug("loading adaptor from", adaptorPath);
-  return adaptorPath;
-};
-var shouldAutoinstall = (adaptor) => adaptor?.length > 0 && !adaptor.startsWith("/") && !adaptor.includes("=");
-var metadataHandler = async (options7, logger) => {
-  const { repoDir, adaptors, keepUnsupported } = options7;
-  let adaptor = adaptors[0];
-  if (await isAdaptorUnsupported(adaptor, repoDir)) {
-    logger.info(
-      `Adaptor ${adaptor} is known to not support metadata (cached) - skipping lookup`
-    );
-    logger.error("No metadata helper found");
-    process.exit(1);
-  }
-  const state = await load_state_default({}, options7, logger);
-  logger.success(`Generating metadata`);
-  logger.info("config:", state);
-  const config2 = state.configuration;
-  if (!config2 || Object.keys(config2).length === 0) {
-    logger.error("ERROR: Invalid configuration passed");
-    process.exit(1);
-  }
-  const finish2 = () => {
-    logger.success("Done!");
-    logger.print(getCachePath2(repoDir, id));
-  };
-  const id = generateKey(config2, adaptor);
-  if (!options7.force) {
-    logger.debug("config hash: ", id);
-    const cached = await get2(repoDir, id);
-    if (cached) {
-      logger.success("Returning metadata from cache");
-      return finish2();
-    }
-  }
-  let wasAutoInstalled = false;
-  try {
-    if (shouldAutoinstall(adaptor)) {
-      const autoinstallResult = await install(
-        { packages: [adaptor], repoDir },
-        logger
-      );
-      wasAutoInstalled = true;
-      adaptor = autoinstallResult[0];
-    }
-    const adaptorPath = await getAdaptorPath(adaptor, logger, options7.repoDir);
-    if (!adaptorPath) {
-      throw new Error(`Could not resolve adaptor path for ${adaptor}`);
-    }
-    const mod = await import(adaptorPath);
-    if (mod.metadata && typeof mod.metadata === "function") {
-      logger.info("Metadata function found. Generating metadata...");
-      const result = await mod.metadata(config2);
-      decorateMetadata(result);
-      await set2(
-        repoDir,
-        id,
-        result
-      );
-      finish2();
-    } else {
-      logger.error("No metadata helper found");
-      if (wasAutoInstalled && !keepUnsupported) {
-        logger.info("Removing unsupported adaptor from disk...");
-        await removePackage(adaptor, repoDir, logger);
-        await markAdaptorAsUnsupported(adaptor, repoDir);
-        logger.info("Adaptor removed and marked as unsupported");
-      } else if (wasAutoInstalled && keepUnsupported) {
-        if (adaptor === "@openfn/language-openfn") {
-          logger.log({ wasAutoInstalled, keepUnsupported });
-        }
-        logger.info(
-          "Keeping unsupported adaptor as requested by --keep-unsupported flag"
-        );
-        await markAdaptorAsUnsupported(adaptor, repoDir);
-      }
-      process.exit(1);
-    }
-  } catch (e) {
-    logger.error("Exception while generating metadata");
-    logger.error(e);
-    process.exit(1);
-  }
-};
-var handler_default9 = metadataHandler;
-
-// src/pull/handler.ts
-import path14 from "path";
-import fs5 from "node:fs/promises";
-import {
-  getConfig as getConfig2,
-  getProject,
-  getSpec,
-  getStateFromProjectPayload,
-  syncRemoteSpec
-} from "@openfn/deploy";
-
-// src/util/command-builders.ts
-import c2 from "chalk";
-var expandYargs = (y) => {
-  if (typeof y === "function") {
-    return y();
-  }
-  return y;
-};
-function build(opts, yargs) {
-  return opts.reduce((_y, o) => {
-    if (!o?.name) {
-      console.error(`ERROR: INVALID COMMAND OPTION PASSED`, o);
-      console.error("Check the options passed to the command builder");
-      throw new Error("Invalid command");
-    }
-    return yargs.option(o.name, expandYargs(o.yargs));
-  }, yargs);
-}
-var ensure = (command7, opts) => (yargs) => {
-  yargs.command = command7;
-  opts.filter((opt) => opt.ensure).forEach((opt) => {
-    try {
-      opt.ensure(yargs);
-    } catch (e) {
-      console.log(e);
-      console.error(
-        c2.red(`
-Error parsing command arguments: ${command7}.${opt.name}
-`)
-      );
-      console.error(c2.red("Aborting"));
-      console.error();
-      process.exit(9);
-    }
-  });
-};
-var override = (command7, yargs) => {
-  return {
-    ...command7,
-    yargs: {
-      ...command7.yargs || {},
-      ...yargs
-    }
-  };
-};
-
-// src/projects/fetch.ts
-import path12 from "node:path";
-import Project2, { Workspace as Workspace2 } from "@openfn/project";
-
-// src/util/ensure-log-opts.ts
-var defaultLoggerOptions = {
-  default: "default",
-  // TODO fix to lower case
-  job: "debug"
-};
-var ERROR_MESSAGE_LOG_LEVEL = "Unknown log level. Valid levels are none, debug, info and default.";
-var ERROR_MESSAGE_LOG_COMPONENT = "Unknown log component. Valid components are cli, compiler, runtime and job.";
-var componentShorthands = {
-  cmp: "compiler",
-  rt: "runtime",
-  "r/t": "runtime"
-};
-var ensureLogOpts = (opts) => {
-  const components = {};
-  const outgoingOpts = opts;
-  if (!opts.log && /^(version|test)$/.test(opts.command)) {
-    outgoingOpts.log = { default: "info" };
-    return outgoingOpts;
-  }
-  if (opts.log) {
-    const parts = opts.log.split(",");
-    parts.forEach((l) => {
-      let component = "";
-      let level = "";
-      if (l.match(/=/)) {
-        const parts2 = l.split("=");
-        component = parts2[0].toLowerCase();
-        if (componentShorthands[component]) {
-          component = componentShorthands[component];
-        }
-        level = parts2[1].toLowerCase();
-      } else {
-        component = "default";
-        level = l.toLowerCase();
-        if (level === "none" && !parts.find((p) => p.startsWith("job"))) {
-          components["job"] = "none";
-        }
-      }
-      if (!/^(cli|runtime|compiler|job|default)$/i.test(component)) {
-        throw new Error(ERROR_MESSAGE_LOG_COMPONENT);
-      }
-      level = level.toLowerCase();
-      if (!isValidLogLevel(level)) {
-        throw new Error(ERROR_MESSAGE_LOG_LEVEL);
-      }
-      components[component] = level;
-    });
-  }
-  outgoingOpts.log = {
-    ...defaultLoggerOptions,
-    ...components
-  };
-  return outgoingOpts;
-};
-var ensure_log_opts_default = ensureLogOpts;
-
-// src/options.ts
-var apiKey = {
-  name: "apikey",
-  yargs: {
-    alias: ["pat", "token", "api-key"],
-    description: "API Key, Personal Access Token (PAT), or other access token from Lightning"
-  },
-  ensure: (opts) => {
-    if (!opts.apikey) {
-      opts.apiKey = process.env.OPENFN_API_KEY;
-    }
-  }
-};
-var endpoint = {
-  name: "endpoint",
-  yargs: {
-    alias: ["lightning"],
-    description: "[beta only] URL to Lightning endpoint"
-  }
-};
-var force = {
-  name: "force",
-  yargs: {
-    alias: ["f"],
-    boolean: true,
-    description: "Force metadata to be regenerated",
-    default: false
-  }
-};
-var log = {
-  name: "log",
-  yargs: {
-    alias: ["l"],
-    description: "Set the log level",
-    string: true
-  },
-  ensure: (opts) => {
-    ensure_log_opts_default(opts);
-  }
-};
-var logJson = {
-  name: "log-json",
-  yargs: {
-    description: "Output all logs as JSON objects",
-    boolean: true
-  }
-};
-var projectPath = {
-  name: "project-path",
-  yargs: {
-    string: true,
-    alias: ["p"],
-    description: "The location of your project.yaml file"
-  }
+    return v1.minor - v2.minor;
+  return v1.patch - v2.patch;
 };
-var
-name
-
-
+var isAdaptorUnsupported = async (adaptorSpecifier, repoDir) => {
+  const { name, version } = getNameAndVersion6(adaptorSpecifier);
+  if (!version)
+    return false;
+  const cache = await getUnsupportedCache(repoDir);
+  if (!cache || !cache[name]) {
+    return false;
   }
-
-
-
-
-
-array: true
+  const cached = cache[name];
+  const currentParsed = parseVersion(version);
+  const cachedParsed = parseVersion(cached.lastCheckedVersion);
+  if (currentParsed.major > cachedParsed.major || currentParsed.major === cachedParsed.major && currentParsed.minor > cachedParsed.minor) {
+    return false;
   }
+  return true;
 };
-var
-name
-
-
-
-
-
+var markAdaptorAsUnsupported = async (adaptorSpecifier, repoDir) => {
+  const { name, version } = getNameAndVersion6(adaptorSpecifier);
+  if (!version)
+    return;
+  const cachePath = getUnsupportedCachePath(repoDir);
+  let cache = {};
+  try {
+    const cacheContent = await readFile6(cachePath, "utf8");
+    cache = JSON.parse(cacheContent);
+  } catch (error) {
   }
-
-
-
-
-
-
+  const parsed = parseVersion(version);
+  const existing = cache[name];
+  if (!existing || compareVersions(version, existing.lastCheckedVersion) > 0) {
+    cache[name] = {
+      lastCheckedVersion: version,
+      majorMinor: parsed.majorMinor,
+      timestamp: Date.now()
+    };
+    await mkdir4(path11.dirname(cachePath), { recursive: true });
+    await writeFile7(cachePath, JSON.stringify(cache, null, 2));
   }
 };
 
-// src/
-
-
-
-
-
-
-
-
-
+// src/metadata/handler.ts
+import { getModuleEntryPoint } from "@openfn/runtime";
+var decorateMetadata = (metadata) => {
+  metadata.created = (/* @__PURE__ */ new Date()).toISOString();
+};
+var getAdaptorPath = async (adaptor, logger, repoDir) => {
+  let adaptorPath;
+  let adaptorSpecifier;
+  if (adaptor.match("=")) {
+    const parts = adaptor.split("=");
+    adaptorSpecifier = parts[0];
+    adaptorPath = parts[1];
+  } else {
+    if (adaptor.endsWith(".js")) {
+      return adaptor;
+    }
+    adaptorSpecifier = adaptor;
+    if (adaptor.startsWith("/")) {
+      adaptorPath = adaptor;
   }
-return Object.fromEntries(
-arg.split(",").map((pair) => {
-const [k, v] = pair.split("=");
-return [k.trim(), v.trim()];
-})
-);
-}
-}
-function isObject(arg) {
-return typeof arg === "object" && arg !== null && !Array.isArray(arg);
-}
-
-// src/projects/options.ts
-var env = {
-name: "env",
-yargs: {
-description: "Environment name (eg staging, prod, branch)",
-hidden: true
   }
-
-
-
-
-
-
+  if (!adaptorPath || !adaptorPath.endsWith("js")) {
+    const entry = await getModuleEntryPoint(
+      adaptorSpecifier,
+      adaptorPath,
+      repoDir,
+      logger
+    );
+    adaptorPath = entry?.path;
   }
+  logger.debug("loading adaptor from", adaptorPath);
+  return adaptorPath;
 };
-var
-
-
-
-
+var shouldAutoinstall = (adaptor) => adaptor?.length > 0 && !adaptor.startsWith("/") && !adaptor.includes("=");
+var metadataHandler = async (options8, logger) => {
+  const { repoDir, adaptors, keepUnsupported } = options8;
+  let adaptor = adaptors[0];
+  if (await isAdaptorUnsupported(adaptor, repoDir)) {
+    logger.info(
+      `Adaptor ${adaptor} is known to not support metadata (cached) - skipping lookup`
+    );
+    logger.error("No metadata helper found");
+    process.exit(1);
   }
-};
-
-
-
-
-
-
+  const state = await load_state_default({}, options8, logger);
+  logger.success(`Generating metadata`);
+  logger.info("config:", state);
+  const config2 = state.configuration;
+  if (!config2 || Object.keys(config2).length === 0) {
+    logger.error("ERROR: Invalid configuration passed");
+    process.exit(1);
   }
-
-
-
-
-
-
-
+  const finish2 = () => {
+    logger.success("Done!");
+    logger.print(getCachePath2(repoDir, id));
+  };
+  const id = generateKey(config2, adaptor);
+  if (!options8.force) {
+    logger.debug("config hash: ", id);
+    const cached = await get2(repoDir, id);
+    if (cached) {
+      logger.success("Returning metadata from cache");
+      return finish2();
+    }
   }
-
-
-
-
-
-
-
-
-
-
-
+  let wasAutoInstalled = false;
+  try {
+    if (shouldAutoinstall(adaptor)) {
+      const autoinstallResult = await install(
+        { packages: [adaptor], repoDir },
+        logger
+      );
+      wasAutoInstalled = true;
+      adaptor = autoinstallResult[0];
+    }
+    const adaptorPath = await getAdaptorPath(adaptor, logger, options8.repoDir);
+    if (!adaptorPath) {
+      throw new Error(`Could not resolve adaptor path for ${adaptor}`);
+    }
+    const mod = await import(adaptorPath);
+    if (mod.metadata && typeof mod.metadata === "function") {
+      logger.info("Metadata function found. Generating metadata...");
+      const result = await mod.metadata(config2);
+      decorateMetadata(result);
+      await set2(
+        repoDir,
+        id,
+        result
+      );
+      finish2();
     } else {
-
+      logger.error("No metadata helper found");
+      if (wasAutoInstalled && !keepUnsupported) {
+        logger.info("Removing unsupported adaptor from disk...");
+        await removePackage(adaptor, repoDir, logger);
+        await markAdaptorAsUnsupported(adaptor, repoDir);
+        logger.info("Adaptor removed and marked as unsupported");
+      } else if (wasAutoInstalled && keepUnsupported) {
+        if (adaptor === "@openfn/language-openfn") {
+          logger.log({ wasAutoInstalled, keepUnsupported });
+        }
+        logger.info(
+          "Keeping unsupported adaptor as requested by --keep-unsupported flag"
+        );
+        await markAdaptorAsUnsupported(adaptor, repoDir);
+      }
+      process.exit(1);
     }
+  } catch (e) {
+    logger.error("Exception while generating metadata");
+    logger.error(e);
+    process.exit(1);
   }
 };
+var handler_default9 = metadataHandler;
+
+// src/pull/handler.ts
+import path14 from "path";
+import fs5 from "node:fs/promises";
+import {
+  getConfig as getConfig2,
+  getProject,
+  getSpec,
+  getStateFromProjectPayload,
+  syncRemoteSpec
+} from "@openfn/deploy";
 
 // src/projects/fetch.ts
-
+import path12 from "node:path";
+import Project2, { Workspace as Workspace2 } from "@openfn/project";
+var options2 = [
   alias,
   apiKey,
   endpoint,
@@ -2641,42 +2833,44 @@ var options = [
   env,
   workspace
 ];
-var
+var command2 = {
   command: "fetch [project]",
   describe: `Download the latest version of a project from a lightning server (does not expand the project, use checkout)`,
-  builder: (yargs) => build(
+  builder: (yargs) => build(options2, yargs).positional("project", {
     describe: "The id, alias or UUID of the project to fetch. If not set, will default to the active project"
   }).example(
     "fetch 57862287-23e6-4650-8d79-e1dd88b24b1c",
     "Fetch an updated copy of a the above spec and state from a Lightning Instance"
   ),
-  handler: ensure("project-fetch",
+  handler: ensure("project-fetch", options2)
 };
-var
-var handler2 = async (
-const workspacePath =
+var printProjectName2 = (project) => `${project.qname} (${project.id})`;
+var handler2 = async (options8, logger) => {
+  const workspacePath = options8.workspace ?? process.cwd();
   logger.debug("Using workspace at", workspacePath);
   const workspace2 = new Workspace2(workspacePath, logger, false);
-  const { outputPath: outputPath2 } =
+  const { outputPath: outputPath2 } = options8;
   const localTargetProject = await resolveOutputProject(
     workspace2,
-
+    options8,
     logger
   );
-  const remoteProject = await fetchRemoteProject(workspace2,
-  ensureTargetCompatible(
-  const
-
-
+  const remoteProject = await fetchRemoteProject(workspace2, options8, logger);
+  ensureTargetCompatible(options8, remoteProject, localTargetProject);
+  const finalOutputPath = getSerializePath(
+    remoteProject,
+    workspacePath,
+    outputPath2
+  );
   let format = void 0;
   if (outputPath2) {
     const ext = path12.extname(outputPath2).substring(1);
     if (ext.length) {
       format = ext;
     }
-    if (
+    if (options8.alias) {
       logger.warn(
-        `WARNING: alias "${
+        `WARNING: alias "${options8.alias}" was set, but will be ignored as output path was provided`
       );
     }
   }
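Editor's note: in handler2 above, an explicit output path wins over --alias: the serialize format is inferred from the output file's extension and any alias is ignored with a warning. A small restatement of the extension inference (the sample path is illustrative):

    import path from "node:path";

    const outputPath = "./backups/project.json"; // hypothetical -o value
    const format = path.extname(outputPath).substring(1) || undefined;
    console.log(format); // "json"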
@@ -2686,55 +2880,55 @@ var handler2 = async (options7, logger) => {
   );
   return remoteProject;
 };
-async function resolveOutputProject(workspace2,
+async function resolveOutputProject(workspace2, options8, logger) {
   logger.debug("Checking for local copy of project...");
-  if (
+  if (options8.outputPath) {
     try {
-      const customProject = await Project2.from("path",
+      const customProject = await Project2.from("path", options8.outputPath);
       logger.debug(
-        `Found existing local project ${
-
+        `Found existing local project ${printProjectName2(customProject)} at`,
+        options8.outputPath
       );
       return customProject;
     } catch (e) {
-      logger.debug("No project found at",
+      logger.debug("No project found at", options8.outputPath);
     }
   }
-  if (
-  const aliasProject = workspace2.get(
+  if (options8.alias) {
+    const aliasProject = workspace2.get(options8.alias);
     if (aliasProject) {
       logger.debug(
         `Found local project from alias:`,
-
+        printProjectName2(aliasProject)
       );
       return aliasProject;
     } else {
-      logger.debug(`No local project found with alias ${
+      logger.debug(`No local project found with alias ${options8.alias}`);
     }
   }
-  const project = workspace2.get(
+  const project = workspace2.get(options8.project);
   if (project) {
     logger.debug(
       `Found local project from identifier:`,
-
+      printProjectName2(project)
     );
     return project;
   } else {
     logger.debug(
       `No local project found matching identifier: `,
-
+      options8.project
    );
   }
 }
-async function fetchRemoteProject(workspace2,
+async function fetchRemoteProject(workspace2, options8, logger) {
   logger.debug(`Fetching latest project data from app`);
-  const config2 = loadAppAuthConfig(
-  let projectUUID =
-  const localProject = workspace2.get(
-  if (localProject?.openfn?.uuid && localProject.openfn.uuid !==
+  const config2 = loadAppAuthConfig(options8, logger);
+  let projectUUID = options8.project;
+  const localProject = workspace2.get(options8.project);
+  if (localProject?.openfn?.uuid && localProject.openfn.uuid !== options8.project) {
    projectUUID = localProject.openfn.uuid;
    logger.debug(
-      `Resolved ${
+      `Resolved ${options8.project} to UUID ${projectUUID} from local project ${printProjectName2(
        localProject
      )}}`
    );
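Editor's note: resolveOutputProject above tries three sources in order — an explicit outputPath, then an --alias lookup, then the plain project identifier — and returns the first hit. A compressed sketch of that precedence (the workspace stub is hypothetical):

    // Minimal sketch of the lookup order in resolveOutputProject
    const resolveLocal = (options, workspace) =>
      (options.outputPath && workspace.fromPath(options.outputPath)) ||
      (options.alias && workspace.get(options.alias)) ||
      workspace.get(options.project);

    // Usage with a stub workspace:
    const ws = { fromPath: () => null, get: (id) => ({ id }) };
    console.log(resolveLocal({ project: "main" }, ws)); // { id: "main" }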
@@ -2754,7 +2948,7 @@ async function fetchRemoteProject(workspace2, options7, logger) {
     },
     {
       ...workspace2.getConfig(),
-      alias:
+      alias: options8.alias ?? localProject?.alias ?? "main"
     }
   );
   logger.debug(
@@ -2762,20 +2956,20 @@ async function fetchRemoteProject(workspace2, options7, logger) {
   );
   return project;
 }
-function ensureTargetCompatible(
+function ensureTargetCompatible(options8, remoteProject, localProject) {
   if (localProject) {
-    if (!
+    if (!options8.force && localProject.uuid != remoteProject.uuid) {
       const error = new Error("PROJECT_EXISTS");
       error.message = "A project with a different UUID exists at this location";
       error.fix = `You have tried to fetch a remote project into a local project with a different UUID
 
 Try adding an alias to rename the new project:
 
-openfn fetch ${
+openfn fetch ${options8.project} --alias ${remoteProject.id}
 
 To ignore this error and override the local file, pass --force (-f)
 
-openfn fetch ${
+openfn fetch ${options8.project} --force
 `;
       error.fetched_project = {
         uuid: remoteProject.uuid,
@@ -2793,7 +2987,7 @@ To ignore this error and override the local file, pass --force (-f)
   const hasAnyHistory = remoteProject.workflows.find(
     (w) => w.workflow.history?.length
   );
-  const skipVersionCheck =
+  const skipVersionCheck = options8.force || // The user forced the checkout
   !hasAnyHistory;
   if (!skipVersionCheck && !remoteProject.canMergeInto(localProject)) {
     throw new Error("Error! An incompatible project exists at this location");
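Editor's note: the guard above skips the merge-compatibility check either when --force is passed or when the remote project carries no workflow history at all; only otherwise is canMergeInto consulted. A condensed restatement:

    // Restating the guard: force, or a history-less remote, bypasses the check
    const skipVersionCheck = (force, hasAnyHistory) => force || !hasAnyHistory;
    console.log(skipVersionCheck(false, false)); // true - nothing to compare against
    console.log(skipVersionCheck(false, true));  // false - canMergeInto will be checked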
@@ -2806,19 +3000,19 @@ import Project3, { Workspace as Workspace3 } from "@openfn/project";
 import path13 from "path";
 import fs4 from "fs";
 import { rimraf } from "rimraf";
-var
-var
+var options3 = [log, workspace];
+var command3 = {
   command: "checkout <project>",
   describe: "Switch to a different OpenFn project in the same workspace",
-  handler: ensure("project-checkout",
-  builder: (yargs) => build(
+  handler: ensure("project-checkout", options3),
+  builder: (yargs) => build(options3, yargs).positional("project", {
     describe: "The id, alias or UUID of the project to chcekout",
     demandOption: true
   })
 };
-var handler3 = async (
-const projectIdentifier =
-const workspacePath =
+var handler3 = async (options8, logger) => {
+  const projectIdentifier = options8.project;
+  const workspacePath = options8.workspace ?? process.cwd();
   const workspace2 = new Workspace3(workspacePath, logger);
   const { project: _, ...config2 } = workspace2.getConfig();
   let switchProject;
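Editor's note on the destructuring in handler3 above: the rest pattern drops the current project key from the workspace config before switching projects. For example:

    // Rest destructuring removes one key, as in handler3 above
    const { project: _, ...config } = { project: "old", formats: { project: "yaml" } };
    console.log(config); // { formats: { project: "yaml" } }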
@@ -2850,49 +3044,48 @@ var handler3 = async (options7, logger) => {
 };
 
 // src/projects/pull.ts
-var
+var options4 = [
   alias,
   env,
   workspace,
   apiKey,
   endpoint,
   log,
-  override(
+  override(path8, {
     description: "path to output the project to"
   }),
   logJson,
-  projectPath,
   snapshots,
-
+  path8,
   force
 ];
-var
+var command4 = {
   command: "pull [project]",
   describe: `Pull a project from a Lightning Instance and expand to the file system (ie fetch + checkout)`,
-  builder: (yargs) => build(
+  builder: (yargs) => build(options4, yargs).positional("project", {
     describe: "The UUID, local id or local alias of the project to pull"
   }).example(
     "pull 57862287-23e6-4650-8d79-e1dd88b24b1c",
     "Pull project with a UUID from a lightning instance"
   ),
-  handler: ensure("project-pull",
+  handler: ensure("project-pull", options4)
 };
-async function handler4(
-await handler2(
+async function handler4(options8, logger) {
+  await handler2(options8, logger);
   logger.success(`Downloaded latest project version`);
-  await handler3(
+  await handler3(options8, logger);
   logger.success(`Checked out project locally`);
 }
 var pull_default = handler4;
 
 // src/pull/handler.ts
-async function pullHandler(
-if (
-
-return pull_default(
+async function pullHandler(options8, logger) {
+  if (options8.beta) {
+    options8.project = options8.projectId;
+    return pull_default(options8, logger);
   }
   try {
-    const config2 = mergeOverrides2(await getConfig2(
+    const config2 = mergeOverrides2(await getConfig2(options8.configPath), options8);
     if (process.env["OPENFN_API_KEY"]) {
       logger.info("Using OPENFN_API_KEY environment variable");
       config2.apiKey = process.env["OPENFN_API_KEY"];
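Editor's note: handler4 above shows that the beta pull is literally fetch followed by checkout, and pullHandler routes to it when options.beta is set, mapping the legacy projectId option onto project. A self-contained sketch of that routing (betaPull stands in for pull_default):

    // Sketch of pullHandler's beta routing; argument shapes assumed
    const betaPull = async (options) => `fetch+checkout for ${options.project}`;
    async function pull(options) {
      if (options.beta) {
        options.project = options.projectId; // legacy flag mapped across
        return betaPull(options);
      }
      return "legacy pull path";
    }
    pull({ beta: true, projectId: "57862287" }).then(console.log);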
@@ -2906,8 +3099,8 @@ async function pullHandler(options7, logger) {
   );
   const { data: project } = await getProject(
     config2,
-
-
+    options8.projectId,
+    options8.snapshots
   );
   if (!project) {
     logger.error("ERROR: Project not found.");
@@ -2920,8 +3113,8 @@ async function pullHandler(options7, logger) {
   const state = getStateFromProjectPayload(project);
   logger.always("Downloading the project spec (as YAML) from the server.");
   const queryParams = new URLSearchParams();
-  queryParams.append("id",
-
+  queryParams.append("id", options8.projectId);
+  options8.snapshots?.forEach(
     (snapshot) => queryParams.append("snapshots[]", snapshot)
   );
   const url2 = new URL(
@@ -2970,13 +3163,13 @@ async function pullHandler(options7, logger) {
       throw error;
     }
 }
-function mergeOverrides2(config2,
+function mergeOverrides2(config2, options8) {
   return {
     ...config2,
     apiKey: pickFirst2(process.env["OPENFN_API_KEY"], config2.apiKey),
     endpoint: pickFirst2(process.env["OPENFN_ENDPOINT"], config2.endpoint),
-    configPath:
-    requireConfirmation: pickFirst2(
+    configPath: options8.configPath,
+    requireConfirmation: pickFirst2(options8.confirm, config2.requireConfirmation)
   };
 }
 function pickFirst2(...args) {
@@ -2984,10 +3177,11 @@ function pickFirst2(...args) {
 }
 var handler_default10 = pullHandler;
 
-// src/projects/
-var
-__export(
+// src/projects/index.ts
+var projects_exports = {};
+__export(projects_exports, {
   checkout: () => handler3,
+  deploy: () => handler,
   fetch: () => handler2,
   list: () => handler5,
   merge: () => handler7,
@@ -2997,19 +3191,19 @@ __export(handler_exports, {
 
 // src/projects/list.ts
 import { Workspace as Workspace4 } from "@openfn/project";
-var
-var
+var options5 = [log, workspace];
+var command5 = {
   command: "list [project-path]",
   describe: "List all the openfn projects available in the current directory",
   aliases: ["project", "$0"],
-  handler: ensure("project-list",
-  builder: (yargs) => build(
+  handler: ensure("project-list", options5),
+  builder: (yargs) => build(options5, yargs)
 };
-var handler5 = async (
+var handler5 = async (options8, logger) => {
   logger.info("Searching for projects in workspace at:");
-  logger.info(" ",
+  logger.info(" ", options8.workspace);
   logger.break();
-  const workspace2 = new Workspace4(
+  const workspace2 = new Workspace4(options8.workspace);
   if (!workspace2.valid) {
     throw new Error("No OpenFn projects found");
   }
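Editor's note: __export above is esbuild's bundling helper; it typically defines an enumerable getter per named export, so projects_exports.fetch lazily resolves to handler2. The usual shape of that helper (this is the standard esbuild output, not copied from this bundle):

    var __defProp = Object.defineProperty;
    var __export = (target, all) => {
      for (var name in all)
        __defProp(target, name, { get: all[name], enumerable: true });
    };
    // Mirrors the usage above:
    const exportsObj = {};
    __export(exportsObj, { fetch: () => "handler2" });
    console.log(exportsObj.fetch); // "handler2"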
@@ -3028,25 +3222,25 @@ ${project.workflows.map((w) => " - " + w.id).join("\n")}`;
 
 // src/projects/version.ts
 import { Workspace as Workspace5 } from "@openfn/project";
-var
-var
+var options6 = [workflow, workspace, workflowMappings];
+var command6 = {
   command: "version [workflow]",
   describe: "Returns the version hash of a given workflow in a workspace",
-  handler: ensure("project-version",
-  builder: (yargs) => build(
+  handler: ensure("project-version", options6),
+  builder: (yargs) => build(options6, yargs)
 };
-var handler6 = async (
-const workspace2 = new Workspace5(
+var handler6 = async (options8, logger) => {
+  const workspace2 = new Workspace5(options8.workspace);
   if (!workspace2.valid) {
     logger.error("Command was run in an invalid openfn workspace");
     return;
   }
   const output = /* @__PURE__ */ new Map();
   const activeProject = workspace2.getActiveProject();
-  if (
-  const workflow2 = activeProject?.getWorkflow(
+  if (options8.workflow) {
+    const workflow2 = activeProject?.getWorkflow(options8.workflow);
     if (!workflow2) {
-      logger.error(`No workflow found with id ${
+      logger.error(`No workflow found with id ${options8.workflow}`);
       return;
     }
     output.set(workflow2.name || workflow2.id, workflow2.getVersionHash());
@@ -3060,7 +3254,7 @@ var handler6 = async (options7, logger) => {
     return;
   }
   let final;
-  if (
+  if (options8.json) {
     final = JSON.stringify(Object.fromEntries(output), void 0, 2);
   } else {
     final = Array.from(output.entries()).map(([key, value]) => key + "\n" + value).join("\n\n");
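Editor's note: handler6 collects workflow-name to version-hash pairs into a Map and prints them either as JSON or as a plain name/hash listing, exactly as the two branches above show. For example (the hash is invented):

    const output = new Map([["my-workflow", "abc123"]]);
    // --json style output:
    console.log(JSON.stringify(Object.fromEntries(output), undefined, 2));
    // default style output:
    console.log(Array.from(output.entries()).map(([k, v]) => k + "\n" + v).join("\n\n"));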
@@ -3074,7 +3268,7 @@ ${final}`);
 import Project5, { Workspace as Workspace6 } from "@openfn/project";
 import path15 from "node:path";
 import fs6 from "node:fs/promises";
-var
+var options7 = [
   removeUnmapped,
   workflowMappings,
   workspace,
@@ -3099,22 +3293,22 @@ var options6 = [
     description: "Force a merge even when workflows are incompatible"
   })
 ];
-var
+var command7 = {
   command: "merge <project>",
   describe: "Merges the specified project (by UUID, id or alias) into the currently checked out project",
-  handler: ensure("project-merge",
-  builder: (yargs) => build(
+  handler: ensure("project-merge", options7),
+  builder: (yargs) => build(options7, yargs)
 };
-var handler7 = async (
-const workspacePath =
+var handler7 = async (options8, logger) => {
+  const workspacePath = options8.workspace;
   const workspace2 = new Workspace6(workspacePath);
   if (!workspace2.valid) {
     logger.error("Command was run in an invalid openfn workspace");
     return;
   }
   let targetProject;
-  if (
-  const basePath = path15.resolve(
+  if (options8.base) {
+    const basePath = path15.resolve(options8.base);
     logger.debug("Loading target project from path", basePath);
     targetProject = await Project5.from("path", basePath);
   } else {
@@ -3125,7 +3319,7 @@ var handler7 = async (options7, logger) => {
   }
   logger.debug(`Loading target project from workspace (${targetProject.id})`);
   }
-  const sourceProjectIdentifier =
+  const sourceProjectIdentifier = options8.project;
   let sourceProject;
   if (/\.(ya?ml|json)$/.test(sourceProjectIdentifier)) {
     const filePath = path15.join(workspacePath, sourceProjectIdentifier);
@@ -3151,20 +3345,20 @@ var handler7 = async (options7, logger) => {
     logger.error("The checked out project has no id");
     return;
   }
-  const finalPath =
+  const finalPath = options8.outputPath ?? workspace2.getProjectPath(targetProject.id);
   if (!finalPath) {
     logger.error("Path to checked out project not found.");
     return;
   }
   const final = Project5.merge(sourceProject, targetProject, {
-    removeUnmapped:
-    workflowMappings:
-    force:
+    removeUnmapped: options8.removeUnmapped,
+    workflowMappings: options8.workflowMappings,
+    force: options8.force
   });
   let outputFormat = workspace2.config.formats.project;
-  if (
+  if (options8.outputPath?.endsWith(".json")) {
     outputFormat = "json";
-  } else if (
+  } else if (options8.outputPath?.endsWith(".yaml")) {
     outputFormat = "yaml";
   }
   let finalState = final.serialize("state", {
@@ -3179,8 +3373,8 @@ var handler7 = async (options7, logger) => {
   await handler3(
     {
       workspace: workspacePath,
-      project:
-      log:
+      project: options8.outputPath ? finalPath : final.id,
+      log: options8.log
     },
     logger
   );
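Editor's note: as with fetch, the merge handler infers its serialize format from the output path's extension, falling back to the workspace's configured project format. A restatement of that selection (workspace config shape assumed):

    // Same format selection as in handler7 above
    const pickFormat = (outputPath, workspaceDefault) =>
      outputPath?.endsWith(".json") ? "json"
        : outputPath?.endsWith(".yaml") ? "yaml"
        : workspaceDefault;
    console.log(pickFormat("merged.json", "yaml")); // "json"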
@@ -3210,8 +3404,8 @@ var loadVersionFromPath = (adaptorPath) => {
     return "unknown";
   }
 };
-var printVersions = async (logger,
-const { adaptors, logJson: logJson2 } =
+var printVersions = async (logger, options8 = {}, includeComponents = false) => {
+  const { adaptors, logJson: logJson2 } = options8;
   let longestAdaptorName = "";
   const adaptorList = [];
   adaptors?.forEach((adaptor) => {
@@ -3221,7 +3415,7 @@ var printVersions = async (logger, options7 = {}, includeComponents = false) =>
     const [namePart, pathPart] = adaptor.split("=");
     adaptorVersion = loadVersionFromPath(pathPart);
     adaptorName = getNameAndVersion7(namePart).name;
-  } else if (
+  } else if (options8.monorepoPath) {
     adaptorName = getNameAndVersion7(adaptor).name;
     adaptorVersion = "monorepo";
   } else {
@@ -3310,8 +3504,8 @@ var handlers = {
   docs: handler_default8,
   metadata: handler_default9,
   pull: handler_default10,
-  projects:
-  project:
+  projects: projects_exports,
+  project: projects_exports,
   ["collections-get"]: handler_default4.get,
   ["collections-set"]: handler_default4.set,
   ["collections-remove"]: handler_default4.remove,
@@ -3319,6 +3513,7 @@ var handlers = {
   ["repo-install"]: install,
   ["repo-pwd"]: pwd,
   ["repo-list"]: list,
+  ["project-deploy"]: handler,
   ["project-pull"]: handler4,
   ["project-list"]: handler5,
   ["project-version"]: handler6,
@@ -3327,13 +3522,13 @@ var handlers = {
   ["project-fetch"]: handler2,
   version: async (opts, logger) => print_versions_default(logger, opts, true)
 };
-var parse = async (
-const logger = log2 || logger_default(CLI,
-if (
-await print_versions_default(logger,
+var parse = async (options8, log2) => {
+  const logger = log2 || logger_default(CLI, options8);
+  if (options8.command === "execute" || options8.command === "test") {
+    await print_versions_default(logger, options8);
   }
   report(logger);
-  const { monorepoPath } =
+  const { monorepoPath } = options8;
   if (monorepoPath) {
     if (monorepoPath === "ERR") {
       logger.error(
@@ -3344,19 +3539,19 @@ var parse = async (options7, log2) => {
   }
   await validateMonoRepo(monorepoPath, logger);
   logger.success(`Loading adaptors from monorepo at ${monorepoPath}`);
-
+  options8.adaptors = map_adaptors_to_monorepo_default(
     monorepoPath,
-
+    options8.adaptors,
     logger
   );
   }
-  const handler8 = handlers[
+  const handler8 = handlers[options8.command];
   if (!handler8) {
-    logger.error(`Unrecognised command: ${
+    logger.error(`Unrecognised command: ${options8.command}`);
     process.exit(1);
   }
   try {
-    return await handler8(
+    return await handler8(options8, logger);
   } catch (e) {
     if (!process.exitCode) {
       process.exitCode = e.exitCode || 1;
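Editor's note: parse above is a plain command-to-handler table lookup, with unknown commands exiting with code 1. A minimal sketch of the same dispatch (handlers stubbed):

    // Table dispatch, as in parse above
    const handlers = { "project-list": async () => "listed" };
    const run = async (options) => {
      const handler = handlers[options.command];
      if (!handler) {
        console.error(`Unrecognised command: ${options.command}`);
        process.exit(1);
      }
      return handler(options);
    };
    run({ command: "project-list" }).then(console.log);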