@openfn/cli 1.14.0 → 1.15.0
- package/dist/index.js +22 -2
- package/dist/process/runner.js +82 -44
- package/package.json +5 -5
package/dist/index.js
CHANGED
@@ -320,11 +320,21 @@ var ignoreImports = {
 };
 var getBaseDir = (opts2) => {
   const basePath = opts2.path ?? ".";
-  if (/\.(jso?n?)$/.test(basePath)) {
+  if (/\.(jso?n?|ya?ml)$/.test(basePath)) {
     return nodePath.dirname(basePath);
   }
   return basePath;
 };
+var projectName = {
+  name: "project-name",
+  yargs: {
+    description: "The name of an openfn project",
+    string: true
+  },
+  ensure: (opts2) => {
+    return opts2.projectName;
+  }
+};
 var inputPath = {
   name: "input-path",
   yargs: {
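The only behavioural change in getBaseDir is the widened extension test: YAML workflow files now resolve to their containing directory the same way .js/.json inputs always did. A quick sketch of the old vs. new pattern, with hypothetical file names:

    // Hypothetical inputs illustrating the widened pattern
    const oldPattern = /\.(jso?n?)$/;
    const newPattern = /\.(jso?n?|ya?ml)$/;
    console.log(oldPattern.test("workflow.yaml")); // false - YAML not recognised
    console.log(newPattern.test("workflow.yaml")); // true
    console.log(newPattern.test("workflow.yml"));  // true
    console.log(newPattern.test("job.js"));        // true (unchanged)
    console.log(newPattern.test("my-project"));    // false - treated as a directory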
@@ -1063,9 +1073,19 @@ var projectsCommand = {
 };
 var command_default11 = projectsCommand;

+// src/checkout/command.ts
+var options10 = [projectName, projectPath];
+var checkoutCommand = {
+  command: "checkout <project-name>",
+  describe: "Switch to a different openfn project in the same workspace",
+  handler: ensure("checkout", options10),
+  builder: (yargs2) => build(options10, yargs2)
+};
+var command_default12 = checkoutCommand;
+
 // src/cli.ts
 var y = yargs(hideBin(process.argv));
-var cmd = y.command(command_default7).command(command_default3).command(command_default2).command(command_default4).command(install).command(repo).command(command_default10).command(command_default6).command(command_default).command(command_default8).command(command_default5).command(command_default9).command(command_default11).command({
+var cmd = y.command(command_default7).command(command_default3).command(command_default2).command(command_default4).command(install).command(repo).command(command_default10).command(command_default6).command(command_default).command(command_default8).command(command_default5).command(command_default9).command(command_default11).command(command_default12).command({
   command: "version",
   describe: "Show the currently installed version of the CLI, compiler and runtime.",
   handler: (argv) => {
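These registrations wire the new checkout command into the yargs chain alongside the existing commands. A plausible invocation, assuming a workspace containing a project named my-project (the project name and path below are hypothetical, and the --project-path flag name is inferred from the projectPath option object):

    # Hypothetical usage of the new command
    openfn checkout my-project
    openfn checkout my-project --project-path ./my-workspace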
package/dist/process/runner.js
CHANGED
@@ -127,14 +127,14 @@ var callApollo = async (apolloBaseUrl, serviceName, payload, logger) => {
     });
   });
 };
-var loadPayload = async (logger, path13) => {
-  if (!path13) {
+var loadPayload = async (logger, path14) => {
+  if (!path14) {
     logger.warn("No JSON payload provided");
     logger.warn("Most apollo services require JSON to be uploaded");
     return {};
   }
-  if (path13.endsWith(".json")) {
-    const str = await readFile(path13, "utf8");
+  if (path14.endsWith(".json")) {
+    const str = await readFile(path14, "utf8");
     const json = JSON.parse(str);
     logger.debug("Loaded JSON payload");
     return json;
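The change here is mechanical (the bundler renamed the local path13 to path14 after a new import shifted its numbering), but the contract of loadPayload is visible: no path yields {} with a warning, and a .json path is read and parsed. A sketch of both branches, with hypothetical arguments:

    // Hypothetical calls, assuming a logger exposing warn/debug
    const payload = await loadPayload(logger, "./tmp/payload.json");
    // -> the parsed contents of the file
    const empty = await loadPayload(logger, undefined);
    // -> {} after warning that most apollo services expect JSON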
@@ -256,13 +256,13 @@ var execute_default = async (plan, input, opts, logger) => {
 };
 function parseAdaptors(plan) {
   const extractInfo = (specifier) => {
-    const [module, path13] = specifier.split("=");
+    const [module, path14] = specifier.split("=");
     const { name, version } = getNameAndVersion(module);
     const info = {
       name
     };
-    if (path13) {
-      info.path = path13;
+    if (path14) {
+      info.path = path14;
     }
     if (version) {
       info.version = version;
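Again a pure rename, but the hunk documents the adaptor specifier grammar: split("=") peels off an optional local path, then getNameAndVersion splits any @version suffix. Illustrative results (the specifiers are hypothetical, and extractInfo is a closure local to parseAdaptors, shown here only for shape):

    extractInfo("@openfn/language-http");
    // -> { name: "@openfn/language-http" }
    extractInfo("@openfn/language-http@6.0.0");
    // -> { name: "@openfn/language-http", version: "6.0.0" }
    extractInfo("@openfn/language-http=/repo/adaptors/http");
    // -> { name: "@openfn/language-http", path: "/repo/adaptors/http" }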
@@ -522,10 +522,10 @@ var stripVersionSpecifier = (specifier) => {
   return specifier;
 };
 var resolveSpecifierPath = async (pattern, repoDir, log) => {
-  const [specifier, path13] = pattern.split("=");
-  if (path13) {
-    log.debug(`Resolved ${specifier} to path: ${path13}`);
-    return path13;
+  const [specifier, path14] = pattern.split("=");
+  if (path14) {
+    log.debug(`Resolved ${specifier} to path: ${path14}`);
+    return path14;
   }
   const repoPath = await getModulePath(specifier, repoDir, log);
   if (repoPath) {
@@ -544,12 +544,12 @@ var loadTransformOptions = async (opts, log) => {
   let exports;
   const [specifier] = adaptorInput.split("=");
   log.debug(`Trying to preload types for ${specifier}`);
-  const path13 = await resolveSpecifierPath(adaptorInput, opts.repoDir, log);
-  if (path13) {
+  const path14 = await resolveSpecifierPath(adaptorInput, opts.repoDir, log);
+  if (path14) {
     try {
-      exports = await preloadAdaptorExports(path13, log);
+      exports = await preloadAdaptorExports(path14, log);
     } catch (e) {
-      log.error(`Failed to load adaptor typedefs from path ${path13}`);
+      log.error(`Failed to load adaptor typedefs from path ${path14}`);
       log.error(e);
     }
   }
@@ -1001,8 +1001,8 @@ var loadXPlan = async (plan, options, logger, defaultName = "") => {
 };

 // src/util/assert-path.ts
-var assert_path_default = (path13) => {
-  if (!path13) {
+var assert_path_default = (path14) => {
+  if (!path14) {
     console.error("ERROR: no path provided!");
     console.error("\nUsage:");
     console.error("  open path/to/job");
|
|
|
1056
1056
|
var assertStepStructure = (step, index) => {
|
|
1057
1057
|
const allowedKeys = [
|
|
1058
1058
|
"id",
|
|
1059
|
+
"type",
|
|
1059
1060
|
"name",
|
|
1060
1061
|
"next",
|
|
1061
1062
|
"previous",
|
|
@@ -1687,20 +1688,20 @@ var RETRY_COUNT = 20;
|
|
|
1687
1688
|
var TIMEOUT_MS = 1e3 * 60;
|
|
1688
1689
|
var actualDocGen = (specifier) => describePackage(specifier, {});
|
|
1689
1690
|
var ensurePath = (filePath) => mkdirSync(path7.dirname(filePath), { recursive: true });
|
|
1690
|
-
var generatePlaceholder = (
|
|
1691
|
-
writeFileSync(
|
|
1691
|
+
var generatePlaceholder = (path14) => {
|
|
1692
|
+
writeFileSync(path14, `{ "loading": true, "timestamp": ${Date.now()}}`);
|
|
1692
1693
|
};
|
|
1693
1694
|
var finish = (logger, resultPath) => {
|
|
1694
1695
|
logger.success("Done! Docs can be found at:\n");
|
|
1695
1696
|
logger.print(` ${path7.resolve(resultPath)}`);
|
|
1696
1697
|
};
|
|
1697
|
-
var generateDocs = async (specifier,
|
|
1698
|
+
var generateDocs = async (specifier, path14, docgen, logger) => {
|
|
1698
1699
|
const result = await docgen(specifier);
|
|
1699
|
-
await writeFile5(
|
|
1700
|
-
finish(logger,
|
|
1701
|
-
return
|
|
1700
|
+
await writeFile5(path14, JSON.stringify(result, null, 2));
|
|
1701
|
+
finish(logger, path14);
|
|
1702
|
+
return path14;
|
|
1702
1703
|
};
|
|
1703
|
-
var waitForDocs = async (docs,
|
|
1704
|
+
var waitForDocs = async (docs, path14, logger, retryDuration = RETRY_DURATION) => {
|
|
1704
1705
|
try {
|
|
1705
1706
|
if (docs.hasOwnProperty("loading")) {
|
|
1706
1707
|
logger.info("Docs are being loaded by another process. Waiting.");
|
|
@@ -1712,19 +1713,19 @@ var waitForDocs = async (docs, path13, logger, retryDuration = RETRY_DURATION) =
         clearInterval(i);
         reject(new Error("Timed out waiting for docs to load"));
       }
-      const updated = JSON.parse(readFileSync(path13, "utf8"));
+      const updated = JSON.parse(readFileSync(path14, "utf8"));
       if (!updated.hasOwnProperty("loading")) {
         logger.info("Docs found!");
         clearInterval(i);
-        resolve(path13);
+        resolve(path14);
       }
       count++;
     }, retryDuration);
   });
 } else {
-  logger.info(`Docs already written to cache at ${path13}`);
-  finish(logger, path13);
-  return path13;
+  logger.info(`Docs already written to cache at ${path14}`);
+  finish(logger, path14);
+  return path14;
 }
 } catch (e) {
   logger.error("Existing doc JSON corrupt. Aborting");
@@ -1741,28 +1742,28 @@ var docgenHandler = (options, logger, docgen = actualDocGen, retryDuration = RET
     process.exit(9);
   }
   logger.success(`Generating docs for ${specifier}`);
-  const path13 = `${repoDir}/docs/${specifier}.json`;
-  ensurePath(path13);
+  const path14 = `${repoDir}/docs/${specifier}.json`;
+  ensurePath(path14);
   const handleError2 = () => {
     logger.info("Removing placeholder");
-    rmSync(path13);
+    rmSync(path14);
   };
   try {
-    const existing = readFileSync(path13, "utf8");
+    const existing = readFileSync(path14, "utf8");
     const json = JSON.parse(existing);
     if (json && json.timeout && Date.now() - json.timeout >= TIMEOUT_MS) {
       logger.info(`Expired placeholder found. Removing.`);
-      rmSync(path13);
+      rmSync(path14);
       throw new Error("TIMEOUT");
     }
-    return waitForDocs(json, path13, logger, retryDuration);
+    return waitForDocs(json, path14, logger, retryDuration);
   } catch (e) {
     if (e.message !== "TIMEOUT") {
-      logger.info(`Docs JSON not found at ${path13}`);
+      logger.info(`Docs JSON not found at ${path14}`);
     }
     logger.debug("Generating placeholder");
-    generatePlaceholder(path13);
-    return generateDocs(specifier, path13, docgen, logger).catch((e2) => {
+    generatePlaceholder(path14);
+    return generateDocs(specifier, path14, docgen, logger).catch((e2) => {
       logger.error("Error generating documentation");
       logger.error(e2);
       handleError2();
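Beyond the renames, this hunk shows the whole docgen locking protocol in one place: the placeholder file doubles as a cross-process lock, so whichever process writes it first generates the docs while others poll the file until "loading" disappears. Roughly, under the names above:

    // Sketch of the flow in docgenHandler (comments only; logic as shown above)
    // 1. Read <repoDir>/docs/<specifier>.json
    //    - real docs JSON             -> hand off to waitForDocs, which returns at once
    //    - placeholder still loading  -> waitForDocs polls every retryDuration,
    //      rejecting after RETRY_COUNT attempts
    //    - missing or expired         -> fall through
    // 2. generatePlaceholder() claims the slot, generateDocs() overwrites it
    //    with real JSON, and handleError2() removes it if generation fails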
@@ -1813,7 +1814,7 @@ var docsHandler = async (options, logger) => {
     logger.success(`Showing docs for ${adaptorName} v${version}`);
   }
   logger.info("Generating/loading documentation...");
-  const path13 = await handler_default7(
+  const path14 = await handler_default7(
     {
       specifier: `${name}@${version}`,
       repoDir
@@ -1822,8 +1823,8 @@ var docsHandler = async (options, logger) => {
     createNullLogger()
   );
   let didError = false;
-  if (path13) {
-    const source = await readFile4(path13, "utf8");
+  if (path14) {
+    const source = await readFile4(path14, "utf8");
     const data = JSON.parse(source);
     let desc;
     if (operation) {
@@ -2293,9 +2294,45 @@ ${project.workflows.map((w) => " - " + w.name).join("\n")}`;
 }
 var handler_default11 = projectsHandler;

+// src/checkout/handler.ts
+import { Workspace as Workspace2 } from "@openfn/project";
+import path12 from "path";
+import fs6 from "fs";
+import { rimraf as rimraf2 } from "rimraf";
+var checkoutHandler = async (options, logger) => {
+  const commandPath = path12.resolve(process.cwd(), options.projectPath ?? ".");
+  const workspace = new Workspace2(commandPath);
+  if (!workspace.valid) {
+    logger.error("Command was run in an invalid openfn workspace");
+    return;
+  }
+  const switchProject = workspace.get(options.projectName);
+  if (!switchProject) {
+    logger.error(
+      `Project with id ${options.projectName} not found in the workspace`
+    );
+    return;
+  }
+  const config = workspace.getConfig();
+  await rimraf2(path12.join(commandPath, config?.workflowRoot || "workflows"));
+  const files = switchProject.serialize("fs");
+  for (const f in files) {
+    if (files[f]) {
+      fs6.mkdirSync(path12.join(commandPath, path12.dirname(f)), {
+        recursive: true
+      });
+      fs6.writeFileSync(path12.join(commandPath, f), files[f]);
+    } else {
+      logger.warn("WARNING! No content for file", f);
+    }
+  }
+  logger.success(`Expanded project to ${commandPath}`);
+};
+var handler_default12 = checkoutHandler;
+
 // src/util/print-versions.ts
 import { readFileSync as readFileSync2 } from "node:fs";
-import path12 from "node:path";
+import path13 from "node:path";
 import url from "node:url";
 import { getNameAndVersion as getNameAndVersion7 } from "@openfn/runtime";
 import { mainSymbols } from "figures";
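The new handler expands a named project onto disk: it validates the workspace, clears the workflow root, then writes out whatever switchProject.serialize("fs") returns. That return value is evidently a flat map of relative paths to file contents. A minimal self-contained sketch of the write loop, with a hypothetical files map standing in for the serialized project:

    import path from "path";
    import fs from "fs";

    // Hypothetical stand-in for switchProject.serialize("fs")
    const files = {
      "workflows/patients/workflow.yaml": "name: patients\n",
      "workflows/patients/fetch.js": "fn((state) => state);\n"
    };

    const commandPath = process.cwd();
    for (const f in files) {
      // Recreate intermediate directories, then write each file
      fs.mkdirSync(path.join(commandPath, path.dirname(f)), { recursive: true });
      fs.writeFileSync(path.join(commandPath, f), files[f]);
    }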
@@ -2307,7 +2344,7 @@ var { triangleRightSmall: t } = mainSymbols;
 var loadVersionFromPath = (adaptorPath) => {
   try {
     const pkg = JSON.parse(
-      readFileSync2(path12.resolve(adaptorPath, "package.json"), "utf8")
+      readFileSync2(path13.resolve(adaptorPath, "package.json"), "utf8")
     );
     return pkg.version;
   } catch (e) {
@@ -2342,7 +2379,7 @@ var printVersions = async (logger, options = {}, includeComponents = false) => {
     ...[NODE, CLI2, RUNTIME2, COMPILER2, longestAdaptorName].map((s) => s.length)
   );
   const prefix = (str) => ` ${t} ${str.padEnd(longest + 4, " ")}`;
-  const dirname3 = path12.dirname(url.fileURLToPath(import.meta.url));
+  const dirname3 = path13.dirname(url.fileURLToPath(import.meta.url));
   const pkg = JSON.parse(readFileSync2(`${dirname3}/../../package.json`, "utf8"));
   const { version, dependencies } = pkg;
   const compilerVersion = dependencies["@openfn/compiler"];
@@ -2396,6 +2433,7 @@ var handlers = {
   metadata: handler_default9,
   pull: handler_default10,
   projects: handler_default11,
+  checkout: handler_default12,
   ["collections-get"]: handler_default4.get,
   ["collections-set"]: handler_default4.set,
   ["collections-remove"]: handler_default4.remove,
package/package.json
CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "@openfn/cli",
-  "version": "1.14.0",
+  "version": "1.15.0",
   "description": "CLI devtools for the OpenFn toolchain",
   "engines": {
     "node": ">=18",
@@ -47,13 +47,13 @@
     "undici": "7.12.0",
     "ws": "^8.18.3",
     "yargs": "^17.7.2",
-    "@openfn/compiler": "1.1.
+    "@openfn/compiler": "1.1.3",
+    "@openfn/deploy": "0.11.3",
     "@openfn/describe-package": "0.1.5",
     "@openfn/lexicon": "^1.2.3",
     "@openfn/logger": "1.0.6",
-    "@openfn/
-    "@openfn/project": "^0.
-    "@openfn/runtime": "1.7.2"
+    "@openfn/runtime": "1.7.2",
+    "@openfn/project": "^0.4.0"
   },
   "files": [
     "dist",