@openfn/cli 1.16.1 → 1.17.0
This diff shows the changes between publicly available package versions as released to a supported public registry. It is provided for informational purposes only and reflects the packages as they appear in that registry.
- package/dist/index.js +25 -1
- package/dist/process/runner.js +128 -73
- package/package.json +5 -5
package/dist/index.js
CHANGED

@@ -224,6 +224,14 @@ var apolloUrl = {
     });
   }
 };
+var json = {
+  name: "json",
+  yargs: {
+    boolean: true,
+    description: "Output the result as a json object",
+    default: false
+  }
+};
 var beta = {
   name: "beta",
   yargs: {
@@ -1136,9 +1144,25 @@ var mergeCommand = {
 };
 var command_default13 = mergeCommand;
 
+// src/version/command.ts
+var options12 = [
+  workflow,
+  projectName,
+  projectPath,
+  workflowMappings,
+  json
+];
+var workflowVersionCommand = {
+  command: "project version [workflow]",
+  describe: "Returns the version has of a workflow",
+  handler: ensure("project", options12),
+  builder: (yargs2) => build(options12, yargs2)
+};
+var command_default14 = workflowVersionCommand;
+
 // src/cli.ts
 var y = yargs(hideBin(process.argv));
-var cmd = y.command(command_default7).command(command_default3).command(command_default2).command(command_default4).command(install).command(repo).command(command_default10).command(command_default6).command(command_default).command(command_default8).command(command_default5).command(command_default9).command(command_default11).command(command_default12).command(command_default13).command({
+var cmd = y.command(command_default7).command(command_default3).command(command_default2).command(command_default4).command(install).command(repo).command(command_default10).command(command_default6).command(command_default).command(command_default8).command(command_default5).command(command_default9).command(command_default11).command(command_default12).command(command_default13).command(command_default14).command({
   command: "version",
   describe: "Show the currently installed version of the CLI, compiler and runtime.",
   handler: (argv) => {
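The index.js changes above register a reusable "json" option and a new "project version [workflow]" command on the existing yargs chain. A minimal sketch of that registration pattern, assuming yargs v17; the option shape is copied from the diff, while the handler body is a hypothetical placeholder for the CLI's real ensure/build helpers:

import yargs from "yargs";
import { hideBin } from "yargs/helpers";

// Reusable option descriptor, mirroring the new `json` object in the diff
const json = {
  name: "json",
  yargs: {
    boolean: true,
    description: "Output the result as a json object",
    default: false,
  },
};

yargs(hideBin(process.argv))
  .command({
    command: "project version [workflow]",
    describe: "Returns the version hash of a workflow",
    // the real builder registers every option in options12 via build()
    builder: (y) => y.option(json.name, json.yargs),
    // placeholder handler; the real CLI uses ensure("project", options12)
    handler: (argv) => {
      console.log(argv.workflow, argv.json);
    },
  })
  .parse();
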
package/dist/process/runner.js
CHANGED

@@ -127,14 +127,14 @@ var callApollo = async (apolloBaseUrl, serviceName, payload, logger) => {
     });
   });
 };
-var loadPayload = async (logger, path15) => {
-  if (!path15) {
+var loadPayload = async (logger, path16) => {
+  if (!path16) {
     logger.warn("No JSON payload provided");
     logger.warn("Most apollo services require JSON to be uploaded");
     return {};
   }
-  if (path15.endsWith(".json")) {
-    const str = await readFile(path15, "utf8");
+  if (path16.endsWith(".json")) {
+    const str = await readFile(path16, "utf8");
     const json = JSON.parse(str);
     logger.debug("Loaded JSON payload");
     return json;
@@ -256,13 +256,13 @@ var execute_default = async (plan, input, opts, logger) => {
 };
 function parseAdaptors(plan) {
   const extractInfo = (specifier) => {
-    const [module, path15] = specifier.split("=");
+    const [module, path16] = specifier.split("=");
     const { name, version } = getNameAndVersion(module);
     const info = {
       name
     };
-    if (path15) {
-      info.path = path15;
+    if (path16) {
+      info.path = path16;
     }
     if (version) {
       info.version = version;
@@ -522,10 +522,10 @@ var stripVersionSpecifier = (specifier) => {
   return specifier;
 };
 var resolveSpecifierPath = async (pattern, repoDir, log) => {
-  const [specifier, path15] = pattern.split("=");
-  if (path15) {
-    log.debug(`Resolved ${specifier} to path: ${path15}`);
-    return path15;
+  const [specifier, path16] = pattern.split("=");
+  if (path16) {
+    log.debug(`Resolved ${specifier} to path: ${path16}`);
+    return path16;
   }
   const repoPath = await getModulePath(specifier, repoDir, log);
   if (repoPath) {
@@ -544,12 +544,12 @@ var loadTransformOptions = async (opts, log) => {
   let exports;
   const [specifier] = adaptorInput.split("=");
   log.debug(`Trying to preload types for ${specifier}`);
-  const path15 = await resolveSpecifierPath(adaptorInput, opts.repoDir, log);
-  if (path15) {
+  const path16 = await resolveSpecifierPath(adaptorInput, opts.repoDir, log);
+  if (path16) {
     try {
-      exports = await preloadAdaptorExports(path15, log);
+      exports = await preloadAdaptorExports(path16, log);
     } catch (e) {
-      log.error(`Failed to load adaptor typedefs from path ${path15}`);
+      log.error(`Failed to load adaptor typedefs from path ${path16}`);
       log.error(e);
     }
   }
@@ -1001,8 +1001,8 @@ var loadXPlan = async (plan, options, logger, defaultName = "") => {
 };
 
 // src/util/assert-path.ts
-var assert_path_default = (path15) => {
-  if (!path15) {
+var assert_path_default = (path16) => {
+  if (!path16) {
     console.error("ERROR: no path provided!");
     console.error("\nUsage:");
     console.error(" open path/to/job");
@@ -1688,20 +1688,20 @@ var RETRY_COUNT = 20;
 var TIMEOUT_MS = 1e3 * 60;
 var actualDocGen = (specifier) => describePackage(specifier, {});
 var ensurePath = (filePath) => mkdirSync(path7.dirname(filePath), { recursive: true });
-var generatePlaceholder = (path15) => {
-  writeFileSync(path15, `{ "loading": true, "timestamp": ${Date.now()}}`);
+var generatePlaceholder = (path16) => {
+  writeFileSync(path16, `{ "loading": true, "timestamp": ${Date.now()}}`);
 };
 var finish = (logger, resultPath) => {
   logger.success("Done! Docs can be found at:\n");
   logger.print(` ${path7.resolve(resultPath)}`);
 };
-var generateDocs = async (specifier, path15, docgen, logger) => {
+var generateDocs = async (specifier, path16, docgen, logger) => {
   const result = await docgen(specifier);
-  await writeFile5(path15, JSON.stringify(result, null, 2));
-  finish(logger, path15);
-  return path15;
+  await writeFile5(path16, JSON.stringify(result, null, 2));
+  finish(logger, path16);
+  return path16;
 };
-var waitForDocs = async (docs, path15, logger, retryDuration = RETRY_DURATION) => {
+var waitForDocs = async (docs, path16, logger, retryDuration = RETRY_DURATION) => {
   try {
     if (docs.hasOwnProperty("loading")) {
       logger.info("Docs are being loaded by another process. Waiting.");
@@ -1713,19 +1713,19 @@ var waitForDocs = async (docs, path15, logger, retryDuration = RETRY_DURATION) =
             clearInterval(i);
             reject(new Error("Timed out waiting for docs to load"));
           }
-          const updated = JSON.parse(readFileSync(path15, "utf8"));
+          const updated = JSON.parse(readFileSync(path16, "utf8"));
           if (!updated.hasOwnProperty("loading")) {
             logger.info("Docs found!");
             clearInterval(i);
-            resolve(path15);
+            resolve(path16);
           }
           count++;
         }, retryDuration);
       });
     } else {
-      logger.info(`Docs already written to cache at ${path15}`);
-      finish(logger, path15);
-      return path15;
+      logger.info(`Docs already written to cache at ${path16}`);
+      finish(logger, path16);
+      return path16;
     }
   } catch (e) {
     logger.error("Existing doc JSON corrupt. Aborting");
@@ -1742,28 +1742,28 @@ var docgenHandler = (options, logger, docgen = actualDocGen, retryDuration = RET
     process.exit(9);
   }
   logger.success(`Generating docs for ${specifier}`);
-  const path15 = `${repoDir}/docs/${specifier}.json`;
-  ensurePath(path15);
+  const path16 = `${repoDir}/docs/${specifier}.json`;
+  ensurePath(path16);
   const handleError2 = () => {
     logger.info("Removing placeholder");
-    rmSync(path15);
+    rmSync(path16);
   };
   try {
-    const existing = readFileSync(path15, "utf8");
+    const existing = readFileSync(path16, "utf8");
     const json = JSON.parse(existing);
     if (json && json.timeout && Date.now() - json.timeout >= TIMEOUT_MS) {
       logger.info(`Expired placeholder found. Removing.`);
-      rmSync(path15);
+      rmSync(path16);
       throw new Error("TIMEOUT");
    }
-    return waitForDocs(json, path15, logger, retryDuration);
+    return waitForDocs(json, path16, logger, retryDuration);
   } catch (e) {
     if (e.message !== "TIMEOUT") {
-      logger.info(`Docs JSON not found at ${path15}`);
+      logger.info(`Docs JSON not found at ${path16}`);
     }
     logger.debug("Generating placeholder");
-    generatePlaceholder(path15);
-    return generateDocs(specifier, path15, docgen, logger).catch((e2) => {
+    generatePlaceholder(path16);
+    return generateDocs(specifier, path16, docgen, logger).catch((e2) => {
      logger.error("Error generating documentation");
      logger.error(e2);
      handleError2();
@@ -1814,7 +1814,7 @@ var docsHandler = async (options, logger) => {
     logger.success(`Showing docs for ${adaptorName} v${version}`);
   }
   logger.info("Generating/loading documentation...");
-  const path15 = await handler_default7(
+  const path16 = await handler_default7(
     {
       specifier: `${name}@${version}`,
       repoDir
@@ -1823,8 +1823,8 @@ var docsHandler = async (options, logger) => {
     createNullLogger()
   );
   let didError = false;
-  if (path15) {
-    const source = await readFile4(path15, "utf8");
+  if (path16) {
+    const source = await readFile4(path16, "utf8");
     const data = JSON.parse(source);
     let desc;
     if (operation) {
@@ -2294,85 +2294,139 @@ ${project.workflows.map((w) => " - " + w.name).join("\n")}`;
 }
 var handler_default11 = projectsHandler;
 
-// src/
+// src/version/handler.ts
 import { Workspace as Workspace2 } from "@openfn/project";
 import path12 from "path";
+var workflowVersionHandler = async (options, logger) => {
+  const commandPath = path12.resolve(options.projectPath ?? ".");
+  const workspace = new Workspace2(commandPath);
+  if (!workspace.valid) {
+    logger.error("Command was run in an invalid openfn workspace");
+    return;
+  }
+  const output = /* @__PURE__ */ new Map();
+  const activeProject = workspace.getActiveProject();
+  if (options.workflow) {
+    const workflow = activeProject?.getWorkflow(options.workflow);
+    if (!workflow) {
+      logger.error(`No workflow found with id/name ${options.workflow}`);
+      return;
+    }
+    output.set(workflow.name || workflow.id, workflow.getVersionHash());
+  } else {
+    for (const wf of activeProject?.workflows || []) {
+      output.set(wf.name || wf.id, wf.getVersionHash());
+    }
+  }
+  if (!output.size) {
+    logger.error("No workflow available");
+    return;
+  }
+  let final;
+  if (options.json) {
+    final = JSON.stringify(Object.fromEntries(output), void 0, 2);
+  } else {
+    final = Array.from(output.entries()).map(([key, value]) => key + "\n" + value).join("\n\n");
+  }
+  logger.success(`Workflow(s) and their hashes
+
+${final}`);
+};
+var handler_default12 = workflowVersionHandler;
+
+// src/checkout/handler.ts
+import Project5, { Workspace as Workspace3 } from "@openfn/project";
+import path13 from "path";
 import fs6 from "fs";
 import { rimraf as rimraf2 } from "rimraf";
 var checkoutHandler = async (options, logger) => {
-  const commandPath = path12.resolve(options.projectPath ?? ".");
-  const workspace = new Workspace2(commandPath);
+  const commandPath = path13.resolve(options.projectPath ?? ".");
+  const workspace = new Workspace3(commandPath);
   if (!workspace.valid) {
     logger.error("Command was run in an invalid openfn workspace");
     return;
   }
-  const
+  const { project: _, ...config } = workspace.getConfig() ?? {};
+  let switchProject;
+  if (/\.(yaml|json)$/.test(options.projectName)) {
+    const filePath = path13.join(commandPath, options.projectName);
+    logger.debug("Loading project from path ", filePath);
+    switchProject = await Project5.from("path", filePath, {
+      config
+    });
+  } else {
+    switchProject = workspace.get(options.projectName);
+  }
   if (!switchProject) {
     logger.error(
       `Project with id/name ${options.projectName} not found in the workspace`
     );
     return;
   }
-
-  await rimraf2(path12.join(commandPath, config?.workflowRoot || "workflows"));
+  await rimraf2(path13.join(commandPath, config.workflowRoot ?? "workflows"));
   const files = switchProject.serialize("fs");
   for (const f in files) {
     if (files[f]) {
-      fs6.mkdirSync(path12.join(commandPath, path12.dirname(f)), {
+      fs6.mkdirSync(path13.join(commandPath, path13.dirname(f)), {
         recursive: true
       });
-      fs6.writeFileSync(path12.join(commandPath, f), files[f]);
+      fs6.writeFileSync(path13.join(commandPath, f), files[f]);
     } else {
       logger.warn("WARNING! No content for file", f);
     }
   }
   logger.success(`Expanded project to ${commandPath}`);
 };
-var handler_default12 = checkoutHandler;
+var handler_default13 = checkoutHandler;
 
 // src/merge/handler.ts
-import Project5, { Workspace as Workspace3 } from "@openfn/project";
-import path13 from "path";
+import Project6, { Workspace as Workspace4 } from "@openfn/project";
+import path14 from "path";
 import { promises as fs7 } from "fs";
 var mergeHandler = async (options, logger) => {
-  const commandPath = path13.resolve(options.projectPath ?? ".");
-  const workspace = new Workspace3(commandPath);
+  const commandPath = path14.resolve(options.projectPath ?? ".");
+  const workspace = new Workspace4(commandPath);
   if (!workspace.valid) {
     logger.error("Command was run in an invalid openfn workspace");
     return;
   }
-  const
-  if (!
+  const targetProject = workspace.getActiveProject();
+  if (!targetProject) {
     logger.error(`No project currently checked out`);
     return;
   }
-
-  if (
-
-
-  );
+  let sourceProject;
+  if (/\.(yaml|json)$/.test(options.projectName)) {
+    const filePath = path14.join(commandPath, options.projectName);
+    logger.debug("Loading source project from path ", filePath);
+    sourceProject = await Project6.from("path", filePath);
+  } else {
+    sourceProject = workspace.get(options.projectName);
+  }
+  if (!sourceProject) {
+    logger.error(`Project "${options.projectName}" not found in the workspace`);
     return;
   }
-  if (
+  if (targetProject.name === sourceProject.name) {
     logger.error("Merging into the same project not allowed");
     return;
   }
-  if (!
+  if (!targetProject.name) {
     logger.error("The checked out project has no name/id");
     return;
   }
-  const finalPath = workspace.getProjectPath(
+  const finalPath = workspace.getProjectPath(targetProject.name);
   if (!finalPath) {
     logger.error("Path to checked out project not found.");
     return;
   }
-  const final =
+  const final = Project6.merge(sourceProject, targetProject, {
     removeUnmapped: options.removeUnmapped,
     workflowMappings: options.workflowMappings
   });
   const yaml = final.serialize("state", { format: "yaml" });
   await fs7.writeFile(finalPath, yaml);
-  await
+  await handler_default13(
   {
     command: "checkout",
     projectPath: commandPath,
@@ -2381,14 +2435,14 @@ var mergeHandler = async (options, logger) => {
     logger
   );
   logger.success(
-    `Project ${
+    `Project ${sourceProject.name} has been merged into Project ${targetProject.name} successfully`
   );
 };
-var handler_default13 = mergeHandler;
+var handler_default14 = mergeHandler;
 
 // src/util/print-versions.ts
 import { readFileSync as readFileSync2 } from "node:fs";
-import path14 from "node:path";
+import path15 from "node:path";
 import url from "node:url";
 import { getNameAndVersion as getNameAndVersion7 } from "@openfn/runtime";
 import { mainSymbols } from "figures";
@@ -2400,7 +2454,7 @@ var { triangleRightSmall: t } = mainSymbols;
 var loadVersionFromPath = (adaptorPath) => {
   try {
     const pkg = JSON.parse(
-      readFileSync2(path14.resolve(adaptorPath, "package.json"), "utf8")
+      readFileSync2(path15.resolve(adaptorPath, "package.json"), "utf8")
     );
     return pkg.version;
   } catch (e) {
@@ -2435,7 +2489,7 @@ var printVersions = async (logger, options = {}, includeComponents = false) => {
     ...[NODE, CLI2, RUNTIME2, COMPILER2, longestAdaptorName].map((s) => s.length)
   );
   const prefix = (str) => ` ${t} ${str.padEnd(longest + 4, " ")}`;
-  const dirname3 = path14.dirname(url.fileURLToPath(import.meta.url));
+  const dirname3 = path15.dirname(url.fileURLToPath(import.meta.url));
   const pkg = JSON.parse(readFileSync2(`${dirname3}/../../package.json`, "utf8"));
   const { version, dependencies } = pkg;
   const compilerVersion = dependencies["@openfn/compiler"];
@@ -2489,8 +2543,9 @@ var handlers = {
   metadata: handler_default9,
   pull: handler_default10,
   projects: handler_default11,
-  checkout: handler_default12,
-  merge: handler_default13,
+  checkout: handler_default13,
+  merge: handler_default14,
+  project: handler_default12,
   ["collections-get"]: handler_default4.get,
   ["collections-set"]: handler_default4.set,
   ["collections-remove"]: handler_default4.remove,
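The core of the new src/version/handler.ts above is its output formatting: workflow names are mapped to version hashes in a Map, then rendered either as pretty-printed JSON (when the new json flag is set) or as name/hash pairs separated by blank lines. A standalone sketch of that logic, using hypothetical sample hashes:

// Hypothetical sample data; the real values come from workflow.getVersionHash()
const output = new Map<string, string>([
  ["my-workflow", "abc123"],
  ["other-workflow", "def456"],
]);

// corresponds to the new --json flag on the project version command
const asJson = false;

const final = asJson
  ? // e.g. { "my-workflow": "abc123", ... } with 2-space indentation
    JSON.stringify(Object.fromEntries(output), undefined, 2)
  : // plain text: each name on one line, its hash on the next,
    // entries separated by a blank line
    Array.from(output.entries())
      .map(([key, value]) => key + "\n" + value)
      .join("\n\n");

console.log(final);

The same hunk also teaches the checkout and merge handlers to accept a .yaml/.json file path in place of a project name, dispatching on /\.(yaml|json)$/.test(options.projectName) and loading the file via Project.from("path", filePath).
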
package/package.json
CHANGED

@@ -1,6 +1,6 @@
 {
   "name": "@openfn/cli",
-  "version": "1.16.1",
+  "version": "1.17.0",
   "description": "CLI devtools for the OpenFn toolchain",
   "engines": {
     "node": ">=18",
@@ -49,11 +49,11 @@
     "yargs": "^17.7.2",
     "@openfn/compiler": "1.1.5",
     "@openfn/deploy": "0.11.3",
-    "@openfn/lexicon": "^1.2.
+    "@openfn/lexicon": "^1.2.4",
+    "@openfn/logger": "1.0.6",
+    "@openfn/project": "^0.6.0",
     "@openfn/describe-package": "0.1.5",
-    "@openfn/
-    "@openfn/runtime": "1.7.3",
-    "@openfn/logger": "1.0.6"
+    "@openfn/runtime": "1.7.3"
   },
   "files": [
     "dist",