@openfn/cli 1.16.2 → 1.17.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/index.js CHANGED
@@ -224,6 +224,14 @@ var apolloUrl = {
224
224
  });
225
225
  }
226
226
  };
227
+ var json = {
228
+ name: "json",
229
+ yargs: {
230
+ boolean: true,
231
+ description: "Output the result as a json object",
232
+ default: false
233
+ }
234
+ };
227
235
  var beta = {
228
236
  name: "beta",
229
237
  yargs: {
@@ -1112,7 +1120,7 @@ var projectsCommand = {
1112
1120
  var command_default11 = projectsCommand;
1113
1121
 
1114
1122
  // src/checkout/command.ts
1115
- var options10 = [projectName, projectPath];
1123
+ var options10 = [projectName, projectPath, log];
1116
1124
  var checkoutCommand = {
1117
1125
  command: "checkout <project-name>",
1118
1126
  describe: "Switch to a different openfn project in the same workspace",
@@ -1126,7 +1134,8 @@ var options11 = [
1126
1134
  projectName,
1127
1135
  projectPath,
1128
1136
  removeUnmapped,
1129
- workflowMappings
1137
+ workflowMappings,
1138
+ log
1130
1139
  ];
1131
1140
  var mergeCommand = {
1132
1141
  command: "merge [project-name]",
@@ -1136,9 +1145,25 @@ var mergeCommand = {
1136
1145
  };
1137
1146
  var command_default13 = mergeCommand;
1138
1147
 
1148
+ // src/version/command.ts
1149
+ var options12 = [
1150
+ workflow,
1151
+ projectName,
1152
+ projectPath,
1153
+ workflowMappings,
1154
+ json
1155
+ ];
1156
+ var workflowVersionCommand = {
1157
+ command: "project version [workflow]",
1158
+ describe: "Returns the version hash of a workflow",
1159
+ handler: ensure("project", options12),
1160
+ builder: (yargs2) => build(options12, yargs2)
1161
+ };
1162
+ var command_default14 = workflowVersionCommand;
1163
+
1139
1164
  // src/cli.ts
1140
1165
  var y = yargs(hideBin(process.argv));
1141
- var cmd = y.command(command_default7).command(command_default3).command(command_default2).command(command_default4).command(install).command(repo).command(command_default10).command(command_default6).command(command_default).command(command_default8).command(command_default5).command(command_default9).command(command_default11).command(command_default12).command(command_default13).command({
1166
+ var cmd = y.command(command_default7).command(command_default3).command(command_default2).command(command_default4).command(install).command(repo).command(command_default10).command(command_default6).command(command_default).command(command_default8).command(command_default5).command(command_default9).command(command_default11).command(command_default12).command(command_default13).command(command_default14).command({
1142
1167
  command: "version",
1143
1168
  describe: "Show the currently installed version of the CLI, compiler and runtime.",
1144
1169
  handler: (argv) => {
@@ -127,14 +127,14 @@ var callApollo = async (apolloBaseUrl, serviceName, payload, logger) => {
127
127
  });
128
128
  });
129
129
  };
130
- var loadPayload = async (logger, path15) => {
131
- if (!path15) {
130
+ var loadPayload = async (logger, path16) => {
131
+ if (!path16) {
132
132
  logger.warn("No JSON payload provided");
133
133
  logger.warn("Most apollo services require JSON to be uploaded");
134
134
  return {};
135
135
  }
136
- if (path15.endsWith(".json")) {
137
- const str = await readFile(path15, "utf8");
136
+ if (path16.endsWith(".json")) {
137
+ const str = await readFile(path16, "utf8");
138
138
  const json = JSON.parse(str);
139
139
  logger.debug("Loaded JSON payload");
140
140
  return json;
@@ -256,13 +256,13 @@ var execute_default = async (plan, input, opts, logger) => {
256
256
  };
257
257
  function parseAdaptors(plan) {
258
258
  const extractInfo = (specifier) => {
259
- const [module, path15] = specifier.split("=");
259
+ const [module, path16] = specifier.split("=");
260
260
  const { name, version } = getNameAndVersion(module);
261
261
  const info = {
262
262
  name
263
263
  };
264
- if (path15) {
265
- info.path = path15;
264
+ if (path16) {
265
+ info.path = path16;
266
266
  }
267
267
  if (version) {
268
268
  info.version = version;
@@ -522,10 +522,10 @@ var stripVersionSpecifier = (specifier) => {
522
522
  return specifier;
523
523
  };
524
524
  var resolveSpecifierPath = async (pattern, repoDir, log) => {
525
- const [specifier, path15] = pattern.split("=");
526
- if (path15) {
527
- log.debug(`Resolved ${specifier} to path: ${path15}`);
528
- return path15;
525
+ const [specifier, path16] = pattern.split("=");
526
+ if (path16) {
527
+ log.debug(`Resolved ${specifier} to path: ${path16}`);
528
+ return path16;
529
529
  }
530
530
  const repoPath = await getModulePath(specifier, repoDir, log);
531
531
  if (repoPath) {
@@ -544,12 +544,12 @@ var loadTransformOptions = async (opts, log) => {
544
544
  let exports;
545
545
  const [specifier] = adaptorInput.split("=");
546
546
  log.debug(`Trying to preload types for ${specifier}`);
547
- const path15 = await resolveSpecifierPath(adaptorInput, opts.repoDir, log);
548
- if (path15) {
547
+ const path16 = await resolveSpecifierPath(adaptorInput, opts.repoDir, log);
548
+ if (path16) {
549
549
  try {
550
- exports = await preloadAdaptorExports(path15, log);
550
+ exports = await preloadAdaptorExports(path16, log);
551
551
  } catch (e) {
552
- log.error(`Failed to load adaptor typedefs from path ${path15}`);
552
+ log.error(`Failed to load adaptor typedefs from path ${path16}`);
553
553
  log.error(e);
554
554
  }
555
555
  }
@@ -1001,8 +1001,8 @@ var loadXPlan = async (plan, options, logger, defaultName = "") => {
1001
1001
  };
1002
1002
 
1003
1003
  // src/util/assert-path.ts
1004
- var assert_path_default = (path15) => {
1005
- if (!path15) {
1004
+ var assert_path_default = (path16) => {
1005
+ if (!path16) {
1006
1006
  console.error("ERROR: no path provided!");
1007
1007
  console.error("\nUsage:");
1008
1008
  console.error(" open path/to/job");
@@ -1688,20 +1688,20 @@ var RETRY_COUNT = 20;
1688
1688
  var TIMEOUT_MS = 1e3 * 60;
1689
1689
  var actualDocGen = (specifier) => describePackage(specifier, {});
1690
1690
  var ensurePath = (filePath) => mkdirSync(path7.dirname(filePath), { recursive: true });
1691
- var generatePlaceholder = (path15) => {
1692
- writeFileSync(path15, `{ "loading": true, "timestamp": ${Date.now()}}`);
1691
+ var generatePlaceholder = (path16) => {
1692
+ writeFileSync(path16, `{ "loading": true, "timestamp": ${Date.now()}}`);
1693
1693
  };
1694
1694
  var finish = (logger, resultPath) => {
1695
1695
  logger.success("Done! Docs can be found at:\n");
1696
1696
  logger.print(` ${path7.resolve(resultPath)}`);
1697
1697
  };
1698
- var generateDocs = async (specifier, path15, docgen, logger) => {
1698
+ var generateDocs = async (specifier, path16, docgen, logger) => {
1699
1699
  const result = await docgen(specifier);
1700
- await writeFile5(path15, JSON.stringify(result, null, 2));
1701
- finish(logger, path15);
1702
- return path15;
1700
+ await writeFile5(path16, JSON.stringify(result, null, 2));
1701
+ finish(logger, path16);
1702
+ return path16;
1703
1703
  };
1704
- var waitForDocs = async (docs, path15, logger, retryDuration = RETRY_DURATION) => {
1704
+ var waitForDocs = async (docs, path16, logger, retryDuration = RETRY_DURATION) => {
1705
1705
  try {
1706
1706
  if (docs.hasOwnProperty("loading")) {
1707
1707
  logger.info("Docs are being loaded by another process. Waiting.");
@@ -1713,19 +1713,19 @@ var waitForDocs = async (docs, path15, logger, retryDuration = RETRY_DURATION) =
1713
1713
  clearInterval(i);
1714
1714
  reject(new Error("Timed out waiting for docs to load"));
1715
1715
  }
1716
- const updated = JSON.parse(readFileSync(path15, "utf8"));
1716
+ const updated = JSON.parse(readFileSync(path16, "utf8"));
1717
1717
  if (!updated.hasOwnProperty("loading")) {
1718
1718
  logger.info("Docs found!");
1719
1719
  clearInterval(i);
1720
- resolve(path15);
1720
+ resolve(path16);
1721
1721
  }
1722
1722
  count++;
1723
1723
  }, retryDuration);
1724
1724
  });
1725
1725
  } else {
1726
- logger.info(`Docs already written to cache at ${path15}`);
1727
- finish(logger, path15);
1728
- return path15;
1726
+ logger.info(`Docs already written to cache at ${path16}`);
1727
+ finish(logger, path16);
1728
+ return path16;
1729
1729
  }
1730
1730
  } catch (e) {
1731
1731
  logger.error("Existing doc JSON corrupt. Aborting");
@@ -1742,28 +1742,28 @@ var docgenHandler = (options, logger, docgen = actualDocGen, retryDuration = RET
1742
1742
  process.exit(9);
1743
1743
  }
1744
1744
  logger.success(`Generating docs for ${specifier}`);
1745
- const path15 = `${repoDir}/docs/${specifier}.json`;
1746
- ensurePath(path15);
1745
+ const path16 = `${repoDir}/docs/${specifier}.json`;
1746
+ ensurePath(path16);
1747
1747
  const handleError2 = () => {
1748
1748
  logger.info("Removing placeholder");
1749
- rmSync(path15);
1749
+ rmSync(path16);
1750
1750
  };
1751
1751
  try {
1752
- const existing = readFileSync(path15, "utf8");
1752
+ const existing = readFileSync(path16, "utf8");
1753
1753
  const json = JSON.parse(existing);
1754
1754
  if (json && json.timeout && Date.now() - json.timeout >= TIMEOUT_MS) {
1755
1755
  logger.info(`Expired placeholder found. Removing.`);
1756
- rmSync(path15);
1756
+ rmSync(path16);
1757
1757
  throw new Error("TIMEOUT");
1758
1758
  }
1759
- return waitForDocs(json, path15, logger, retryDuration);
1759
+ return waitForDocs(json, path16, logger, retryDuration);
1760
1760
  } catch (e) {
1761
1761
  if (e.message !== "TIMEOUT") {
1762
- logger.info(`Docs JSON not found at ${path15}`);
1762
+ logger.info(`Docs JSON not found at ${path16}`);
1763
1763
  }
1764
1764
  logger.debug("Generating placeholder");
1765
- generatePlaceholder(path15);
1766
- return generateDocs(specifier, path15, docgen, logger).catch((e2) => {
1765
+ generatePlaceholder(path16);
1766
+ return generateDocs(specifier, path16, docgen, logger).catch((e2) => {
1767
1767
  logger.error("Error generating documentation");
1768
1768
  logger.error(e2);
1769
1769
  handleError2();
@@ -1814,7 +1814,7 @@ var docsHandler = async (options, logger) => {
1814
1814
  logger.success(`Showing docs for ${adaptorName} v${version}`);
1815
1815
  }
1816
1816
  logger.info("Generating/loading documentation...");
1817
- const path15 = await handler_default7(
1817
+ const path16 = await handler_default7(
1818
1818
  {
1819
1819
  specifier: `${name}@${version}`,
1820
1820
  repoDir
@@ -1823,8 +1823,8 @@ var docsHandler = async (options, logger) => {
1823
1823
  createNullLogger()
1824
1824
  );
1825
1825
  let didError = false;
1826
- if (path15) {
1827
- const source = await readFile4(path15, "utf8");
1826
+ if (path16) {
1827
+ const source = await readFile4(path16, "utf8");
1828
1828
  const data = JSON.parse(source);
1829
1829
  let desc;
1830
1830
  if (operation) {
@@ -2294,14 +2294,54 @@ ${project.workflows.map((w) => " - " + w.name).join("\n")}`;
2294
2294
  }
2295
2295
  var handler_default11 = projectsHandler;
2296
2296
 
2297
- // src/checkout/handler.ts
2298
- import Project5, { Workspace as Workspace2 } from "@openfn/project";
2297
+ // src/version/handler.ts
2298
+ import { Workspace as Workspace2 } from "@openfn/project";
2299
2299
  import path12 from "path";
2300
+ var workflowVersionHandler = async (options, logger) => {
2301
+ const commandPath = path12.resolve(options.projectPath ?? ".");
2302
+ const workspace = new Workspace2(commandPath);
2303
+ if (!workspace.valid) {
2304
+ logger.error("Command was run in an invalid openfn workspace");
2305
+ return;
2306
+ }
2307
+ const output = /* @__PURE__ */ new Map();
2308
+ const activeProject = workspace.getActiveProject();
2309
+ if (options.workflow) {
2310
+ const workflow = activeProject?.getWorkflow(options.workflow);
2311
+ if (!workflow) {
2312
+ logger.error(`No workflow found with id/name ${options.workflow}`);
2313
+ return;
2314
+ }
2315
+ output.set(workflow.name || workflow.id, workflow.getVersionHash());
2316
+ } else {
2317
+ for (const wf of activeProject?.workflows || []) {
2318
+ output.set(wf.name || wf.id, wf.getVersionHash());
2319
+ }
2320
+ }
2321
+ if (!output.size) {
2322
+ logger.error("No workflow available");
2323
+ return;
2324
+ }
2325
+ let final;
2326
+ if (options.json) {
2327
+ final = JSON.stringify(Object.fromEntries(output), void 0, 2);
2328
+ } else {
2329
+ final = Array.from(output.entries()).map(([key, value]) => key + "\n" + value).join("\n\n");
2330
+ }
2331
+ logger.success(`Workflow(s) and their hashes
2332
+
2333
+ ${final}`);
2334
+ };
2335
+ var handler_default12 = workflowVersionHandler;
2336
+
2337
+ // src/checkout/handler.ts
2338
+ import Project5, { Workspace as Workspace3 } from "@openfn/project";
2339
+ import path13 from "path";
2300
2340
  import fs6 from "fs";
2301
2341
  import { rimraf as rimraf2 } from "rimraf";
2302
2342
  var checkoutHandler = async (options, logger) => {
2303
- const commandPath = path12.resolve(options.projectPath ?? ".");
2304
- const workspace = new Workspace2(commandPath);
2343
+ const commandPath = path13.resolve(options.projectPath ?? ".");
2344
+ const workspace = new Workspace3(commandPath);
2305
2345
  if (!workspace.valid) {
2306
2346
  logger.error("Command was run in an invalid openfn workspace");
2307
2347
  return;
@@ -2309,7 +2349,7 @@ var checkoutHandler = async (options, logger) => {
2309
2349
  const { project: _, ...config } = workspace.getConfig() ?? {};
2310
2350
  let switchProject;
2311
2351
  if (/\.(yaml|json)$/.test(options.projectName)) {
2312
- const filePath = path12.join(commandPath, options.projectName);
2352
+ const filePath = path13.join(commandPath, options.projectName);
2313
2353
  logger.debug("Loading project from path ", filePath);
2314
2354
  switchProject = await Project5.from("path", filePath, {
2315
2355
  config
@@ -2323,29 +2363,29 @@ var checkoutHandler = async (options, logger) => {
2323
2363
  );
2324
2364
  return;
2325
2365
  }
2326
- await rimraf2(path12.join(commandPath, config.workflowRoot ?? "workflows"));
2366
+ await rimraf2(path13.join(commandPath, config.workflowRoot ?? "workflows"));
2327
2367
  const files = switchProject.serialize("fs");
2328
2368
  for (const f in files) {
2329
2369
  if (files[f]) {
2330
- fs6.mkdirSync(path12.join(commandPath, path12.dirname(f)), {
2370
+ fs6.mkdirSync(path13.join(commandPath, path13.dirname(f)), {
2331
2371
  recursive: true
2332
2372
  });
2333
- fs6.writeFileSync(path12.join(commandPath, f), files[f]);
2373
+ fs6.writeFileSync(path13.join(commandPath, f), files[f]);
2334
2374
  } else {
2335
2375
  logger.warn("WARNING! No content for file", f);
2336
2376
  }
2337
2377
  }
2338
2378
  logger.success(`Expanded project to ${commandPath}`);
2339
2379
  };
2340
- var handler_default12 = checkoutHandler;
2380
+ var handler_default13 = checkoutHandler;
2341
2381
 
2342
2382
  // src/merge/handler.ts
2343
- import Project6, { Workspace as Workspace3 } from "@openfn/project";
2344
- import path13 from "path";
2383
+ import Project6, { Workspace as Workspace4 } from "@openfn/project";
2384
+ import path14 from "path";
2345
2385
  import { promises as fs7 } from "fs";
2346
2386
  var mergeHandler = async (options, logger) => {
2347
- const commandPath = path13.resolve(options.projectPath ?? ".");
2348
- const workspace = new Workspace3(commandPath);
2387
+ const commandPath = path14.resolve(options.projectPath ?? ".");
2388
+ const workspace = new Workspace4(commandPath);
2349
2389
  if (!workspace.valid) {
2350
2390
  logger.error("Command was run in an invalid openfn workspace");
2351
2391
  return;
@@ -2357,7 +2397,7 @@ var mergeHandler = async (options, logger) => {
2357
2397
  }
2358
2398
  let sourceProject;
2359
2399
  if (/\.(yaml|json)$/.test(options.projectName)) {
2360
- const filePath = path13.join(commandPath, options.projectName);
2400
+ const filePath = path14.join(commandPath, options.projectName);
2361
2401
  logger.debug("Loading source project from path ", filePath);
2362
2402
  sourceProject = await Project6.from("path", filePath);
2363
2403
  } else {
@@ -2386,7 +2426,7 @@ var mergeHandler = async (options, logger) => {
2386
2426
  });
2387
2427
  const yaml = final.serialize("state", { format: "yaml" });
2388
2428
  await fs7.writeFile(finalPath, yaml);
2389
- await handler_default12(
2429
+ await handler_default13(
2390
2430
  {
2391
2431
  command: "checkout",
2392
2432
  projectPath: commandPath,
@@ -2398,11 +2438,11 @@ var mergeHandler = async (options, logger) => {
2398
2438
  `Project ${sourceProject.name} has been merged into Project ${targetProject.name} successfully`
2399
2439
  );
2400
2440
  };
2401
- var handler_default13 = mergeHandler;
2441
+ var handler_default14 = mergeHandler;
2402
2442
 
2403
2443
  // src/util/print-versions.ts
2404
2444
  import { readFileSync as readFileSync2 } from "node:fs";
2405
- import path14 from "node:path";
2445
+ import path15 from "node:path";
2406
2446
  import url from "node:url";
2407
2447
  import { getNameAndVersion as getNameAndVersion7 } from "@openfn/runtime";
2408
2448
  import { mainSymbols } from "figures";
@@ -2414,7 +2454,7 @@ var { triangleRightSmall: t } = mainSymbols;
2414
2454
  var loadVersionFromPath = (adaptorPath) => {
2415
2455
  try {
2416
2456
  const pkg = JSON.parse(
2417
- readFileSync2(path14.resolve(adaptorPath, "package.json"), "utf8")
2457
+ readFileSync2(path15.resolve(adaptorPath, "package.json"), "utf8")
2418
2458
  );
2419
2459
  return pkg.version;
2420
2460
  } catch (e) {
@@ -2449,7 +2489,7 @@ var printVersions = async (logger, options = {}, includeComponents = false) => {
2449
2489
  ...[NODE, CLI2, RUNTIME2, COMPILER2, longestAdaptorName].map((s) => s.length)
2450
2490
  );
2451
2491
  const prefix = (str) => ` ${t} ${str.padEnd(longest + 4, " ")}`;
2452
- const dirname3 = path14.dirname(url.fileURLToPath(import.meta.url));
2492
+ const dirname3 = path15.dirname(url.fileURLToPath(import.meta.url));
2453
2493
  const pkg = JSON.parse(readFileSync2(`${dirname3}/../../package.json`, "utf8"));
2454
2494
  const { version, dependencies } = pkg;
2455
2495
  const compilerVersion = dependencies["@openfn/compiler"];
@@ -2503,8 +2543,9 @@ var handlers = {
2503
2543
  metadata: handler_default9,
2504
2544
  pull: handler_default10,
2505
2545
  projects: handler_default11,
2506
- checkout: handler_default12,
2507
- merge: handler_default13,
2546
+ checkout: handler_default13,
2547
+ merge: handler_default14,
2548
+ project: handler_default12,
2508
2549
  ["collections-get"]: handler_default4.get,
2509
2550
  ["collections-set"]: handler_default4.set,
2510
2551
  ["collections-remove"]: handler_default4.remove,
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@openfn/cli",
3
- "version": "1.16.2",
3
+ "version": "1.17.1",
4
4
  "description": "CLI devtools for the OpenFn toolchain",
5
5
  "engines": {
6
6
  "node": ">=18",
@@ -48,11 +48,11 @@
48
48
  "ws": "^8.18.3",
49
49
  "yargs": "^17.7.2",
50
50
  "@openfn/compiler": "1.1.5",
51
+ "@openfn/deploy": "0.11.3",
51
52
  "@openfn/describe-package": "0.1.5",
52
53
  "@openfn/lexicon": "^1.2.4",
53
- "@openfn/deploy": "0.11.3",
54
54
  "@openfn/runtime": "1.7.3",
55
- "@openfn/project": "^0.5.1",
55
+ "@openfn/project": "^0.6.0",
56
56
  "@openfn/logger": "1.0.6"
57
57
  },
58
58
  "files": [