@openfn/cli 0.0.31 → 0.0.34

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/README.md CHANGED
@@ -36,6 +36,14 @@ Get help:
36
36
  openfn help
37
37
  ```
38
38
 
39
+ ## Migrating from devtools
40
+
41
+ If you're coming to the CLI from the old openfn devtools, here are a couple of key points to be aware of:
42
+
43
+ * The CLI has a shorter, sleeker syntax, so your command should be much shorter
44
+ * The CLI will automatically install adaptors for you (with full version control)
45
+ * By default, the CLI will only write state.data to output. This is to encourage better state management. Pass `--no-strict-output` to save the entire state object.
46
+
39
47
  ## Basic Usage
40
48
 
41
49
  You're probably here to run jobs (expressions), which the CLI makes easy:
@@ -46,13 +54,13 @@ openfn path/to/job.js -ia adaptor-name
46
54
 
47
55
  You MUST specify which adaptor to use. Pass the `-i` flag to auto-install that adaptor (it's safe to do this redundantly).
48
56
 
49
- If output.json is not passed, the CLI will create an `output.json` next to the job file. You can pass a path to state by adding `-s path/to/state.json`, and output by passing `-o path/to/output.json`. You can use `-S` and `-O` to pass state through stdin and return the output through stdout.
57
+ When the job is finished, the CLI will write the `data` property of your state to disk. By default the CLI will create an `output.json` next to the job file. You can pass a path to output by passing `-o path/to/output.json` and state by adding `-s path/to/state.json`. You can use `-S` and `-O` to pass state through stdin and return the output through stdout. To write the entire state object (not just `data`), pass `--no-strict-output`.
50
58
 
51
- The CLI can auto-install language adaptors to its own privately maintained repo. Run `openfn repo list` to see where the repo is, and what's in it. Set the `OPENFN_REPO_DIR` env var to specify the repo folder. When autoinstalling, the CLI will check to see if a matching version is found in the repo.
59
+ The CLI can auto-install language adaptors to its own privately maintained repo, just include the `-i` flag in the command and your adaptors will be forever fully managed. Run `openfn repo list` to see where the repo is, and what's in it. Set the `OPENFN_REPO_DIR` env var to specify the repo folder. When autoinstalling, the CLI will check to see if a matching version is found in the repo.
52
60
 
53
61
  You can specify adaptors with a shorthand (`http`) or use the full package name (`@openfn/language-http`). You can add a specific version like `http@2.0.0`. You can pass a path to a locally installed adaptor like `http=/repo/openfn/adaptors/my-http-build`.
54
62
 
55
- If you have the adaptors monorepo set up on your machine, you can also run from that. Pass the `-m` flag to load from the monorepo. Set the monorepo location by setting the OPENFN_ADAPTORS_REPO env var to a valid path. This runs from the built package, so remember to build an adaptor before running!
63
+ If you have the adaptors monorepo set up on your machine, you can also run adaptors straight from source. Pass the `-m <path>` flag to load from the monorepo. You can also set the monorepo location by setting the `OPENFN_ADAPTORS_REPO` env var to a valid path. After that just include `-m` to load from the monorepo. Remember that adaptors will be loaded from the BUILT package in `dist`, so remember to build an adaptor before running!
56
64
 
57
65
  You can pass `--log info` to get more feedback about what's happening, or `--log debug` for more details than you could ever use.
58
66
 
@@ -119,9 +127,11 @@ The CLI uses openfn's own runtime to execute jobs in a safe environment.
119
127
 
120
128
  All jobs which work against `@openfn/core` will work in the new CLI and runtime environment (note: although this is a work in progress and we are actively looking for help to test this!).
121
129
 
130
+ If you want to see how the compiler is changing your job, run `openfn compile path/to/job -a <adaptor>` to return the compiled code to stdout. Add `-o path/to/output.js` to save the result to disk.
131
+
122
132
  ## New Runtime notes
123
133
 
124
- The new openfunction runtime basically does one thing: load a Javascript Module, find the default export, and execute the functions it holds.
134
+ The new OpenFn runtime will create a secure sandboxed environment which loads a Javascript Module, finds the default export, and executes the functions held within it.
125
135
 
126
136
  So long as your job has an array of functions as its default export, it will run in the new runtime.
127
137
 
@@ -1,7 +1,8 @@
1
1
  // src/util/expand-adaptors.ts
2
2
  var expand_adaptors_default = (names) => names?.map((name) => {
3
3
  if (typeof name === "string") {
4
- if (name.startsWith("@openfn/language-")) {
4
+ const [left] = name.split("=");
5
+ if (left.match("/") || left.endsWith(".js")) {
5
6
  return name;
6
7
  }
7
8
  return `@openfn/language-${name}`;
package/dist/index.js CHANGED
@@ -2,7 +2,7 @@
2
2
  import {
3
3
  DEFAULT_REPO_DIR,
4
4
  expand_adaptors_default
5
- } from "./chunk-Q2H7WYJB.js";
5
+ } from "./chunk-XYZNU5CH.js";
6
6
 
7
7
  // src/process/spawn.ts
8
8
  import path from "node:path";
@@ -163,6 +163,15 @@ var expandAdaptors = {
163
163
  setDefaultValue(opts2, "expandAdaptors", true);
164
164
  }
165
165
  };
166
+ var force = {
167
+ name: "force",
168
+ yargs: {
169
+ alias: ["f"],
170
+ boolean: true,
171
+ description: "Force metadata to be regenerated",
172
+ default: false
173
+ }
174
+ };
166
175
  var immutable = {
167
176
  name: "immutable",
168
177
  yargs: {
@@ -171,6 +180,17 @@ var immutable = {
171
180
  default: false
172
181
  }
173
182
  };
183
+ var ignoreImports = {
184
+ name: "ignore-imports",
185
+ yargs: {
186
+ description: "Don't auto-import references in compiled code. Can take a list of names to ignore."
187
+ },
188
+ ensure: (opts2) => {
189
+ if (typeof opts2.ignoreImports === "string") {
190
+ opts2.ignoreImports = opts2.ignoreImports.split(",").map((s) => s.trim());
191
+ }
192
+ }
193
+ };
174
194
  var getBaseDir = (opts2) => {
175
195
  const basePath = opts2.path ?? ".";
176
196
  if (basePath.endsWith(".js")) {
@@ -304,6 +324,7 @@ var options = [
304
324
  autoinstall,
305
325
  compile,
306
326
  immutable,
327
+ ignoreImports,
307
328
  jobPath,
308
329
  logJson,
309
330
  outputPath,
@@ -318,7 +339,13 @@ var options = [
318
339
  ];
319
340
  var executeCommand = {
320
341
  command: "execute [path]",
321
- desc: `Run an openfn job. Get more help by running openfn <command> help`,
342
+ desc: `Run an openfn job. Get more help by running openfn <command> help.
343
+
344
+ Execute will run a job/expression and write the output state to disk (to ./state.json unless otherwise specified)
345
+
346
+ By default only state.data will be written to the output. Include --no-strict-output to write the entire state object.
347
+
348
+ Remember to include the adaptor name with -a. Auto install adaptors with the -i flag.`,
322
349
  aliases: ["$0"],
323
350
  handler: ensure("execute", options),
324
351
  builder: (yargs2) => build(options, yargs2).positional("path", {
@@ -326,13 +353,16 @@ var executeCommand = {
326
353
  demandOption: true
327
354
  }).example(
328
355
  "openfn foo/job.js",
329
- "Reads foo/job.js, looks for state and output in foo"
356
+ "Execute foo/job.js with no adaptor and write the final state to foo/job.json"
357
+ ).example(
358
+ "openfn job.js -ia common",
359
+ "Execute job.js using @openfn/language-commom , with autoinstall enabled)"
330
360
  ).example(
331
- "openfn job.js -a common",
332
- "Run job.js using @openfn/language-common"
361
+ "openfn job.js -a common --log info",
362
+ "Execute job.js with common adaptor and info-level logging"
333
363
  ).example(
334
- "openfn install -a common",
335
- "Install the latest version of language-common to the repo"
364
+ "openfn compile job.js -a http",
365
+ "Compile job.js with the http adaptor and print the code to stdout"
336
366
  )
337
367
  };
338
368
  var command_default = executeCommand;
@@ -341,12 +371,14 @@ var command_default = executeCommand;
341
371
  var options2 = [
342
372
  expandAdaptors,
343
373
  adaptors,
374
+ ignoreImports,
344
375
  jobPath,
345
376
  logJson,
346
377
  override(outputStdout, {
347
378
  default: true
348
379
  }),
349
380
  outputPath,
381
+ repoDir,
350
382
  useAdaptorsMonorepo
351
383
  ];
352
384
  var compileCommand = {
@@ -402,8 +434,30 @@ var command_default5 = {
402
434
  builder: (yargs2) => yargs2.example("docs common fn", "Print help for the common fn operation")
403
435
  };
404
436
 
437
+ // src/metadata/command.ts
438
+ var options3 = [
439
+ expandAdaptors,
440
+ adaptors,
441
+ force,
442
+ logJson,
443
+ repoDir,
444
+ statePath,
445
+ stateStdin,
446
+ useAdaptorsMonorepo
447
+ ];
448
+ var command_default6 = {
449
+ command: "metadata",
450
+ desc: "Generate metadata for an adaptor config",
451
+ handler: ensure("metadata", options3),
452
+ builder: (yargs2) => build(options3, yargs2).example(
453
+ "metadata -a salesforce -s tmp/state.json",
454
+ "Generate salesforce metadata from config in state.json"
455
+ )
456
+ };
457
+
405
458
  // src/cli.ts
406
- var cmd = yargs(hideBin(process.argv)).command(command_default).command(command_default2).command(install).command(repo).command(command_default3).command(command_default5).command(command_default4).option("log", {
459
+ var y = yargs(hideBin(process.argv));
460
+ var cmd = y.command(command_default).command(command_default2).command(install).command(repo).command(command_default3).command(command_default5).command(command_default6).command(command_default4).option("log", {
407
461
  alias: ["l"],
408
462
  description: "Set the default log level to none, default, info or debug",
409
463
  array: true
@@ -418,7 +472,7 @@ var cmd = yargs(hideBin(process.argv)).command(command_default).command(command_
418
472
  handler: (argv) => {
419
473
  argv.command = "version";
420
474
  }
421
- }).help();
475
+ }).wrap(y.terminalWidth()).help();
422
476
 
423
477
  // src/index.ts
424
478
  var opts = cmd.parse();
@@ -10,7 +10,7 @@ import {
10
10
  expand_adaptors_default,
11
11
  logger_default,
12
12
  printDuration
13
- } from "../chunk-Q2H7WYJB.js";
13
+ } from "../chunk-XYZNU5CH.js";
14
14
 
15
15
  // src/execute/handler.ts
16
16
  import { readFile } from "node:fs/promises";
@@ -71,11 +71,11 @@ var execute_default = (code, state, opts) => {
71
71
  function parseAdaptors(opts) {
72
72
  const adaptors = {};
73
73
  opts.adaptors.reduce((obj, exp) => {
74
- const [module, path4] = exp.split("=");
74
+ const [module, path5] = exp.split("=");
75
75
  const { name, version } = getNameAndVersion(module);
76
76
  const info = {};
77
- if (path4) {
78
- info.path = path4;
77
+ if (path5) {
78
+ info.path = path5;
79
79
  }
80
80
  if (version) {
81
81
  info.version = version;
@@ -92,7 +92,6 @@ import { getModulePath } from "@openfn/runtime";
92
92
  var compile_default = async (opts, log) => {
93
93
  log.debug("Loading job...");
94
94
  const compilerOptions = await loadTransformOptions(opts, log);
95
- compilerOptions.logger = logger_default(COMPILER, opts);
96
95
  const job = compile(opts.jobSource || opts.jobPath, compilerOptions);
97
96
  if (opts.jobPath) {
98
97
  log.success(`Compiled job from ${opts.jobPath}`);
@@ -109,10 +108,10 @@ var stripVersionSpecifier = (specifier) => {
109
108
  return specifier;
110
109
  };
111
110
  var resolveSpecifierPath = async (pattern, repoDir, log) => {
112
- const [specifier, path4] = pattern.split("=");
113
- if (path4) {
114
- log.debug(`Resolved ${specifier} to path: ${path4}`);
115
- return path4;
111
+ const [specifier, path5] = pattern.split("=");
112
+ if (path5) {
113
+ log.debug(`Resolved ${specifier} to path: ${path5}`);
114
+ return path5;
116
115
  }
117
116
  const repoPath = await getModulePath(specifier, repoDir, log);
118
117
  if (repoPath) {
@@ -122,22 +121,19 @@ var resolveSpecifierPath = async (pattern, repoDir, log) => {
122
121
  };
123
122
  var loadTransformOptions = async (opts, log) => {
124
123
  const options = {
125
- logger: log
124
+ logger: log || logger_default(COMPILER, opts)
126
125
  };
127
- if (opts.adaptors?.length) {
126
+ if (opts.adaptors?.length && opts.ignoreImports != true) {
128
127
  let exports;
129
128
  const [pattern] = opts.adaptors;
130
129
  const [specifier] = pattern.split("=");
131
- log.debug(`Attempting to preload typedefs for ${specifier}`);
132
- const path4 = await resolveSpecifierPath(pattern, opts.repoDir, log);
133
- if (path4) {
130
+ log.debug(`Attempting to preload types for ${specifier}`);
131
+ const path5 = await resolveSpecifierPath(pattern, opts.repoDir, log);
132
+ if (path5) {
134
133
  try {
135
- exports = await preloadAdaptorExports(path4);
136
- if (exports) {
137
- log.info(`Loaded typedefs for ${specifier}`);
138
- }
134
+ exports = await preloadAdaptorExports(path5, log);
139
135
  } catch (e) {
140
- log.error(`Failed to load adaptor typedefs from path ${path4}`);
136
+ log.error(`Failed to load adaptor typedefs from path ${path5}`);
141
137
  log.error(e);
142
138
  }
143
139
  }
@@ -145,6 +141,7 @@ var loadTransformOptions = async (opts, log) => {
145
141
  log.debug(`No module exports found for ${pattern}`);
146
142
  }
147
143
  options["add-imports"] = {
144
+ ignore: opts.ignoreImports,
148
145
  adaptor: {
149
146
  name: stripVersionSpecifier(specifier),
150
147
  exports,
@@ -366,20 +363,20 @@ var RETRY_COUNT = 20;
366
363
  var TIMEOUT_MS = 1e3 * 60;
367
364
  var actualDocGen = (specifier) => describePackage(specifier, {});
368
365
  var ensurePath = (filePath) => mkdirSync(path.dirname(filePath), { recursive: true });
369
- var generatePlaceholder = (path4) => {
370
- writeFileSync(path4, `{ "loading": true, "timestamp": ${Date.now()}}`);
366
+ var generatePlaceholder = (path5) => {
367
+ writeFileSync(path5, `{ "loading": true, "timestamp": ${Date.now()}}`);
371
368
  };
372
369
  var finish = (logger, resultPath) => {
373
370
  logger.success("Done! Docs can be found at:\n");
374
371
  logger.print(` ${path.resolve(resultPath)}`);
375
372
  };
376
- var generateDocs = async (specifier, path4, docgen, logger) => {
373
+ var generateDocs = async (specifier, path5, docgen, logger) => {
377
374
  const result = await docgen(specifier);
378
- await writeFile3(path4, JSON.stringify(result, null, 2));
379
- finish(logger, path4);
380
- return path4;
375
+ await writeFile3(path5, JSON.stringify(result, null, 2));
376
+ finish(logger, path5);
377
+ return path5;
381
378
  };
382
- var waitForDocs = async (docs, path4, logger, retryDuration = RETRY_DURATION) => {
379
+ var waitForDocs = async (docs, path5, logger, retryDuration = RETRY_DURATION) => {
383
380
  try {
384
381
  if (docs.hasOwnProperty("loading")) {
385
382
  logger.info("Docs are being loaded by another process. Waiting.");
@@ -391,19 +388,19 @@ var waitForDocs = async (docs, path4, logger, retryDuration = RETRY_DURATION) =>
391
388
  clearInterval(i);
392
389
  reject(new Error("Timed out waiting for docs to load"));
393
390
  }
394
- const updated = JSON.parse(readFileSync(path4, "utf8"));
391
+ const updated = JSON.parse(readFileSync(path5, "utf8"));
395
392
  if (!updated.hasOwnProperty("loading")) {
396
393
  logger.info("Docs found!");
397
394
  clearInterval(i);
398
- resolve(path4);
395
+ resolve(path5);
399
396
  }
400
397
  count++;
401
398
  }, retryDuration);
402
399
  });
403
400
  } else {
404
- logger.info(`Docs already written to cache at ${path4}`);
405
- finish(logger, path4);
406
- return path4;
401
+ logger.info(`Docs already written to cache at ${path5}`);
402
+ finish(logger, path5);
403
+ return path5;
407
404
  }
408
405
  } catch (e) {
409
406
  logger.error("Existing doc JSON corrupt. Aborting");
@@ -420,28 +417,28 @@ var docgenHandler = (options, logger, docgen = actualDocGen, retryDuration = RET
420
417
  process.exit(9);
421
418
  }
422
419
  logger.success(`Generating docs for ${specifier}`);
423
- const path4 = `${repoDir}/docs/${specifier}.json`;
424
- ensurePath(path4);
420
+ const path5 = `${repoDir}/docs/${specifier}.json`;
421
+ ensurePath(path5);
425
422
  const handleError = () => {
426
423
  logger.info("Removing placeholder");
427
- rmSync(path4);
424
+ rmSync(path5);
428
425
  };
429
426
  try {
430
- const existing = readFileSync(path4, "utf8");
427
+ const existing = readFileSync(path5, "utf8");
431
428
  const json = JSON.parse(existing);
432
429
  if (json && json.timeout && Date.now() - json.timeout >= TIMEOUT_MS) {
433
430
  logger.info(`Expired placeholder found. Removing.`);
434
- rmSync(path4);
431
+ rmSync(path5);
435
432
  throw new Error("TIMEOUT");
436
433
  }
437
- return waitForDocs(json, path4, logger, retryDuration);
434
+ return waitForDocs(json, path5, logger, retryDuration);
438
435
  } catch (e) {
439
436
  if (e.message !== "TIMEOUT") {
440
- logger.info(`Docs JSON not found at ${path4}`);
437
+ logger.info(`Docs JSON not found at ${path5}`);
441
438
  }
442
439
  logger.debug("Generating placeholder");
443
- generatePlaceholder(path4);
444
- return generateDocs(specifier, path4, docgen, logger).catch((e2) => {
440
+ generatePlaceholder(path5);
441
+ return generateDocs(specifier, path5, docgen, logger).catch((e2) => {
445
442
  logger.error("Error generating documentation");
446
443
  logger.error(e2);
447
444
  handleError();
@@ -489,15 +486,15 @@ var docsHandler = async (options, logger) => {
489
486
  logger.success(`Showing docs for ${adaptorName} v${version}`);
490
487
  }
491
488
  logger.info("Generating/loading documentation...");
492
- const path4 = await handler_default4(
489
+ const path5 = await handler_default4(
493
490
  {
494
491
  specifier: `${name}@${version}`,
495
492
  repoDir
496
493
  },
497
494
  createNullLogger()
498
495
  );
499
- if (path4) {
500
- const source = await readFile2(path4, "utf8");
496
+ if (path5) {
497
+ const source = await readFile2(path5, "utf8");
501
498
  const data = JSON.parse(source);
502
499
  let desc;
503
500
  if (operation) {
@@ -518,9 +515,110 @@ var docsHandler = async (options, logger) => {
518
515
  };
519
516
  var handler_default5 = docsHandler;
520
517
 
518
+ // src/metadata/cache.ts
519
+ import { createHash } from "node:crypto";
520
+ import { readFileSync as readFileSync2 } from "node:fs";
521
+ import path2 from "node:path";
522
+ import { writeFile as writeFile4, mkdir } from "node:fs/promises";
523
+ var getPath = (repoDir, key) => `${repoDir}/meta/${key}.json`;
524
+ var generateKey = (config) => createHash("sha256").update(JSON.stringify(config)).digest("hex");
525
+ var get = (repoPath, key) => {
526
+ try {
527
+ const data = readFileSync2(getPath(repoPath, key));
528
+ const json = JSON.parse(data);
529
+ return json;
530
+ } catch (e) {
531
+ return null;
532
+ }
533
+ };
534
+ var set = async (repoPath, key, data) => {
535
+ const fullPath = getPath(repoPath, key);
536
+ await mkdir(path2.dirname(fullPath), { recursive: true });
537
+ await writeFile4(fullPath, JSON.stringify(data));
538
+ };
539
+ var cache_default = { get, set, generateKey, getPath };
540
+
541
+ // src/metadata/handler.ts
542
+ import { getModuleEntryPoint } from "@openfn/runtime";
543
+ var decorateMetadata = (metadata) => {
544
+ metadata.created = new Date().toISOString();
545
+ };
546
+ var getAdaptorPath = async (adaptor, logger, repoDir) => {
547
+ let adaptorPath;
548
+ let adaptorSpecifier;
549
+ if (adaptor.match("=")) {
550
+ const parts = adaptor.split("=");
551
+ adaptorSpecifier = parts[0];
552
+ adaptorPath = parts[1];
553
+ } else {
554
+ if (adaptor.endsWith(".js")) {
555
+ return adaptor;
556
+ }
557
+ adaptorSpecifier = adaptor;
558
+ if (adaptor.startsWith("/")) {
559
+ adaptorPath = adaptor;
560
+ }
561
+ }
562
+ if (!adaptorPath || !adaptorPath.endsWith("js")) {
563
+ const entry = await getModuleEntryPoint(
564
+ adaptorSpecifier,
565
+ adaptorPath,
566
+ repoDir,
567
+ logger
568
+ );
569
+ adaptorPath = entry?.path;
570
+ }
571
+ logger.debug("loading adaptor from", adaptorPath);
572
+ return adaptorPath;
573
+ };
574
+ var metadataHandler = async (options, logger) => {
575
+ const { repoDir, adaptors } = options;
576
+ const adaptor = adaptors[0];
577
+ const state = await load_state_default(options, logger);
578
+ logger.success(`Generating metadata`);
579
+ const config = state.configuration;
580
+ logger.info("config:", config);
581
+ if (!config || Object.keys(config).length === 0) {
582
+ logger.error("ERROR: Invalid configuration passed");
583
+ process.exit(1);
584
+ }
585
+ const finish2 = () => {
586
+ logger.success("Done!");
587
+ logger.print(cache_default.getPath(repoDir, id));
588
+ };
589
+ const id = cache_default.generateKey(config);
590
+ if (!options.force) {
591
+ logger.debug("config hash: ", id);
592
+ const cached = await cache_default.get(repoDir, id);
593
+ if (cached) {
594
+ logger.success("Returning metadata from cache");
595
+ return finish2();
596
+ }
597
+ }
598
+ try {
599
+ const adaptorPath = await getAdaptorPath(adaptor, logger, options.repoDir);
600
+ const mod = await import(adaptorPath);
601
+ if (mod.metadata) {
602
+ logger.info("Metadata function found. Generating metadata...");
603
+ const result = await mod.metadata(config);
604
+ decorateMetadata(result);
605
+ await cache_default.set(repoDir, id, result);
606
+ finish2();
607
+ } else {
608
+ logger.error("No metadata helper found");
609
+ process.exit(1);
610
+ }
611
+ } catch (e) {
612
+ logger.error("Exception while generating metadata");
613
+ logger.error(e);
614
+ process.exit(1);
615
+ }
616
+ };
617
+ var handler_default6 = metadataHandler;
618
+
521
619
  // src/util/use-adaptors-repo.ts
522
620
  import { readFile as readFile3 } from "node:fs/promises";
523
- import path2 from "node:path";
621
+ import path3 from "node:path";
524
622
  import assert from "node:assert";
525
623
  import { getNameAndVersion as getNameAndVersion4 } from "@openfn/runtime";
526
624
  var validateMonoRepo = async (repoPath, log) => {
@@ -544,7 +642,7 @@ var updatePath = (adaptor, repoPath, log) => {
544
642
  );
545
643
  }
546
644
  const shortName = name.replace("@openfn/language-", "");
547
- const abspath = path2.resolve(repoPath, "packages", shortName);
645
+ const abspath = path3.resolve(repoPath, "packages", shortName);
548
646
  return `${name}=${abspath}`;
549
647
  };
550
648
  var useAdaptorsRepo = async (adaptors, repoPath, log) => {
@@ -560,8 +658,8 @@ var useAdaptorsRepo = async (adaptors, repoPath, log) => {
560
658
  var use_adaptors_repo_default = useAdaptorsRepo;
561
659
 
562
660
  // src/util/print-versions.ts
563
- import { readFileSync as readFileSync2 } from "node:fs";
564
- import path3 from "node:path";
661
+ import { readFileSync as readFileSync3 } from "node:fs";
662
+ import path4 from "node:path";
565
663
  import { getNameAndVersion as getNameAndVersion5 } from "@openfn/runtime";
566
664
  import { mainSymbols } from "figures";
567
665
  var NODE = "node.js";
@@ -571,7 +669,7 @@ var COMPILER2 = "compiler";
571
669
  var { triangleRightSmall: t } = mainSymbols;
572
670
  var loadVersionFromPath = (adaptorPath) => {
573
671
  try {
574
- const pkg = JSON.parse(readFileSync2(path3.resolve(adaptorPath, "package.json"), "utf8"));
672
+ const pkg = JSON.parse(readFileSync3(path4.resolve(adaptorPath, "package.json"), "utf8"));
575
673
  return pkg.version;
576
674
  } catch (e) {
577
675
  return "unknown";
@@ -641,6 +739,7 @@ var handlers = {
641
739
  test: handler_default3,
642
740
  docgen: handler_default4,
643
741
  docs: handler_default5,
742
+ metadata: handler_default6,
644
743
  ["repo-clean"]: clean,
645
744
  ["repo-install"]: install,
646
745
  ["repo-pwd"]: pwd,
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@openfn/cli",
3
- "version": "0.0.31",
3
+ "version": "0.0.34",
4
4
  "description": "CLI devtools for the openfn toolchain.",
5
5
  "engines": {
6
6
  "node": ">=18",
@@ -41,10 +41,10 @@
41
41
  "rimraf": "^3.0.2",
42
42
  "treeify": "^1.1.0",
43
43
  "yargs": "^17.5.1",
44
- "@openfn/compiler": "0.0.25",
45
- "@openfn/describe-package": "0.0.14",
46
- "@openfn/logger": "0.0.10",
47
- "@openfn/runtime": "0.0.19"
44
+ "@openfn/compiler": "0.0.28",
45
+ "@openfn/describe-package": "0.0.15",
46
+ "@openfn/logger": "0.0.12",
47
+ "@openfn/runtime": "0.0.21"
48
48
  },
49
49
  "files": [
50
50
  "dist",