@openfn/cli 0.0.27 → 0.0.29

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/README.md CHANGED
@@ -46,11 +46,13 @@ openfn path/to/job.js -ia adaptor-name
46
46
 
47
47
  You MUST specify which adaptor to use. Pass the `-i` flag to auto-install that adaptor (it's safe to do this redundantly).
48
48
 
49
- If output.json and state.json are not passed, the CLI will look for them next to the job.js file. You can pass a path to state by adding `-s path/to/state.json`, and output by passing `-o path/to/output.json`. You can use `-S` and `-O` to pass state through stdin and return the output through stdout.
49
+ If output.json is not passed, the CLI will create an `output.json` next to the job file. You can pass a path to state by adding `-s path/to/state.json`, and output by passing `-o path/to/output.json`. You can use `-S` and `-O` to pass state through stdin and return the output through stdout.
50
50
 
51
51
  The CLI can auto-install language adaptors to its own privately maintained repo. Run `openfn repo list` to see where the repo is, and what's in it. Set the `OPENFN_REPO_DIR` env var to specify the repo folder. When autoinstalling, the CLI will check to see if a matching version is found in the repo.
52
52
 
53
- You can specify adaptors with a shorthand (`http`) or use the full package name (`@openfn/language-http`). You can add a specific version like `http@2.0.0`. You can pass a path to a locally installed adaptor like `http=/repo/openfn/adaptors/my-http-build`. Set the OPENFN_ADAPTORS_REPO env var to load adaptors straight out of the monorepo (pass the `--no-adaptors-repo` flag to disable this for a single run).
53
+ You can specify adaptors with a shorthand (`http`) or use the full package name (`@openfn/language-http`). You can add a specific version like `http@2.0.0`. You can pass a path to a locally installed adaptor like `http=/repo/openfn/adaptors/my-http-build`.
54
+
55
+ If you have the adaptors monorepo set up on your machine, you can also run from that. Pass the `-m` flag to load from the monorepo. Set the monorepo location by setting the OPENFN_ADAPTORS_REPO env var to a valid path. This runs from the built package, so remember to build an adaptor before running!
54
56
 
55
57
  You can pass `--log info` to get more feedback about what's happening, or `--log debug` for more details than you could ever use.
56
58
 
@@ -91,6 +93,16 @@ If something unexpected happens during a command, your first step should be to r
91
93
 
92
94
  `debug` level logging is highly verbose and aims to tell you everything that's going on under-the hood. This is aimed mostly at CLI/runtime developers and can be very useful for debugging problems.
93
95
 
96
 + ## Structured/JSON logging
97
+
98
 + By default, all logs will be printed as human-readable strings.
99
+
100
+ For a more structured output, you can emit logs as JSON objects with `level`, `name` and `message` properties:
101
+ ```
102
+ { level: 'info', name: 'CLI', message: ['Loaded adaptor'] }
103
+ ```
104
+
105
+ Pass `--log-json` to the CLI to do this. You can also set the OPENFN_LOG_JSON env var (and use `--no-log-json` to disable).
94
106
 
95
107
  ## Compilation
96
108
 
package/dist/index.js CHANGED
@@ -95,6 +95,10 @@ var executeCommand = {
95
95
  return applyExecuteOptions(yargs2).option("immutable", {
96
96
  boolean: true,
97
97
  description: "Treat state as immutable"
98
+ }).option("use-adaptors-monorepo", {
99
+ alias: "m",
100
+ boolean: true,
101
+ description: "Load adaptors from the monorepo. The OPENFN_ADAPTORS_REPO env var must be set to a valid path"
98
102
  }).option("autoinstall", {
99
103
  alias: "i",
100
104
  boolean: true,
@@ -116,7 +120,7 @@ var executeCommand = {
116
120
  description: "Skip compilation"
117
121
  }).option("no-strict-output", {
118
122
  boolean: true,
119
- description: "Allow properties other than data to be returned in the output."
123
+ description: "Allow properties other than data to be returned in the output"
120
124
  }).example(
121
125
  "openfn foo/job.js",
122
126
  "Reads foo/job.js, looks for state and output in foo"
@@ -141,7 +145,7 @@ var applyExecuteOptions = (yargs2) => yargs2.positional("path", {
141
145
  description: "Print output to stdout (instead of a file)"
142
146
  }).option("adaptors", {
143
147
  alias: ["a", "adaptor"],
144
- description: "A language adaptor to use for the job. Short-form names are allowed. Can include an explicit path to a local adaptor build.",
148
+ description: "A language adaptor to use for the job. Short-form names are allowed. Can include an explicit path to a local adaptor build",
145
149
  array: true
146
150
  }).option("no-expand", {
147
151
  description: "Don attempt to auto-expand adaptor shorthand names",
@@ -209,6 +213,9 @@ var cmd = yargs(hideBin(process.argv)).command(command_default).command(command_
209
213
  alias: ["l"],
210
214
  description: "Set the default log level to none, default, info or debug",
211
215
  array: true
216
+ }).option("log-json", {
217
+ description: "Output all logs as JSON objects",
218
+ boolean: true
212
219
  }).example("openfn execute help", "Show documentation for the execute command").example(
213
220
  "openfn docs @openfn/language-common each",
214
221
  "Get more help on the common.each command"
@@ -12,8 +12,11 @@ var namespaces = {
12
12
  [JOB]: "JOB"
13
13
  };
14
14
  var createLogger = (name = "", options) => {
15
- const logOptions = options.log || {};
15
+ const logOptions = options.log || { json: true };
16
16
  let level = logOptions[name] || logOptions.default || "default";
17
+ if (options.logJson) {
18
+ logOptions.json = true;
19
+ }
17
20
  return actualCreateLogger(namespaces[name] || name, {
18
21
  level,
19
22
  ...logOptions
@@ -78,23 +81,28 @@ function ensureOpts(basePath = ".", opts) {
78
81
  adaptors: opts.adaptors || [],
79
82
  autoinstall: opts.autoinstall,
80
83
  command: opts.command,
84
+ expand: opts.expand !== false,
81
85
  force: opts.force || false,
82
- repoDir: opts.repoDir || process.env.OPENFN_REPO_DIR || DEFAULT_REPO_DIR,
86
+ immutable: opts.immutable || false,
87
+ logJson: typeof opts.logJson == "boolean" ? opts.logJson : Boolean(process.env.OPENFN_LOG_JSON),
83
88
  noCompile: Boolean(opts.noCompile),
84
- expand: opts.expand !== false,
85
- outputStdout: Boolean(opts.outputStdout),
86
89
  operation: opts.operation,
90
+ outputStdout: Boolean(opts.outputStdout),
87
91
  packages: opts.packages,
88
- stateStdin: opts.stateStdin,
89
- timeout: opts.timeout,
92
+ repoDir: opts.repoDir || process.env.OPENFN_REPO_DIR || DEFAULT_REPO_DIR,
93
+ skipAdaptorValidation: opts.skipAdaptorValidation ?? false,
90
94
  specifier: opts.specifier,
95
+ stateStdin: opts.stateStdin,
91
96
  strictOutput: opts.strictOutput ?? true,
92
- skipAdaptorValidation: opts.skipAdaptorValidation ?? false,
93
- immutable: opts.immutable || false
97
+ statePath: opts.statePath,
98
+ timeout: opts.timeout
94
99
  };
95
100
  const set = (key, value) => {
96
101
  newOpts[key] = opts.hasOwnProperty(key) ? opts[key] : value;
97
102
  };
103
+ if (opts.useAdaptorsMonorepo) {
104
+ newOpts.monorepoPath = process.env.OPENFN_ADAPTORS_REPO || "ERR";
105
+ }
98
106
  let baseDir = basePath;
99
107
  if (basePath.endsWith(".js")) {
100
108
  baseDir = path.dirname(basePath);
@@ -102,7 +110,6 @@ function ensureOpts(basePath = ".", opts) {
102
110
  } else {
103
111
  set("jobPath", `${baseDir}/job.js`);
104
112
  }
105
- set("statePath", `${baseDir}/state.json`);
106
113
  if (!opts.outputStdout) {
107
114
  set(
108
115
  "outputPath",
@@ -116,31 +123,34 @@ function ensureOpts(basePath = ".", opts) {
116
123
  // src/execute/load-state.ts
117
124
  import fs from "node:fs/promises";
118
125
  var load_state_default = async (opts, log) => {
126
+ const { stateStdin, statePath } = opts;
119
127
  log.debug("Load state...");
120
- if (opts.stateStdin) {
128
+ if (stateStdin) {
121
129
  try {
122
- const json = JSON.parse(opts.stateStdin);
130
+ const json = JSON.parse(stateStdin);
123
131
  log.success("Read state from stdin");
124
132
  log.debug("state:", json);
125
133
  return json;
126
134
  } catch (e) {
127
135
  log.error("Failed to load state from stdin");
128
- log.error(opts.stateStdin);
136
+ log.error(stateStdin);
129
137
  log.error(e);
130
138
  process.exit(1);
131
139
  }
132
140
  }
133
- try {
134
- const str = await fs.readFile(opts.statePath, "utf8");
135
- const json = JSON.parse(str);
136
- log.success(`Loaded state from ${opts.statePath}`);
137
- log.debug("state:", json);
138
- return json;
139
- } catch (e) {
140
- log.warn(`Error loading state from ${opts.statePath}`);
141
- log.warn(e);
141
+ if (statePath) {
142
+ try {
143
+ const str = await fs.readFile(statePath, "utf8");
144
+ const json = JSON.parse(str);
145
+ log.success(`Loaded state from ${statePath}`);
146
+ log.debug("state:", json);
147
+ return json;
148
+ } catch (e) {
149
+ log.warn(`Error loading state from ${statePath}`);
150
+ log.warn(e);
151
+ }
142
152
  }
143
- log.warn("Using default state { data: {}, configuration: {}");
153
+ log.info("No state provided - using default state { data: {}, configuration: {}");
144
154
  return {
145
155
  data: {},
146
156
  configuration: {}
@@ -164,11 +174,11 @@ var execute_default = (code, state, opts) => {
164
174
  function parseAdaptors(opts) {
165
175
  const adaptors = {};
166
176
  opts.adaptors.reduce((obj, exp) => {
167
- const [module, path3] = exp.split("=");
177
+ const [module, path5] = exp.split("=");
168
178
  const { name, version } = getNameAndVersion(module);
169
179
  const info = {};
170
- if (path3) {
171
- info.path = path3;
180
+ if (path5) {
181
+ info.path = path5;
172
182
  }
173
183
  if (version) {
174
184
  info.version = version;
@@ -193,7 +203,7 @@ var compile_default = async (opts, log) => {
193
203
  } else {
194
204
  const complilerOptions = await loadTransformOptions(opts, log);
195
205
  complilerOptions.logger = logger_default(COMPILER, opts);
196
- job = compile(opts.jobPath, complilerOptions);
206
+ job = compile(opts.jobSource || opts.jobPath, complilerOptions);
197
207
  if (opts.jobPath) {
198
208
  log.success(`Compiled job from ${opts.jobPath}`);
199
209
  } else {
@@ -210,10 +220,10 @@ var stripVersionSpecifier = (specifier) => {
210
220
  return specifier;
211
221
  };
212
222
  var resolveSpecifierPath = async (pattern, repoDir, log) => {
213
- const [specifier, path3] = pattern.split("=");
214
- if (path3) {
215
- log.debug(`Resolved ${specifier} to path: ${path3}`);
216
- return path3;
223
+ const [specifier, path5] = pattern.split("=");
224
+ if (path5) {
225
+ log.debug(`Resolved ${specifier} to path: ${path5}`);
226
+ return path5;
217
227
  }
218
228
  const repoPath = await getModulePath(specifier, repoDir, log);
219
229
  if (repoPath) {
@@ -230,15 +240,15 @@ var loadTransformOptions = async (opts, log) => {
230
240
  const [pattern] = opts.adaptors;
231
241
  const [specifier] = pattern.split("=");
232
242
  log.debug(`Attempting to preload typedefs for ${specifier}`);
233
- const path3 = await resolveSpecifierPath(pattern, opts.repoDir, log);
234
- if (path3) {
243
+ const path5 = await resolveSpecifierPath(pattern, opts.repoDir, log);
244
+ if (path5) {
235
245
  try {
236
- exports = await preloadAdaptorExports(path3);
246
+ exports = await preloadAdaptorExports(path5);
237
247
  if (exports) {
238
248
  log.info(`Loaded typedefs for ${specifier}`);
239
249
  }
240
250
  } catch (e) {
241
- log.error(`Failed to load adaptor typedefs from path ${path3}`);
251
+ log.error(`Failed to load adaptor typedefs from path ${path5}`);
242
252
  log.error(e);
243
253
  }
244
254
  }
@@ -390,7 +400,7 @@ var validate_adaptors_default = validateAdaptors;
390
400
 
391
401
  // src/execute/handler.ts
392
402
  var getAutoinstallTargets = (options) => {
393
- if (options.autoinstall && options.adaptors) {
403
+ if (options.adaptors) {
394
404
  return options.adaptors?.filter((a) => !/=/.test(a));
395
405
  }
396
406
  return [];
@@ -398,11 +408,17 @@ var getAutoinstallTargets = (options) => {
398
408
  var executeHandler = async (options, logger) => {
399
409
  const start = new Date().getTime();
400
410
  await validate_adaptors_default(options, logger);
401
- const autoInstallTargets = getAutoinstallTargets(options);
402
- if (autoInstallTargets.length) {
403
- const { repoDir } = options;
404
- logger.info("Auto-installing language adaptors");
405
- await install({ packages: autoInstallTargets, repoDir }, logger);
411
+ const { repoDir, monorepoPath, autoinstall } = options;
412
+ if (autoinstall) {
413
+ if (monorepoPath) {
414
+ logger.warn("Skipping auto-install as monorepo is being used");
415
+ } else {
416
+ const autoInstallTargets = getAutoinstallTargets(options);
417
+ if (autoInstallTargets.length) {
418
+ logger.info("Auto-installing language adaptors");
419
+ await install({ packages: autoInstallTargets, repoDir }, logger);
420
+ }
421
+ }
406
422
  }
407
423
  const state = await load_state_default(options, logger);
408
424
  const code = await compile_default(options, logger);
@@ -435,20 +451,18 @@ var compileHandler = async (options, logger) => {
435
451
  var handler_default2 = compileHandler;
436
452
 
437
453
  // src/test/handler.ts
454
+ var sillyMessage = "Calculating the answer to life, the universe, and everything...";
438
455
  var testHandler = async (options, logger) => {
439
456
  logger.log("Running test job...");
440
- options.jobPath = `const fn = () => state => state * 2; fn()`;
457
+ options.jobSource = `const fn = () => state => { console.log('${sillyMessage}'); return state * 2; } ; fn()`;
458
+ delete options.jobPath;
441
459
  if (!options.stateStdin) {
442
- logger.warn("No state detected: pass -S <number> to provide some state");
460
+ logger.debug("No state provided: try -S <number> to provide some state");
443
461
  options.stateStdin = "21";
444
462
  }
445
463
  const silentLogger = createNullLogger();
446
464
  const state = await load_state_default(options, silentLogger);
447
465
  const code = await compile_default(options, logger);
448
- logger.break();
449
- logger.info("Compiled job:", "\n", code);
450
- logger.break();
451
- logger.info("Running job...");
452
466
  const result = await execute_default(code, state, options);
453
467
  logger.success(`Result: ${result}`);
454
468
  return result;
@@ -466,20 +480,20 @@ var RETRY_COUNT = 20;
466
480
  var TIMEOUT_MS = 1e3 * 60;
467
481
  var actualDocGen = (specifier) => describePackage(specifier, {});
468
482
  var ensurePath = (filePath) => mkdirSync(path2.dirname(filePath), { recursive: true });
469
- var generatePlaceholder = (path3) => {
470
- writeFileSync(path3, `{ "loading": true, "timestamp": ${Date.now()}}`);
483
+ var generatePlaceholder = (path5) => {
484
+ writeFileSync(path5, `{ "loading": true, "timestamp": ${Date.now()}}`);
471
485
  };
472
486
  var finish = (logger, resultPath) => {
473
487
  logger.success("Done! Docs can be found at:\n");
474
488
  logger.print(` ${path2.resolve(resultPath)}`);
475
489
  };
476
- var generateDocs = async (specifier, path3, docgen, logger) => {
490
+ var generateDocs = async (specifier, path5, docgen, logger) => {
477
491
  const result = await docgen(specifier);
478
- await writeFile3(path3, JSON.stringify(result, null, 2));
479
- finish(logger, path3);
480
- return path3;
492
+ await writeFile3(path5, JSON.stringify(result, null, 2));
493
+ finish(logger, path5);
494
+ return path5;
481
495
  };
482
- var waitForDocs = async (docs, path3, logger, retryDuration = RETRY_DURATION) => {
496
+ var waitForDocs = async (docs, path5, logger, retryDuration = RETRY_DURATION) => {
483
497
  try {
484
498
  if (docs.hasOwnProperty("loading")) {
485
499
  logger.info("Docs are being loaded by another process. Waiting.");
@@ -491,19 +505,19 @@ var waitForDocs = async (docs, path3, logger, retryDuration = RETRY_DURATION) =>
491
505
  clearInterval(i);
492
506
  reject(new Error("Timed out waiting for docs to load"));
493
507
  }
494
- const updated = JSON.parse(readFileSync(path3, "utf8"));
508
+ const updated = JSON.parse(readFileSync(path5, "utf8"));
495
509
  if (!updated.hasOwnProperty("loading")) {
496
510
  logger.info("Docs found!");
497
511
  clearInterval(i);
498
- resolve(path3);
512
+ resolve(path5);
499
513
  }
500
514
  count++;
501
515
  }, retryDuration);
502
516
  });
503
517
  } else {
504
- logger.info(`Docs already written to cache at ${path3}`);
505
- finish(logger, path3);
506
- return path3;
518
+ logger.info(`Docs already written to cache at ${path5}`);
519
+ finish(logger, path5);
520
+ return path5;
507
521
  }
508
522
  } catch (e) {
509
523
  logger.error("Existing doc JSON corrupt. Aborting");
@@ -520,28 +534,28 @@ var docgenHandler = (options, logger, docgen = actualDocGen, retryDuration = RET
520
534
  process.exit(9);
521
535
  }
522
536
  logger.success(`Generating docs for ${specifier}`);
523
- const path3 = `${repoDir}/docs/${specifier}.json`;
524
- ensurePath(path3);
537
+ const path5 = `${repoDir}/docs/${specifier}.json`;
538
+ ensurePath(path5);
525
539
  const handleError = () => {
526
540
  logger.info("Removing placeholder");
527
- rmSync(path3);
541
+ rmSync(path5);
528
542
  };
529
543
  try {
530
- const existing = readFileSync(path3, "utf8");
544
+ const existing = readFileSync(path5, "utf8");
531
545
  const json = JSON.parse(existing);
532
546
  if (json && json.timeout && Date.now() - json.timeout >= TIMEOUT_MS) {
533
547
  logger.info(`Expired placeholder found. Removing.`);
534
- rmSync(path3);
548
+ rmSync(path5);
535
549
  throw new Error("TIMEOUT");
536
550
  }
537
- return waitForDocs(json, path3, logger, retryDuration);
551
+ return waitForDocs(json, path5, logger, retryDuration);
538
552
  } catch (e) {
539
553
  if (e.message !== "TIMEOUT") {
540
- logger.info(`Docs JSON not found at ${path3}`);
554
+ logger.info(`Docs JSON not found at ${path5}`);
541
555
  }
542
556
  logger.debug("Generating placeholder");
543
- generatePlaceholder(path3);
544
- return generateDocs(specifier, path3, docgen, logger).catch((e2) => {
557
+ generatePlaceholder(path5);
558
+ return generateDocs(specifier, path5, docgen, logger).catch((e2) => {
545
559
  logger.error("Error generating documentation");
546
560
  logger.error(e2);
547
561
  handleError();
@@ -589,15 +603,15 @@ var docsHandler = async (options, logger) => {
589
603
  logger.success(`Showing docs for ${adaptorName} v${version}`);
590
604
  }
591
605
  logger.info("Generating/loading documentation...");
592
- const path3 = await handler_default4(
606
+ const path5 = await handler_default4(
593
607
  {
594
608
  specifier: `${name}@${version}`,
595
609
  repoDir
596
610
  },
597
611
  createNullLogger()
598
612
  );
599
- if (path3) {
600
- const source = await readFile(path3, "utf8");
613
+ if (path5) {
614
+ const source = await readFile(path5, "utf8");
601
615
  const data = JSON.parse(source);
602
616
  let desc;
603
617
  if (operation) {
@@ -618,31 +632,119 @@ var docsHandler = async (options, logger) => {
618
632
  };
619
633
  var handler_default5 = docsHandler;
620
634
 
635
+ // src/util/use-adaptors-repo.ts
636
+ import { readFile as readFile2 } from "node:fs/promises";
637
+ import path3 from "node:path";
638
+ import assert from "node:assert";
639
+ import { getNameAndVersion as getNameAndVersion4 } from "@openfn/runtime";
640
+ var validateMonoRepo = async (repoPath, log) => {
641
+ try {
642
+ const raw = await readFile2(`${repoPath}/package.json`, "utf8");
643
+ const pkg = JSON.parse(raw);
644
+ assert(pkg.name === "adaptors");
645
+ } catch (e) {
646
+ log.error(`ERROR: Adaptors Monorepo not found at ${repoPath}`);
647
+ process.exit(9);
648
+ }
649
+ };
650
+ var updatePath = (adaptor, repoPath, log) => {
651
+ if (adaptor.match("=")) {
652
+ return adaptor;
653
+ }
654
+ const { name, version } = getNameAndVersion4(adaptor);
655
+ if (version) {
656
+ log.warn(
657
+ `Warning: Ignoring version specifier on ${adaptor} as loading from the adaptors monorepo`
658
+ );
659
+ }
660
+ const shortName = name.replace("@openfn/language-", "");
661
+ const abspath = path3.resolve(repoPath, "packages", shortName);
662
+ return `${name}=${abspath}`;
663
+ };
664
+ var useAdaptorsRepo = async (adaptors, repoPath, log) => {
665
+ await validateMonoRepo(repoPath, log);
666
+ log.success(`Loading adaptors from monorepo at ${repoPath}`);
667
+ const updatedAdaptors = adaptors.map((a) => {
668
+ const p = updatePath(a, repoPath, log);
669
+ log.info(`Mapped adaptor ${a} to monorepo: ${p.split("=")[1]}`);
670
+ return p;
671
+ });
672
+ return updatedAdaptors;
673
+ };
674
+ var use_adaptors_repo_default = useAdaptorsRepo;
675
+
621
676
  // src/util/print-versions.ts
677
+ import { readFileSync as readFileSync2 } from "node:fs";
678
+ import path4 from "node:path";
679
+ import { getNameAndVersion as getNameAndVersion5 } from "@openfn/runtime";
622
680
  import { mainSymbols } from "figures";
623
- import { getNameAndVersion as getNameAndVersion4 } from "@openfn/runtime";
681
+ var NODE = "node.js";
682
+ var CLI2 = "cli";
683
+ var RUNTIME2 = "runtime";
684
+ var COMPILER2 = "compiler";
624
685
  var { triangleRightSmall: t } = mainSymbols;
686
+ var loadVersionFromPath = (adaptorPath) => {
687
+ try {
688
+ const pkg = JSON.parse(readFileSync2(path4.resolve(adaptorPath, "package.json"), "utf8"));
689
+ return pkg.version;
690
+ } catch (e) {
691
+ return "unknown";
692
+ }
693
+ };
625
694
  var printVersions = async (logger, options = {}) => {
626
- const prefix = (str) => ` ${t} ${str.padEnd(options.adaptors ? 16 : 8, " ")}`;
695
+ const { adaptors, logJson } = options;
696
+ let adaptor = "";
697
+ if (adaptors && adaptors.length) {
698
+ adaptor = adaptors[0];
699
+ }
700
+ const longest = Math.max(...[
701
+ NODE,
702
+ CLI2,
703
+ RUNTIME2,
704
+ COMPILER2,
705
+ adaptor
706
+ ].map((s) => s.length));
707
+ const prefix = (str) => ` ${t} ${str.padEnd(longest + 4, " ")}`;
627
708
  const pkg = await import("../../package.json", { assert: { type: "json" } });
628
709
  const { version, dependencies } = pkg.default;
629
710
  const compilerVersion = dependencies["@openfn/compiler"];
630
711
  const runtimeVersion = dependencies["@openfn/runtime"];
631
- const { adaptors } = options;
632
- let adaptorVersionString = "";
633
- if (adaptors && adaptors.length === 1) {
634
- const [a] = adaptors;
635
- const { name, version: version2 } = getNameAndVersion4(a);
636
- adaptorVersionString = `
637
- ${prefix(
638
- "adaptor " + name.replace(/^@openfn\/language-/, "")
639
- )}${version2 || "latest"}`;
640
- }
641
- logger.info(`Versions:
642
- ${prefix("node.js")}${process.version.substring(1)}
643
- ${prefix("cli")}${version}
644
- ${prefix("runtime")}${runtimeVersion}
645
- ${prefix("compiler")}${compilerVersion}${adaptorVersionString}`);
712
+ let adaptorVersion;
713
+ let adaptorName;
714
+ if (adaptor) {
715
+ const { name, version: version2 } = getNameAndVersion5(adaptor);
716
+ if (name.match("=")) {
717
+ const [namePart, pathPart] = name.split("=");
718
+ adaptorVersion = loadVersionFromPath(pathPart);
719
+ adaptorName = namePart;
720
+ } else {
721
+ adaptorName = name;
722
+ adaptorVersion = version2 || "latest";
723
+ }
724
+ }
725
+ let output;
726
+ if (logJson) {
727
+ output = {
728
+ versions: {
729
+ "node.js": process.version.substring(1),
730
+ cli: version,
731
+ runtime: runtimeVersion,
732
+ compiler: compilerVersion
733
+ }
734
+ };
735
+ if (adaptorName) {
736
+ output.versions[adaptorName] = adaptorVersion;
737
+ }
738
+ } else {
739
+ const adaptorVersionString = adaptorName ? `
740
+ ${prefix(adaptorName)}${adaptorVersion}` : "";
741
+ output = `Versions:
742
+ ${prefix(NODE)}${process.version.substring(1)}
743
+ ${prefix(CLI2)}${version}
744
+ ${prefix(RUNTIME2)}${runtimeVersion}
745
+ ${prefix(COMPILER2)}${compilerVersion}${adaptorVersionString}`;
746
+ }
747
+ logger.info(output);
646
748
  };
647
749
  var print_versions_default = printVersions;
648
750
 
@@ -657,7 +759,7 @@ var handlers = {
657
759
  ["repo-install"]: install,
658
760
  ["repo-pwd"]: pwd,
659
761
  ["repo-list"]: list,
660
- version: async (_opts, logger) => print_versions_default(logger)
762
+ version: async (opts, logger) => print_versions_default(logger, opts)
661
763
  };
662
764
  var parse = async (basePath, options, log) => {
663
765
  const opts = ensureOpts(basePath, options);
@@ -665,7 +767,20 @@ var parse = async (basePath, options, log) => {
665
767
  if (opts.command === "execute" || opts.command === "test") {
666
768
  await print_versions_default(logger, opts);
667
769
  }
668
- if (opts.adaptors && opts.expand) {
770
+ if (opts.monorepoPath) {
771
+ if (opts.monorepoPath === "ERR") {
772
+ logger.error(
773
+ "ERROR: --use-adaptors-monorepo was passed, but OPENFN_ADAPTORS_REPO env var is undefined"
774
+ );
775
+ logger.error("Set OPENFN_ADAPTORS_REPO to a path pointing to the repo");
776
+ process.exit(9);
777
+ }
778
+ opts.adaptors = await use_adaptors_repo_default(
779
+ opts.adaptors,
780
+ opts.monorepoPath,
781
+ logger
782
+ );
783
+ } else if (opts.adaptors && opts.expand) {
669
784
  opts.adaptors = expand_adaptors_default(opts.adaptors, logger);
670
785
  }
671
786
  if (/^(test|version)$/.test(opts.command) && !opts.repoDir) {
@@ -681,7 +796,7 @@ var parse = async (basePath, options, log) => {
681
796
  assertPath(basePath);
682
797
  }
683
798
  if (!handler) {
684
- logger.error(`Unrecognise command: ${options.command}`);
799
+ logger.error(`Unrecognised command: ${options.command}`);
685
800
  process.exit(1);
686
801
  }
687
802
  try {
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@openfn/cli",
3
- "version": "0.0.27",
3
+ "version": "0.0.29",
4
4
  "description": "CLI devtools for the openfn toolchain.",
5
5
  "engines": {
6
6
  "node": ">=18",
@@ -41,10 +41,10 @@
41
41
  "rimraf": "^3.0.2",
42
42
  "treeify": "^1.1.0",
43
43
  "yargs": "^17.5.1",
44
- "@openfn/compiler": "0.0.23",
44
+ "@openfn/compiler": "0.0.25",
45
45
  "@openfn/describe-package": "0.0.14",
46
- "@openfn/logger": "0.0.8",
47
- "@openfn/runtime": "0.0.17"
46
+ "@openfn/runtime": "0.0.19",
47
+ "@openfn/logger": "0.0.10"
48
48
  },
49
49
  "files": [
50
50
  "dist",