@openfn/cli 0.0.38 → 0.0.40

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/README.md CHANGED
@@ -58,7 +58,6 @@ If you're coming to the CLI from the old openfn devtools, here are a couple of k
58
58
 
59
59
  * The CLI has a shorter, sleeker syntax, so your command should be much shorter
60
60
  * The CLI will automatically install adaptors for you (with full version control)
61
- * By default, the CLI will only write state.data to output. This is to encourage better state management. Pass `--no-strict-output` to save the entire state object.
62
61
 
63
62
  ## Basic Usage
64
63
 
@@ -76,8 +75,6 @@ Pass the `-i` flag to auto-install any required adaptors (it's safe to do this r
76
75
 
77
76
 When finished, the CLI will write the resulting state to disk. By default the CLI will create an `output.json` next to the job file. You can pass a path to output by passing `-o path/to/output.json` and state by adding `-s path/to/state.json`. You can use `-S` and `-O` to pass state through stdin and return the output through stdout.
78
77
 
79
- Note that the CLI will only include the `state.data` key in the output. To write the entire state object (not just `data`), pass `--no-strict-output`.
80
-
81
78
  The CLI maintains a repo for auto-installed adaptors. Run `openfn repo list` to see where the repo is, and what's in it. Set the `OPENFN_REPO_DIR` env var to specify the repo folder. When autoinstalling, the CLI will check to see if a matching version is found in the repo. `openfn repo clean` will remove all adaptors from the repo. The repo also includes any documentation and metadata built with the CLI.
82
79
 
83
80
  You can specify adaptors with a shorthand (`http`) or use the full package name (`@openfn/language-http`). You can add a specific version like `http@2.0.0`. You can pass a path to a locally installed adaptor like `http=/repo/openfn/adaptors/my-http-build`.
@@ -0,0 +1,33 @@
1
+ // src/util/expand-adaptors.ts
2
+ var expand = (name) => {
3
+ if (typeof name === "string") {
4
+ const [left] = name.split("=");
5
+ if (left.match("/") || left.endsWith(".js")) {
6
+ return name;
7
+ }
8
+ return `@openfn/language-${name}`;
9
+ }
10
+ return name;
11
+ };
12
+ var expand_adaptors_default = (opts) => {
13
+ const { adaptors, workflow } = opts;
14
+ if (adaptors) {
15
+ opts.adaptors = adaptors?.map(expand);
16
+ }
17
+ if (workflow) {
18
+ Object.values(workflow.jobs).forEach((job) => {
19
+ if (job.adaptor) {
20
+ job.adaptor = expand(job.adaptor);
21
+ }
22
+ });
23
+ }
24
+ return opts;
25
+ };
26
+
27
+ // src/constants.ts
28
+ var DEFAULT_REPO_DIR = "/tmp/openfn/repo";
29
+
30
+ export {
31
+ expand_adaptors_default,
32
+ DEFAULT_REPO_DIR
33
+ };
package/dist/index.js CHANGED
@@ -2,7 +2,7 @@
2
2
  import {
3
3
  DEFAULT_REPO_DIR,
4
4
  expand_adaptors_default
5
- } from "./chunk-DOKL2XM5.js";
5
+ } from "./chunk-UBDWXKSG.js";
6
6
 
7
7
  // src/process/spawn.ts
8
8
  import path from "node:path";
@@ -34,77 +34,6 @@ function spawn_default(basePath, opts2) {
34
34
  import yargs from "yargs";
35
35
  import { hideBin } from "yargs/helpers";
36
36
 
37
- // src/repo/command.ts
38
- var repo = {
39
- command: "repo [subcommand]",
40
- desc: "Run commands on the module repo (install|clean)",
41
- builder: (yargs2) => yargs2.command(clean).command(install).command(pwd).command(list).example("repo install -a http", "Install @openfn/language-http").example("repo clean", "Remove everything from the repo working dir").example("repo pwd", "Print the current repo working dir")
42
- };
43
- var install = {
44
- command: "install [packages...]",
45
- desc: "install one or more packages to the runtime repo",
46
- handler: (argv) => {
47
- argv.command = "repo-install";
48
- },
49
- builder: (yargs2) => {
50
- return yargs2.option("adaptor", {
51
- alias: ["a"],
52
- description: "Install an adaptor by passing a shortened version of the name",
53
- boolean: true
54
- }).example("install axios", "Install the axios npm package to the repo").example(
55
- "install -a http",
56
- "Install @openfn/language-http adaptor to the repo"
57
- ).example(
58
- "install @openfn/language-http",
59
- "Install the language-http adaptor to the repo"
60
- );
61
- }
62
- };
63
- var clean = {
64
- command: "clean",
65
- desc: "Removes all modules from the runtime module repo",
66
- handler: (argv) => {
67
- argv.command = "repo-clean";
68
- },
69
- builder: (yargs2) => yargs2.option("force", {
70
- alias: ["f"],
71
- description: "Skip the prompt and force deletion",
72
- boolean: true
73
- })
74
- };
75
- var pwd = {
76
- command: "pwd",
77
- desc: "Print repo's current working directory",
78
- handler: (argv) => {
79
- argv.command = "repo-pwd";
80
- }
81
- };
82
- var list = {
83
- command: "list",
84
- desc: "Show a report on what is installed in the repo",
85
- handler: (argv) => {
86
- argv.command = "repo-list";
87
- }
88
- };
89
-
90
- // src/util/command-builders.ts
91
- var build = (opts2, yargs2) => opts2.reduce((_y, o) => yargs2.option(o.name, o.yargs), yargs2);
92
- var ensure = (command, opts2) => (yargs2) => {
93
- yargs2.command = command;
94
- opts2.filter((opt) => opt.ensure).forEach((opt) => {
95
- opt.ensure(yargs2);
96
- });
97
- };
98
- var override = (command, yargs2) => {
99
- return {
100
- ...command,
101
- yargs: {
102
- ...command.yargs || {},
103
- ...yargs2
104
- }
105
- };
106
- };
107
-
108
37
  // src/options.ts
109
38
  import path2 from "node:path";
110
39
  var setDefaultValue = (opts2, key, value) => {
@@ -259,10 +188,10 @@ var outputPath = {
259
188
  };
260
189
  var repoDir = {
261
190
  name: "repo-dir",
262
- yargs: {
191
+ yargs: () => ({
263
192
  description: "Provide a path to the repo root dir",
264
193
  default: process.env.OPENFN_REPO_DIR || DEFAULT_REPO_DIR
265
- }
194
+ })
266
195
  };
267
196
  var start = {
268
197
  name: "start",
@@ -274,11 +203,28 @@ var start = {
274
203
  var strictOutput = {
275
204
  name: "no-strict-output",
276
205
  yargs: {
206
+ deprecated: true,
207
+ hidden: true,
208
+ boolean: true
209
+ },
210
+ ensure: (opts2) => {
211
+ if (!opts2.hasOwnProperty("strict")) {
212
+ opts2.strict = opts2.strictOutput;
213
+ }
214
+ delete opts2.strictOutput;
215
+ }
216
+ };
217
+ var strict = {
218
+ name: "strict",
219
+ yargs: {
220
+ default: false,
277
221
  boolean: true,
278
- description: "Allow properties other than data to be returned in the output"
222
+ description: "Enables strict state handling, meaning only state.data is returned from a job."
279
223
  },
280
224
  ensure: (opts2) => {
281
- setDefaultValue(opts2, "strictOutput", true);
225
+ if (!opts2.hasOwnProperty("strictOutput")) {
226
+ setDefaultValue(opts2, "strict", false);
227
+ }
282
228
  }
283
229
  };
284
230
  var skipAdaptorValidation = {
@@ -326,6 +272,85 @@ var useAdaptorsMonorepo = {
326
272
  }
327
273
  };
328
274
 
275
+ // src/util/command-builders.ts
276
+ var expandYargs = (y2) => {
277
+ if (typeof y2 === "function") {
278
+ return y2();
279
+ }
280
+ return y2;
281
+ };
282
+ var build = (opts2, yargs2) => opts2.reduce((_y, o) => yargs2.option(o.name, expandYargs(o.yargs)), yargs2);
283
+ var ensure = (command, opts2) => (yargs2) => {
284
+ yargs2.command = command;
285
+ opts2.filter((opt) => opt.ensure).forEach((opt) => {
286
+ opt.ensure(yargs2);
287
+ });
288
+ };
289
+ var override = (command, yargs2) => {
290
+ return {
291
+ ...command,
292
+ yargs: {
293
+ ...command.yargs || {},
294
+ ...yargs2
295
+ }
296
+ };
297
+ };
298
+
299
+ // src/repo/command.ts
300
+ var repo = {
301
+ command: "repo [subcommand]",
302
+ desc: "Run commands on the module repo (install|clean)",
303
+ builder: (yargs2) => yargs2.command(clean).command(install).command(list).example("repo install -a http", "Install @openfn/language-http").example("repo clean", "Remove everything from the repo working dir")
304
+ };
305
+ var installOptions = [
306
+ repoDir,
307
+ override(expandAdaptors, {
308
+ default: true,
309
+ hidden: true
310
+ }),
311
+ override(adaptors, {
312
+ description: "Specify which language-adaptor to install (allows short-form names to be used, eg, http)"
313
+ })
314
+ ];
315
+ var install = {
316
+ command: "install [packages...]",
317
+ desc: "install one or more packages to the runtime repo. Use -a to pass shorthand adaptor names.",
318
+ handler: ensure("repo-install", installOptions),
319
+ builder: (yargs2) => build(installOptions, yargs2).example("install axios", "Install the axios npm package to the repo").example(
320
+ "install -a http",
321
+ "Install @openfn/language-http adaptor to the repo"
322
+ ).example(
323
+ "install @openfn/language-http",
324
+ "Install the language-http adaptor to the repo"
325
+ )
326
+ };
327
+ var clean = {
328
+ command: "clean",
329
+ desc: "Removes all modules from the runtime module repo",
330
+ handler: ensure("repo-clean", [repoDir]),
331
+ builder: (yargs2) => build(
332
+ [
333
+ repoDir,
334
+ {
335
+ name: "force",
336
+ yargs: {
337
+ alias: ["f"],
338
+ description: "Skip the prompt and force deletion",
339
+ boolean: true
340
+ }
341
+ }
342
+ ],
343
+ yargs2
344
+ )
345
+ };
346
+ var list = {
347
+ command: "list",
348
+ desc: "Show a report on what is installed in the repo",
349
+ aliases: ["$0"],
350
+ handler: ensure("repo-list", [repoDir]),
351
+ builder: (yargs2) => build([repoDir], yargs2)
352
+ };
353
+
329
354
  // src/execute/command.ts
330
355
  var options = [
331
356
  expandAdaptors,
@@ -343,6 +368,7 @@ var options = [
343
368
  start,
344
369
  statePath,
345
370
  stateStdin,
371
+ strict,
346
372
  strictOutput,
347
373
  timeout,
348
374
  useAdaptorsMonorepo
@@ -353,7 +379,7 @@ var executeCommand = {
353
379
 
354
380
  Execute will run a job/workflow at the path and write the output state to disk (to ./state.json unless otherwise specified)
355
381
 
356
- By default only state.data will be written to the output. Include --no-strict-output to write the entire state object.
382
+ By default only state.data will be returned from a job. Include --no-strict to write the entire state object.
357
383
 
358
384
  Remember to include the adaptor name with -a. Auto install adaptors with the -i flag.`,
359
385
  aliases: ["$0"],
@@ -365,8 +391,8 @@ Remember to include the adaptor name with -a. Auto install adaptors with the -i
365
391
  "openfn foo/job.js",
366
392
  "Execute foo/job.js with no adaptor and write the final state to foo/job.json"
367
393
  ).example(
368
- "openfn workflow.json -ia common",
369
- "Execute workflow.json using @openfn/language-commom (with autoinstall enabled)"
394
+ "openfn workflow.json -i",
395
+ "Execute workflow.json with autoinstall enabled"
370
396
  ).example(
371
397
  "openfn job.js -a common --log info",
372
398
  "Execute job.js with common adaptor and info-level logging"
@@ -1,41 +1,90 @@
1
1
  import {
2
- CLI,
3
- COMPILER,
4
- JOB,
5
- RUNTIME,
6
- createNullLogger,
7
- defaultLogger,
8
- ensureLogOpts,
9
- ensureOpts,
10
- expand_adaptors_default,
11
- logger_default,
12
- printDuration
13
- } from "../chunk-DOKL2XM5.js";
2
+ DEFAULT_REPO_DIR,
3
+ expand_adaptors_default
4
+ } from "../chunk-UBDWXKSG.js";
14
5
 
15
6
  // src/execute/execute.ts
16
7
  import run, { getNameAndVersion } from "@openfn/runtime";
17
- var execute_default = (input, state, opts) => {
18
- return run(input, state, {
19
- start: opts.start,
20
- timeout: opts.timeout,
21
- immutableState: opts.immutable,
22
- logger: logger_default(RUNTIME, opts),
23
- jobLogger: logger_default(JOB, opts),
24
- linker: {
25
- repo: opts.repoDir,
26
- modules: parseAdaptors(opts)
27
- }
8
+
9
+ // src/util/logger.ts
10
+ import actualCreateLogger, { printDuration } from "@openfn/logger";
11
+ import { isValidLogLevel, defaultLogger } from "@openfn/logger";
12
+ var CLI = "cli";
13
+ var COMPILER = "compiler";
14
+ var RUNTIME = "runtime";
15
+ var JOB = "job";
16
+ var namespaces = {
17
+ [CLI]: "CLI",
18
+ [RUNTIME]: "R/T",
19
+ [COMPILER]: "CMP",
20
+ [JOB]: "JOB"
21
+ };
22
+ var createLogger = (name = "", options) => {
23
+ const logOptions = options.log || {};
24
+ let json = false;
25
+ let level = logOptions[name] || logOptions.default || "default";
26
+ if (options.logJson) {
27
+ json = true;
28
+ }
29
+ return actualCreateLogger(namespaces[name] || name, {
30
+ level,
31
+ json,
32
+ ...logOptions
28
33
  });
29
34
  };
35
+ var logger_default = createLogger;
36
+ var createNullLogger = () => createLogger(void 0, { log: { default: "none" } });
37
+
38
+ // src/util/abort.ts
39
+ var AbortError = class extends Error {
40
+ constructor(reason) {
41
+ super(reason);
42
+ this.handled = true;
43
+ }
44
+ };
45
+ var abort_default = (logger, reason, error, help) => {
46
+ const e = new AbortError(reason);
47
+ logger.error(reason);
48
+ if (error) {
49
+ logger.error(error.message);
50
+ }
51
+ if (help) {
52
+ logger.always(help);
53
+ }
54
+ logger.break();
55
+ logger.error("Critical error: aborting command");
56
+ throw e;
57
+ };
58
+
59
+ // src/execute/execute.ts
60
+ var execute_default = async (input, state, opts, logger) => {
61
+ try {
62
+ const result = await run(input, state, {
63
+ strict: opts.strict,
64
+ start: opts.start,
65
+ timeout: opts.timeout,
66
+ immutableState: opts.immutable,
67
+ logger: logger_default(RUNTIME, opts),
68
+ jobLogger: logger_default(JOB, opts),
69
+ linker: {
70
+ repo: opts.repoDir,
71
+ modules: parseAdaptors(opts)
72
+ }
73
+ });
74
+ return result;
75
+ } catch (e) {
76
+ abort_default(logger, "Invalid workflow", e);
77
+ }
78
+ };
30
79
  function parseAdaptors(opts) {
31
80
  const extractInfo = (specifier) => {
32
- const [module, path6] = specifier.split("=");
81
+ const [module, path7] = specifier.split("=");
33
82
  const { name, version } = getNameAndVersion(module);
34
83
  const info = {
35
84
  name
36
85
  };
37
- if (path6) {
38
- info.path = path6;
86
+ if (path7) {
87
+ info.path = path7;
39
88
  }
40
89
  if (version) {
41
90
  info.version = version;
@@ -66,14 +115,14 @@ import { writeFile } from "node:fs/promises";
66
115
  var serializeOutput = async (options, result, logger) => {
67
116
  let output = result;
68
117
  if (output && (output.configuration || output.data)) {
69
- const { data, configuration, ...rest } = result;
70
- if (options.strictOutput !== false) {
71
- output = { data };
118
+ if (options.strict) {
119
+ output = { data: output.data };
120
+ if (result.errors) {
121
+ output.errors = result.errors;
122
+ }
72
123
  } else {
73
- output = {
74
- data,
75
- ...rest
76
- };
124
+ const { configuration, ...rest } = result;
125
+ output = rest;
77
126
  }
78
127
  }
79
128
  if (output === void 0) {
@@ -83,10 +132,11 @@ var serializeOutput = async (options, result, logger) => {
83
132
  }
84
133
  if (options.outputStdout) {
85
134
  logger.success(`Result: `);
86
- logger.success(output);
135
+ logger.always(output);
87
136
  } else if (options.outputPath) {
88
- logger.success(`Writing output to ${options.outputPath}`);
137
+ logger.debug(`Writing output to ${options.outputPath}`);
89
138
  await writeFile(options.outputPath, output);
139
+ logger.success(`State written to ${options.outputPath}`);
90
140
  }
91
141
  return output;
92
142
  };
@@ -115,16 +165,13 @@ import { exec } from "node:child_process";
115
165
  import treeify from "treeify";
116
166
  import { install as rtInstall, loadRepoPkg } from "@openfn/runtime";
117
167
  var install = async (opts, log = defaultLogger) => {
118
- let { packages, adaptor, repoDir } = opts;
119
- if (packages) {
168
+ let { packages, adaptors, repoDir } = opts;
169
+ const targets = [].concat(packages ?? [], adaptors ?? []);
170
+ if (targets) {
120
171
  log.timer("install");
121
172
  log.success("Installing packages...");
122
173
  log.debug("repoDir is set to:", repoDir);
123
- if (adaptor) {
124
- const expanded = expand_adaptors_default({ adaptors: packages });
125
- packages = expanded.adaptors;
126
- }
127
- await rtInstall(packages ?? [], repoDir, log);
174
+ await rtInstall(targets, repoDir, log);
128
175
  const duration = log.timer("install");
129
176
  log.success(`Installation complete in ${duration}`);
130
177
  }
@@ -190,16 +237,28 @@ var compile_default = async (opts, log) => {
190
237
  if (opts.workflow) {
191
238
  job = compileWorkflow(opts.workflow, opts, log);
192
239
  } else {
193
- const compilerOptions = await loadTransformOptions(opts, log);
194
- job = compile(opts.job || opts.jobPath, compilerOptions);
240
+ job = await compileJob(opts.job || opts.jobPath, opts, log);
195
241
  }
196
242
  if (opts.jobPath) {
197
- log.success(`Compiled job from ${opts.jobPath}`);
243
+ log.success(`Compiled from ${opts.jobPath}`);
198
244
  } else {
199
- log.success("Compiled job");
245
+ log.success("Compilation complete");
200
246
  }
201
247
  return job;
202
248
  };
249
+ var compileJob = async (job, opts, log, jobName) => {
250
+ try {
251
+ const compilerOptions = await loadTransformOptions(opts, log);
252
+ return compile(job, compilerOptions);
253
+ } catch (e) {
254
+ abort_default(
255
+ log,
256
+ `Failed to compile job ${jobName ?? ""}`.trim(),
257
+ e,
258
+ "Check the syntax of the job expression:\n\n" + job
259
+ );
260
+ }
261
+ };
203
262
  var compileWorkflow = async (workflow, opts, log) => {
204
263
  for (const job of workflow.jobs) {
205
264
  const jobOpts = {
@@ -208,9 +267,13 @@ var compileWorkflow = async (workflow, opts, log) => {
208
267
  if (job.adaptor) {
209
268
  jobOpts.adaptors = [job.adaptor];
210
269
  }
211
- const compilerOptions = await loadTransformOptions(jobOpts, log);
212
270
  if (job.expression) {
213
- job.expression = compile(job.expression, compilerOptions);
271
+ job.expression = await compileJob(
272
+ job.expression,
273
+ jobOpts,
274
+ log,
275
+ job.id
276
+ );
214
277
  }
215
278
  }
216
279
  return workflow;
@@ -223,10 +286,10 @@ var stripVersionSpecifier = (specifier) => {
223
286
  return specifier;
224
287
  };
225
288
  var resolveSpecifierPath = async (pattern, repoDir, log) => {
226
- const [specifier, path6] = pattern.split("=");
227
- if (path6) {
228
- log.debug(`Resolved ${specifier} to path: ${path6}`);
229
- return path6;
289
+ const [specifier, path7] = pattern.split("=");
290
+ if (path7) {
291
+ log.debug(`Resolved ${specifier} to path: ${path7}`);
292
+ return path7;
230
293
  }
231
294
  const repoPath = await getModulePath(specifier, repoDir, log);
232
295
  if (repoPath) {
@@ -243,16 +306,16 @@ var loadTransformOptions = async (opts, log) => {
243
306
  const [pattern] = opts.adaptors;
244
307
  const [specifier] = pattern.split("=");
245
308
  log.debug(`Attempting to preload types for ${specifier}`);
246
- const path6 = await resolveSpecifierPath(pattern, opts.repoDir, log);
247
- if (path6) {
309
+ const path7 = await resolveSpecifierPath(pattern, opts.repoDir, log);
310
+ if (path7) {
248
311
  try {
249
312
  exports = await preloadAdaptorExports(
250
- path6,
313
+ path7,
251
314
  opts.useAdaptorsMonorepo,
252
315
  log
253
316
  );
254
317
  } catch (e) {
255
- log.error(`Failed to load adaptor typedefs from path ${path6}`);
318
+ log.error(`Failed to load adaptor typedefs from path ${path7}`);
256
319
  log.error(e);
257
320
  }
258
321
  }
@@ -340,7 +403,6 @@ import path from "node:path";
340
403
  import fs2 from "node:fs/promises";
341
404
  import { isPath } from "@openfn/compiler";
342
405
  var load_input_default = async (opts, log) => {
343
- log.debug("Loading input...");
344
406
  const { job, workflow, jobPath, workflowPath } = opts;
345
407
  if (workflow || workflowPath) {
346
408
  return loadWorkflow(opts, log);
@@ -349,36 +411,94 @@ var load_input_default = async (opts, log) => {
349
411
  return job;
350
412
  }
351
413
  if (jobPath) {
352
- log.debug(`Loading job from ${jobPath}`);
353
- opts.job = await fs2.readFile(jobPath, "utf8");
354
- return opts.job;
414
+ try {
415
+ log.debug(`Loading job from ${jobPath}`);
416
+ opts.job = await fs2.readFile(jobPath, "utf8");
417
+ return opts.job;
418
+ } catch (e) {
419
+ abort_default(
420
+ log,
421
+ "Job not found",
422
+ void 0,
423
+ `Failed to load the job from ${jobPath}`
424
+ );
425
+ }
355
426
  }
356
427
  };
357
- var fetchFile = (rootDir, filePath) => {
358
- const jobPath = filePath.startsWith("~") ? filePath : path.resolve(rootDir, filePath);
359
- return fs2.readFile(jobPath, "utf8");
360
- };
361
428
  var loadWorkflow = async (opts, log) => {
362
429
  const { workflowPath, workflow } = opts;
430
+ const readWorkflow = async () => {
431
+ try {
432
+ const text = await fs2.readFile(workflowPath, "utf8");
433
+ return text;
434
+ } catch (e) {
435
+ abort_default(
436
+ log,
437
+ "Workflow not found",
438
+ void 0,
439
+ `Failed to load a workflow from ${workflowPath}`
440
+ );
441
+ }
442
+ };
443
+ const parseWorkflow = (contents) => {
444
+ try {
445
+ return JSON.parse(contents);
446
+ } catch (e) {
447
+ abort_default(
448
+ log,
449
+ "Invalid JSON in workflow",
450
+ e,
451
+ `Check the syntax of the JSON at ${workflowPath}`
452
+ );
453
+ }
454
+ };
455
+ const fetchWorkflowFile = async (jobId, rootDir = "", filePath) => {
456
+ try {
457
+ const fullPath = filePath.startsWith("~") ? filePath : path.resolve(rootDir, filePath);
458
+ const result = await fs2.readFile(fullPath, "utf8");
459
+ return result;
460
+ } catch (e) {
461
+ abort_default(
462
+ log,
463
+ `File not found for job ${jobId}: ${filePath}`,
464
+ void 0,
465
+ `This workflow references a file which cannot be found at ${filePath}
466
+
467
+ Paths inside the workflow are relative to the workflow.json`
468
+ );
469
+ }
470
+ };
363
471
  log.debug(`Loading workflow from ${workflowPath}`);
364
472
  try {
365
473
  let wf;
366
474
  let rootDir = opts.baseDir;
367
475
  if (workflowPath) {
368
- const workflowRaw = await fs2.readFile(workflowPath, "utf8");
369
- wf = JSON.parse(workflowRaw);
476
+ let workflowRaw = await readWorkflow();
477
+ wf = parseWorkflow(workflowRaw);
370
478
  if (!rootDir) {
371
479
  rootDir = path.dirname(workflowPath);
372
480
  }
373
481
  } else {
374
482
  wf = workflow;
375
483
  }
484
+ let idx = 0;
376
485
  for (const job of wf.jobs) {
377
- if (typeof job.expression === "string" && isPath(job.expression)) {
378
- job.expression = await fetchFile(rootDir, job.expression);
486
+ idx += 1;
487
+ const expressionStr = typeof job.expression === "string" && job.expression?.trim();
488
+ const configurationStr = typeof job.configuration === "string" && job.configuration?.trim();
489
+ if (expressionStr && isPath(expressionStr)) {
490
+ job.expression = await fetchWorkflowFile(
491
+ job.id || `${idx}`,
492
+ rootDir,
493
+ expressionStr
494
+ );
379
495
  }
380
- if (typeof job.configuration === "string" && isPath(job.configuration)) {
381
- const configString = await fetchFile(rootDir, job.configuration);
496
+ if (configurationStr && isPath(configurationStr)) {
497
+ const configString = await fetchWorkflowFile(
498
+ job.id || `${idx}`,
499
+ rootDir,
500
+ configurationStr
501
+ );
382
502
  job.configuration = JSON.parse(configString);
383
503
  }
384
504
  }
@@ -418,15 +538,23 @@ var executeHandler = async (options, logger) => {
418
538
  logger.info("Skipping compilation as noCompile is set");
419
539
  }
420
540
  try {
421
- const result = await execute_default(input, state, options);
541
+ const result = await execute_default(input, state, options, logger);
422
542
  await serialize_output_default(options, result, logger);
423
543
  const duration = printDuration(new Date().getTime() - start);
424
- logger.success(`Done in ${duration}! \u2728`);
544
+ if (result.errors) {
545
+ logger.warn(
546
+ `Errors reported in ${Object.keys(result.errors).length} jobs`
547
+ );
548
+ }
549
+ logger.success(`Finished in ${duration}${result.errors ? "" : " \u2728"}`);
425
550
  return result;
426
- } catch (error) {
427
- logger.error(error);
551
+ } catch (err) {
552
+ if (!err.handled) {
553
+ logger.error("Unexpected error in execution");
554
+ logger.error(err);
555
+ }
428
556
  const duration = printDuration(new Date().getTime() - start);
429
- logger.error(`Took ${duration}.`);
557
+ logger.always(`Workflow failed in ${duration}.`);
430
558
  process.exitCode = 1;
431
559
  }
432
560
  };
@@ -509,20 +637,20 @@ var RETRY_COUNT = 20;
509
637
  var TIMEOUT_MS = 1e3 * 60;
510
638
  var actualDocGen = (specifier) => describePackage(specifier, {});
511
639
  var ensurePath = (filePath) => mkdirSync(path2.dirname(filePath), { recursive: true });
512
- var generatePlaceholder = (path6) => {
513
- writeFileSync(path6, `{ "loading": true, "timestamp": ${Date.now()}}`);
640
+ var generatePlaceholder = (path7) => {
641
+ writeFileSync(path7, `{ "loading": true, "timestamp": ${Date.now()}}`);
514
642
  };
515
643
  var finish = (logger, resultPath) => {
516
644
  logger.success("Done! Docs can be found at:\n");
517
645
  logger.print(` ${path2.resolve(resultPath)}`);
518
646
  };
519
- var generateDocs = async (specifier, path6, docgen, logger) => {
647
+ var generateDocs = async (specifier, path7, docgen, logger) => {
520
648
  const result = await docgen(specifier);
521
- await writeFile3(path6, JSON.stringify(result, null, 2));
522
- finish(logger, path6);
523
- return path6;
649
+ await writeFile3(path7, JSON.stringify(result, null, 2));
650
+ finish(logger, path7);
651
+ return path7;
524
652
  };
525
- var waitForDocs = async (docs, path6, logger, retryDuration = RETRY_DURATION) => {
653
+ var waitForDocs = async (docs, path7, logger, retryDuration = RETRY_DURATION) => {
526
654
  try {
527
655
  if (docs.hasOwnProperty("loading")) {
528
656
  logger.info("Docs are being loaded by another process. Waiting.");
@@ -534,19 +662,19 @@ var waitForDocs = async (docs, path6, logger, retryDuration = RETRY_DURATION) =>
534
662
  clearInterval(i);
535
663
  reject(new Error("Timed out waiting for docs to load"));
536
664
  }
537
- const updated = JSON.parse(readFileSync(path6, "utf8"));
665
+ const updated = JSON.parse(readFileSync(path7, "utf8"));
538
666
  if (!updated.hasOwnProperty("loading")) {
539
667
  logger.info("Docs found!");
540
668
  clearInterval(i);
541
- resolve(path6);
669
+ resolve(path7);
542
670
  }
543
671
  count++;
544
672
  }, retryDuration);
545
673
  });
546
674
  } else {
547
- logger.info(`Docs already written to cache at ${path6}`);
548
- finish(logger, path6);
549
- return path6;
675
+ logger.info(`Docs already written to cache at ${path7}`);
676
+ finish(logger, path7);
677
+ return path7;
550
678
  }
551
679
  } catch (e) {
552
680
  logger.error("Existing doc JSON corrupt. Aborting");
@@ -563,28 +691,28 @@ var docgenHandler = (options, logger, docgen = actualDocGen, retryDuration = RET
563
691
  process.exit(9);
564
692
  }
565
693
  logger.success(`Generating docs for ${specifier}`);
566
- const path6 = `${repoDir}/docs/${specifier}.json`;
567
- ensurePath(path6);
694
+ const path7 = `${repoDir}/docs/${specifier}.json`;
695
+ ensurePath(path7);
568
696
  const handleError = () => {
569
697
  logger.info("Removing placeholder");
570
- rmSync(path6);
698
+ rmSync(path7);
571
699
  };
572
700
  try {
573
- const existing = readFileSync(path6, "utf8");
701
+ const existing = readFileSync(path7, "utf8");
574
702
  const json = JSON.parse(existing);
575
703
  if (json && json.timeout && Date.now() - json.timeout >= TIMEOUT_MS) {
576
704
  logger.info(`Expired placeholder found. Removing.`);
577
- rmSync(path6);
705
+ rmSync(path7);
578
706
  throw new Error("TIMEOUT");
579
707
  }
580
- return waitForDocs(json, path6, logger, retryDuration);
708
+ return waitForDocs(json, path7, logger, retryDuration);
581
709
  } catch (e) {
582
710
  if (e.message !== "TIMEOUT") {
583
- logger.info(`Docs JSON not found at ${path6}`);
711
+ logger.info(`Docs JSON not found at ${path7}`);
584
712
  }
585
713
  logger.debug("Generating placeholder");
586
- generatePlaceholder(path6);
587
- return generateDocs(specifier, path6, docgen, logger).catch((e2) => {
714
+ generatePlaceholder(path7);
715
+ return generateDocs(specifier, path7, docgen, logger).catch((e2) => {
588
716
  logger.error("Error generating documentation");
589
717
  logger.error(e2);
590
718
  handleError();
@@ -633,7 +761,7 @@ var docsHandler = async (options, logger) => {
633
761
  logger.success(`Showing docs for ${adaptorName} v${version}`);
634
762
  }
635
763
  logger.info("Generating/loading documentation...");
636
- const path6 = await handler_default4(
764
+ const path7 = await handler_default4(
637
765
  {
638
766
  specifier: `${name}@${version}`,
639
767
  repoDir
@@ -641,8 +769,8 @@ var docsHandler = async (options, logger) => {
641
769
  createNullLogger()
642
770
  );
643
771
  let didError = false;
644
- if (path6) {
645
- const source = await readFile(path6, "utf8");
772
+ if (path7) {
773
+ const source = await readFile(path7, "utf8");
646
774
  const data = JSON.parse(source);
647
775
  let desc;
648
776
  if (operation) {
@@ -788,9 +916,104 @@ var metadataHandler = async (options, logger) => {
788
916
  };
789
917
  var handler_default6 = metadataHandler;
790
918
 
919
+ // src/util/ensure-opts.ts
920
+ import path4 from "node:path";
921
+ var defaultLoggerOptions = {
922
+ default: "default",
923
+ job: "debug"
924
+ };
925
+ var ERROR_MESSAGE_LOG_LEVEL = "Unknown log level. Valid levels are none, debug, info and default.";
926
+ var ERROR_MESSAGE_LOG_COMPONENT = "Unknown log component. Valid components are cli, compiler, runtime and job.";
927
+ var componentShorthands = {
928
+ cmp: "compiler",
929
+ rt: "runtime",
930
+ "r/t": "runtime"
931
+ };
932
+ var isValidComponent = (v) => /^(cli|runtime|compiler|job|default)$/i.test(v);
933
+ var ensureLogOpts = (opts) => {
934
+ const components = {};
935
+ if (!opts.log && /^(version|test)$/.test(opts.command)) {
936
+ opts.log = { default: "info" };
937
+ return opts;
938
+ } else if (opts.log) {
939
+ opts.log.forEach((l) => {
940
+ let component = "";
941
+ let level = "";
942
+ if (l.match(/=/)) {
943
+ const parts = l.split("=");
944
+ component = parts[0].toLowerCase();
945
+ if (componentShorthands[component]) {
946
+ component = componentShorthands[component];
947
+ }
948
+ level = parts[1].toLowerCase();
949
+ } else {
950
+ component = "default";
951
+ level = l.toLowerCase();
952
+ }
953
+ if (!isValidComponent(component)) {
954
+ throw new Error(ERROR_MESSAGE_LOG_COMPONENT);
955
+ }
956
+ level = level.toLowerCase();
957
+ if (!isValidLogLevel(level)) {
958
+ throw new Error(ERROR_MESSAGE_LOG_LEVEL);
959
+ }
960
+ components[component] = level;
961
+ });
962
+ }
963
+ opts.log = {
964
+ ...defaultLoggerOptions,
965
+ ...components
966
+ };
967
+ return opts;
968
+ };
969
+ function ensureOpts(basePath = ".", opts) {
970
+ const newOpts = {
971
+ adaptor: opts.adaptor,
972
+ adaptors: opts.adaptors || [],
973
+ autoinstall: opts.autoinstall,
974
+ command: opts.command,
975
+ expandAdaptors: opts.expandAdaptors !== false,
976
+ force: opts.force || false,
977
+ immutable: opts.immutable || false,
978
+ log: opts.log,
979
+ logJson: typeof opts.logJson == "boolean" ? opts.logJson : Boolean(process.env.OPENFN_LOG_JSON),
980
+ compile: Boolean(opts.compile),
981
+ operation: opts.operation,
982
+ outputStdout: Boolean(opts.outputStdout),
983
+ packages: opts.packages,
984
+ repoDir: opts.repoDir || process.env.OPENFN_REPO_DIR || DEFAULT_REPO_DIR,
985
+ skipAdaptorValidation: opts.skipAdaptorValidation ?? false,
986
+ specifier: opts.specifier,
987
+ stateStdin: opts.stateStdin,
988
+ timeout: opts.timeout
989
+ };
990
+ const set2 = (key, value) => {
991
+ newOpts[key] = opts.hasOwnProperty(key) ? opts[key] : value;
992
+ };
993
+ if (opts.useAdaptorsMonorepo) {
994
+ newOpts.monorepoPath = process.env.OPENFN_ADAPTORS_REPO || "ERR";
995
+ }
996
+ let baseDir = basePath;
997
+ if (basePath.endsWith(".js")) {
998
+ baseDir = path4.dirname(basePath);
999
+ set2("jobPath", basePath);
1000
+ } else {
1001
+ set2("jobPath", `${baseDir}/job.js`);
1002
+ }
1003
+ set2("statePath", `${baseDir}/state.json`);
1004
+ if (!opts.outputStdout) {
1005
+ set2(
1006
+ "outputPath",
1007
+ newOpts.command === "compile" ? `${baseDir}/output.js` : `${baseDir}/output.json`
1008
+ );
1009
+ }
1010
+ ensureLogOpts(newOpts);
1011
+ return newOpts;
1012
+ }
1013
+
791
1014
  // src/util/use-adaptors-repo.ts
792
1015
  import { readFile as readFile2 } from "node:fs/promises";
793
- import path4 from "node:path";
1016
+ import path5 from "node:path";
794
1017
  import assert from "node:assert";
795
1018
  import { getNameAndVersion as getNameAndVersion5 } from "@openfn/runtime";
796
1019
  var validateMonoRepo = async (repoPath, log) => {
@@ -814,7 +1037,7 @@ var updatePath = (adaptor, repoPath, log) => {
814
1037
  );
815
1038
  }
816
1039
  const shortName = name.replace("@openfn/language-", "");
817
- const abspath = path4.resolve(repoPath, "packages", shortName);
1040
+ const abspath = path5.resolve(repoPath, "packages", shortName);
818
1041
  return `${name}=${abspath}`;
819
1042
  };
820
1043
  var useAdaptorsRepo = async (adaptors, repoPath, log) => {
@@ -831,7 +1054,7 @@ var use_adaptors_repo_default = useAdaptorsRepo;
831
1054
 
832
1055
  // src/util/print-versions.ts
833
1056
  import { readFileSync as readFileSync3 } from "node:fs";
834
- import path5 from "node:path";
1057
+ import path6 from "node:path";
835
1058
  import { getNameAndVersion as getNameAndVersion6 } from "@openfn/runtime";
836
1059
  import { mainSymbols } from "figures";
837
1060
  var NODE = "node.js";
@@ -841,7 +1064,7 @@ var COMPILER2 = "compiler";
841
1064
  var { triangleRightSmall: t } = mainSymbols;
842
1065
  var loadVersionFromPath = (adaptorPath) => {
843
1066
  try {
844
- const pkg = JSON.parse(readFileSync3(path5.resolve(adaptorPath, "package.json"), "utf8"));
1067
+ const pkg = JSON.parse(readFileSync3(path6.resolve(adaptorPath, "package.json"), "utf8"));
845
1068
  return pkg.version;
846
1069
  } catch (e) {
847
1070
  return "unknown";
@@ -918,7 +1141,7 @@ var handlers = {
918
1141
  ["repo-list"]: list,
919
1142
  version: async (opts, logger) => print_versions_default(logger, opts)
920
1143
  };
921
- var maybeEnsureOpts = (basePath, options) => /^(execute|compile|test)$/.test(options.command) ? ensureLogOpts(options) : ensureOpts(basePath, options);
1144
+ var maybeEnsureOpts = (basePath, options) => /(^(execute|compile|test)$)|(repo-)/.test(options.command) ? ensureLogOpts(options) : ensureOpts(basePath, options);
922
1145
  var parse = async (basePath, options, log) => {
923
1146
  const opts = maybeEnsureOpts(basePath, options);
924
1147
  const logger = log || logger_default(CLI, opts);
@@ -964,9 +1187,12 @@ var parse = async (basePath, options, log) => {
964
1187
  if (!process.exitCode) {
965
1188
  process.exitCode = e.exitCode || 1;
966
1189
  }
967
- logger.break();
968
- logger.error("Command failed!");
969
- logger.error(e);
1190
+ if (e.handled) {
1191
+ } else {
1192
+ logger.break();
1193
+ logger.error("Command failed!");
1194
+ logger.error(e);
1195
+ }
970
1196
  }
971
1197
  };
972
1198
  var commands_default = parse;
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@openfn/cli",
3
- "version": "0.0.38",
3
+ "version": "0.0.40",
4
4
  "description": "CLI devtools for the openfn toolchain.",
5
5
  "engines": {
6
6
  "node": ">=18",
@@ -36,10 +36,10 @@
36
36
  "typescript": "^4.7.4"
37
37
  },
38
38
  "dependencies": {
39
- "@openfn/compiler": "0.0.31",
39
+ "@openfn/compiler": "0.0.32",
40
40
  "@openfn/describe-package": "0.0.16",
41
- "@openfn/logger": "0.0.12",
42
- "@openfn/runtime": "0.0.23",
41
+ "@openfn/logger": "0.0.13",
42
+ "@openfn/runtime": "0.0.25",
43
43
  "figures": "^5.0.0",
44
44
  "rimraf": "^3.0.2",
45
45
  "treeify": "^1.1.0",
@@ -1,166 +0,0 @@
1
- // src/util/expand-adaptors.ts
2
- var expand = (name) => {
3
- if (typeof name === "string") {
4
- const [left] = name.split("=");
5
- if (left.match("/") || left.endsWith(".js")) {
6
- return name;
7
- }
8
- return `@openfn/language-${name}`;
9
- }
10
- return name;
11
- };
12
- var expand_adaptors_default = (opts) => {
13
- const { adaptors, workflow } = opts;
14
- if (adaptors) {
15
- opts.adaptors = adaptors?.map(expand);
16
- }
17
- if (workflow) {
18
- Object.values(workflow.jobs).forEach((job) => {
19
- if (job.adaptor) {
20
- job.adaptor = expand(job.adaptor);
21
- }
22
- });
23
- }
24
- return opts;
25
- };
26
-
27
- // src/util/logger.ts
28
- import actualCreateLogger, { printDuration } from "@openfn/logger";
29
- import { isValidLogLevel, defaultLogger } from "@openfn/logger";
30
- var CLI = "cli";
31
- var COMPILER = "compiler";
32
- var RUNTIME = "runtime";
33
- var JOB = "job";
34
- var namespaces = {
35
- [CLI]: "CLI",
36
- [RUNTIME]: "R/T",
37
- [COMPILER]: "CMP",
38
- [JOB]: "JOB"
39
- };
40
- var createLogger = (name = "", options) => {
41
- const logOptions = options.log || {};
42
- let json = false;
43
- let level = logOptions[name] || logOptions.default || "default";
44
- if (options.logJson) {
45
- json = true;
46
- }
47
- return actualCreateLogger(namespaces[name] || name, {
48
- level,
49
- json,
50
- ...logOptions
51
- });
52
- };
53
- var logger_default = createLogger;
54
- var createNullLogger = () => createLogger(void 0, { log: { default: "none" } });
55
-
56
- // src/util/ensure-opts.ts
57
- import path from "node:path";
58
- var defaultLoggerOptions = {
59
- default: "default",
60
- job: "debug"
61
- };
62
- var ERROR_MESSAGE_LOG_LEVEL = "Unknown log level. Valid levels are none, debug, info and default.";
63
- var ERROR_MESSAGE_LOG_COMPONENT = "Unknown log component. Valid components are cli, compiler, runtime and job.";
64
- var DEFAULT_REPO_DIR = "/tmp/openfn/repo";
65
- var componentShorthands = {
66
- cmp: "compiler",
67
- rt: "runtime",
68
- "r/t": "runtime"
69
- };
70
- var isValidComponent = (v) => /^(cli|runtime|compiler|job|default)$/i.test(v);
71
- var ensureLogOpts = (opts) => {
72
- const components = {};
73
- if (!opts.log && /^(version|test)$/.test(opts.command)) {
74
- opts.log = { default: "info" };
75
- return opts;
76
- } else if (opts.log) {
77
- opts.log.forEach((l) => {
78
- let component = "";
79
- let level = "";
80
- if (l.match(/=/)) {
81
- const parts = l.split("=");
82
- component = parts[0].toLowerCase();
83
- if (componentShorthands[component]) {
84
- component = componentShorthands[component];
85
- }
86
- level = parts[1].toLowerCase();
87
- } else {
88
- component = "default";
89
- level = l.toLowerCase();
90
- }
91
- if (!isValidComponent(component)) {
92
- throw new Error(ERROR_MESSAGE_LOG_COMPONENT);
93
- }
94
- level = level.toLowerCase();
95
- if (!isValidLogLevel(level)) {
96
- throw new Error(ERROR_MESSAGE_LOG_LEVEL);
97
- }
98
- components[component] = level;
99
- });
100
- }
101
- opts.log = {
102
- ...defaultLoggerOptions,
103
- ...components
104
- };
105
- return opts;
106
- };
107
- function ensureOpts(basePath = ".", opts) {
108
- const newOpts = {
109
- adaptor: opts.adaptor,
110
- adaptors: opts.adaptors || [],
111
- autoinstall: opts.autoinstall,
112
- command: opts.command,
113
- expandAdaptors: opts.expandAdaptors !== false,
114
- force: opts.force || false,
115
- immutable: opts.immutable || false,
116
- log: opts.log,
117
- logJson: typeof opts.logJson == "boolean" ? opts.logJson : Boolean(process.env.OPENFN_LOG_JSON),
118
- compile: Boolean(opts.compile),
119
- operation: opts.operation,
120
- outputStdout: Boolean(opts.outputStdout),
121
- packages: opts.packages,
122
- repoDir: opts.repoDir || process.env.OPENFN_REPO_DIR || DEFAULT_REPO_DIR,
123
- skipAdaptorValidation: opts.skipAdaptorValidation ?? false,
124
- specifier: opts.specifier,
125
- stateStdin: opts.stateStdin,
126
- strictOutput: opts.strictOutput ?? true,
127
- timeout: opts.timeout
128
- };
129
- const set = (key, value) => {
130
- newOpts[key] = opts.hasOwnProperty(key) ? opts[key] : value;
131
- };
132
- if (opts.useAdaptorsMonorepo) {
133
- newOpts.monorepoPath = process.env.OPENFN_ADAPTORS_REPO || "ERR";
134
- }
135
- let baseDir = basePath;
136
- if (basePath.endsWith(".js")) {
137
- baseDir = path.dirname(basePath);
138
- set("jobPath", basePath);
139
- } else {
140
- set("jobPath", `${baseDir}/job.js`);
141
- }
142
- set("statePath", `${baseDir}/state.json`);
143
- if (!opts.outputStdout) {
144
- set(
145
- "outputPath",
146
- newOpts.command === "compile" ? `${baseDir}/output.js` : `${baseDir}/output.json`
147
- );
148
- }
149
- ensureLogOpts(newOpts);
150
- return newOpts;
151
- }
152
-
153
- export {
154
- expand_adaptors_default,
155
- printDuration,
156
- CLI,
157
- COMPILER,
158
- RUNTIME,
159
- JOB,
160
- logger_default,
161
- createNullLogger,
162
- defaultLogger,
163
- DEFAULT_REPO_DIR,
164
- ensureLogOpts,
165
- ensureOpts
166
- };