@openfn/cli 1.1.4 → 1.2.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +40 -0
- package/dist/index.js +32 -3
- package/dist/process/runner.js +270 -72
- package/package.json +5 -5
package/README.md
CHANGED
|
@@ -127,6 +127,46 @@ If no command is specified, execute will run.
|
|
|
127
127
|
|
|
128
128
|
To get more information about a command, including usage examples, run `openfn <command> help`, ie, `openfn compile help`.
|
|
129
129
|
|
|
130
|
+
## Caching step output
|
|
131
|
+
|
|
132
|
+
The CLI can write the output of every single step to disk (rather than just the final output). To do this, just run a job with the `--cache-steps` flag.
|
|
133
|
+
|
|
134
|
+
```
|
|
135
|
+
openfn tmp/job.js --cache-steps
|
|
136
|
+
```
|
|
137
|
+
|
|
138
|
+
The cached output is written to `.cli-cache/<workflow-name>/<step-name>.json`, relative to the input job or workflow file. This folder has a `.gitignore` file and should be ignored from your version control.
|
|
139
|
+
|
|
140
|
+
So for a workflow at `./tmp/workflow.json` you'll get a cache path something like `./tmp/.cli-cache/workflow/step-1.json`.
|
|
141
|
+
|
|
142
|
+
The cache is cleared when execution starts, so you know all artefacts in the cache folder relate to the last run.
|
|
143
|
+
|
|
144
|
+
Step caching is disabled by default, but you can switch it on by setting the `OPENFN_ALWAYS_CACHE_STEPS` env var to `true`. To disable for a single execution, pass the `--no-cache-steps` flag.
|
|
145
|
+
|
|
146
|
+
## Starting from a custom step
|
|
147
|
+
|
|
148
|
+
When executing a workflow, the CLI will run from the first step (which is usually the first step in the `steps` array, unless `options.start` is set in the workflow).
|
|
149
|
+
|
|
150
|
+
You can run from any starting step by passing `--start <step-name>`, like this:
|
|
151
|
+
|
|
152
|
+
```
|
|
153
|
+
openfn tmp/job.js --start upload-to-salesforce
|
|
154
|
+
```
|
|
155
|
+
|
|
156
|
+
If you previously cached the steps from this workflow, the CLI will automatically load the correct input state from the cache. Otherwise, you can pass in whatever state you need with `-s ./transformed-state.json`.
|
|
157
|
+
|
|
158
|
+
You can also pass `--end` to make the workflow end early, or `--only` to only run a single step.
|
|
159
|
+
|
|
160
|
+
All step name options support "fuzzy" inputs. If you pass an exact step id, that step will always be the starting step. But you can also pass part of a step name or id.
|
|
161
|
+
|
|
162
|
+
So to match a step called with id `236baf56-e6c7-40f2-80ad-00d5a10b6b64` (such as you might download from Lightning), you can do:
|
|
163
|
+
|
|
164
|
+
```
|
|
165
|
+
openfn tmp/job.js --start 236b
|
|
166
|
+
```
|
|
167
|
+
|
|
168
|
+
Any unique continuous sequence of characters in the name or id will match. If there are multiple matches, an error will be thrown.
|
|
169
|
+
|
|
130
170
|
## Deploying Workflows
|
|
131
171
|
|
|
132
172
|
> ⚠️ This feature is still in active development. Expect breaking changes.
|
package/dist/index.js
CHANGED
|
@@ -171,6 +171,18 @@ var autoinstall = {
|
|
|
171
171
|
default: true
|
|
172
172
|
}
|
|
173
173
|
};
|
|
174
|
+
var cacheSteps = {
|
|
175
|
+
name: "cache-steps",
|
|
176
|
+
yargs: {
|
|
177
|
+
boolean: true,
|
|
178
|
+
description: "Cache the output of steps to ./.cache/<workflow-name>/<step-name>.json"
|
|
179
|
+
},
|
|
180
|
+
ensure: (opts2) => {
|
|
181
|
+
if (process.env.OPENFN_ALWAYS_CACHE_STEPS && !opts2.hasOwnProperty("cacheSteps")) {
|
|
182
|
+
opts2.cacheSteps = process.env.OPENFN_ALWAYS_CACHE_STEPS === "true";
|
|
183
|
+
}
|
|
184
|
+
}
|
|
185
|
+
};
|
|
174
186
|
var compile = {
|
|
175
187
|
name: "no-compile",
|
|
176
188
|
yargs: {
|
|
@@ -341,7 +353,21 @@ var start = {
|
|
|
341
353
|
name: "start",
|
|
342
354
|
yargs: {
|
|
343
355
|
string: true,
|
|
344
|
-
description: "Specifiy the start
|
|
356
|
+
description: "Specifiy the start step in a workflow"
|
|
357
|
+
}
|
|
358
|
+
};
|
|
359
|
+
var end = {
|
|
360
|
+
name: "end",
|
|
361
|
+
yargs: {
|
|
362
|
+
string: true,
|
|
363
|
+
description: "Specifiy the end step in a workflow"
|
|
364
|
+
}
|
|
365
|
+
};
|
|
366
|
+
var only = {
|
|
367
|
+
name: "only",
|
|
368
|
+
yargs: {
|
|
369
|
+
string: true,
|
|
370
|
+
description: "Specifiy to only run one step in a workflow"
|
|
345
371
|
}
|
|
346
372
|
};
|
|
347
373
|
var skipAdaptorValidation = {
|
|
@@ -531,17 +557,20 @@ var options4 = [
|
|
|
531
557
|
expandAdaptors,
|
|
532
558
|
adaptors,
|
|
533
559
|
autoinstall,
|
|
560
|
+
cacheSteps,
|
|
534
561
|
compile,
|
|
535
|
-
|
|
562
|
+
end,
|
|
536
563
|
ignoreImports,
|
|
564
|
+
immutable,
|
|
537
565
|
inputPath,
|
|
538
566
|
log,
|
|
539
567
|
logJson,
|
|
568
|
+
only,
|
|
540
569
|
outputPath,
|
|
541
570
|
outputStdout,
|
|
542
571
|
repoDir,
|
|
543
|
-
skipAdaptorValidation,
|
|
544
572
|
sanitize,
|
|
573
|
+
skipAdaptorValidation,
|
|
545
574
|
start,
|
|
546
575
|
statePath,
|
|
547
576
|
stateStdin,
|
package/dist/process/runner.js
CHANGED
|
@@ -1,5 +1,5 @@
|
|
|
1
1
|
// src/execute/execute.ts
|
|
2
|
-
import run, { getNameAndVersion } from "@openfn/runtime";
|
|
2
|
+
import run, { NOTIFY_JOB_COMPLETE, getNameAndVersion } from "@openfn/runtime";
|
|
3
3
|
|
|
4
4
|
// src/util/logger.ts
|
|
5
5
|
import actualCreateLogger, { printDuration } from "@openfn/logger";
|
|
@@ -31,8 +31,59 @@ var createLogger = (name = "", options) => {
|
|
|
31
31
|
var logger_default = createLogger;
|
|
32
32
|
var createNullLogger = () => createLogger(void 0, { log: { default: "none" } });
|
|
33
33
|
|
|
34
|
+
// src/util/cache.ts
|
|
35
|
+
import fs from "node:fs";
|
|
36
|
+
import path from "node:path";
|
|
37
|
+
import { rmdir } from "node:fs/promises";
|
|
38
|
+
var getCachePath = async (plan, options, stepId) => {
|
|
39
|
+
const { baseDir } = options;
|
|
40
|
+
const { name } = plan.workflow;
|
|
41
|
+
const basePath = `${baseDir}/.cli-cache/${name}`;
|
|
42
|
+
if (stepId) {
|
|
43
|
+
return path.resolve(`${basePath}/${stepId.replace(/ /, "-")}.json`);
|
|
44
|
+
}
|
|
45
|
+
return path.resolve(basePath);
|
|
46
|
+
};
|
|
47
|
+
var ensureGitIgnore = (options) => {
|
|
48
|
+
if (!options._hasGitIgnore) {
|
|
49
|
+
const ignorePath = path.resolve(
|
|
50
|
+
options.baseDir,
|
|
51
|
+
".cli-cache",
|
|
52
|
+
".gitignore"
|
|
53
|
+
);
|
|
54
|
+
try {
|
|
55
|
+
fs.accessSync(ignorePath);
|
|
56
|
+
} catch (e) {
|
|
57
|
+
fs.writeFileSync(ignorePath, "*");
|
|
58
|
+
}
|
|
59
|
+
}
|
|
60
|
+
options._hasGitIgnore = true;
|
|
61
|
+
};
|
|
62
|
+
var saveToCache = async (plan, stepId, output, options, logger) => {
|
|
63
|
+
if (options.cacheSteps) {
|
|
64
|
+
const cachePath = await getCachePath(plan, options, stepId);
|
|
65
|
+
fs.mkdirSync(path.dirname(cachePath), { recursive: true });
|
|
66
|
+
ensureGitIgnore(options);
|
|
67
|
+
logger.info(`Writing ${stepId} output to ${cachePath}`);
|
|
68
|
+
fs.writeFileSync(cachePath, JSON.stringify(output));
|
|
69
|
+
}
|
|
70
|
+
};
|
|
71
|
+
var clearCache = async (plan, options, logger) => {
|
|
72
|
+
const cacheDir = await getCachePath(plan, options);
|
|
73
|
+
try {
|
|
74
|
+
await rmdir(cacheDir, { recursive: true });
|
|
75
|
+
logger.info(`Cleared cache at ${cacheDir}`);
|
|
76
|
+
} catch (e) {
|
|
77
|
+
if (e.code === "ENOENT") {
|
|
78
|
+
} else {
|
|
79
|
+
logger.error(`Error while clearing cache at ${cacheDir}`);
|
|
80
|
+
logger.error(e);
|
|
81
|
+
}
|
|
82
|
+
}
|
|
83
|
+
};
|
|
84
|
+
|
|
34
85
|
// src/execute/execute.ts
|
|
35
|
-
var execute_default = async (plan, input, opts) => {
|
|
86
|
+
var execute_default = async (plan, input, opts, logger) => {
|
|
36
87
|
try {
|
|
37
88
|
const result = await run(plan, input, {
|
|
38
89
|
immutableState: opts.immutable,
|
|
@@ -41,6 +92,14 @@ var execute_default = async (plan, input, opts) => {
|
|
|
41
92
|
linker: {
|
|
42
93
|
repo: opts.repoDir,
|
|
43
94
|
modules: parseAdaptors(plan)
|
|
95
|
+
},
|
|
96
|
+
callbacks: {
|
|
97
|
+
notify: async (eventName, payload) => {
|
|
98
|
+
if (eventName === NOTIFY_JOB_COMPLETE) {
|
|
99
|
+
const { state, jobId } = payload;
|
|
100
|
+
await saveToCache(plan, jobId, state, opts, logger);
|
|
101
|
+
}
|
|
102
|
+
}
|
|
44
103
|
}
|
|
45
104
|
});
|
|
46
105
|
return result;
|
|
@@ -51,13 +110,13 @@ var execute_default = async (plan, input, opts) => {
|
|
|
51
110
|
};
|
|
52
111
|
function parseAdaptors(plan) {
|
|
53
112
|
const extractInfo = (specifier) => {
|
|
54
|
-
const [module,
|
|
113
|
+
const [module, path8] = specifier.split("=");
|
|
55
114
|
const { name, version } = getNameAndVersion(module);
|
|
56
115
|
const info = {
|
|
57
116
|
name
|
|
58
117
|
};
|
|
59
|
-
if (
|
|
60
|
-
info.path =
|
|
118
|
+
if (path8) {
|
|
119
|
+
info.path = path8;
|
|
61
120
|
}
|
|
62
121
|
if (version) {
|
|
63
122
|
info.version = version;
|
|
@@ -196,6 +255,7 @@ var AbortError = class extends Error {
|
|
|
196
255
|
};
|
|
197
256
|
var abort_default = (logger, reason, error, help) => {
|
|
198
257
|
const e = new AbortError(reason);
|
|
258
|
+
logger.break();
|
|
199
259
|
logger.error(reason);
|
|
200
260
|
if (error) {
|
|
201
261
|
logger.error(error.message);
|
|
@@ -205,6 +265,7 @@ var abort_default = (logger, reason, error, help) => {
|
|
|
205
265
|
}
|
|
206
266
|
logger.break();
|
|
207
267
|
logger.error("Critical error: aborting command");
|
|
268
|
+
process.exitCode = 1;
|
|
208
269
|
throw e;
|
|
209
270
|
};
|
|
210
271
|
|
|
@@ -261,10 +322,10 @@ var stripVersionSpecifier = (specifier) => {
|
|
|
261
322
|
return specifier;
|
|
262
323
|
};
|
|
263
324
|
var resolveSpecifierPath = async (pattern, repoDir, log) => {
|
|
264
|
-
const [specifier,
|
|
265
|
-
if (
|
|
266
|
-
log.debug(`Resolved ${specifier} to path: ${
|
|
267
|
-
return
|
|
325
|
+
const [specifier, path8] = pattern.split("=");
|
|
326
|
+
if (path8) {
|
|
327
|
+
log.debug(`Resolved ${specifier} to path: ${path8}`);
|
|
328
|
+
return path8;
|
|
268
329
|
}
|
|
269
330
|
const repoPath = await getModulePath(specifier, repoDir, log);
|
|
270
331
|
if (repoPath) {
|
|
@@ -281,16 +342,16 @@ var loadTransformOptions = async (opts, log) => {
|
|
|
281
342
|
const [pattern] = opts.adaptors;
|
|
282
343
|
const [specifier] = pattern.split("=");
|
|
283
344
|
log.debug(`Trying to preload types for ${specifier}`);
|
|
284
|
-
const
|
|
285
|
-
if (
|
|
345
|
+
const path8 = await resolveSpecifierPath(pattern, opts.repoDir, log);
|
|
346
|
+
if (path8) {
|
|
286
347
|
try {
|
|
287
348
|
exports = await preloadAdaptorExports(
|
|
288
|
-
|
|
349
|
+
path8,
|
|
289
350
|
opts.useAdaptorsMonorepo,
|
|
290
351
|
log
|
|
291
352
|
);
|
|
292
353
|
} catch (e) {
|
|
293
|
-
log.error(`Failed to load adaptor typedefs from path ${
|
|
354
|
+
log.error(`Failed to load adaptor typedefs from path ${path8}`);
|
|
294
355
|
log.error(e);
|
|
295
356
|
}
|
|
296
357
|
}
|
|
@@ -310,8 +371,21 @@ var loadTransformOptions = async (opts, log) => {
|
|
|
310
371
|
};
|
|
311
372
|
|
|
312
373
|
// src/util/load-state.ts
|
|
313
|
-
import
|
|
314
|
-
var
|
|
374
|
+
import fs2 from "node:fs/promises";
|
|
375
|
+
var getUpstreamStepId = (plan, stepId) => {
|
|
376
|
+
const upstreamStep = plan.workflow.steps.find((step) => {
|
|
377
|
+
if (step.next) {
|
|
378
|
+
if (typeof step.next === "string") {
|
|
379
|
+
return step.next === stepId;
|
|
380
|
+
}
|
|
381
|
+
return stepId in step.next;
|
|
382
|
+
}
|
|
383
|
+
});
|
|
384
|
+
if (upstreamStep) {
|
|
385
|
+
return typeof upstreamStep === "string" ? upstreamStep : upstreamStep.id;
|
|
386
|
+
}
|
|
387
|
+
};
|
|
388
|
+
var load_state_default = async (plan, opts, log, start) => {
|
|
315
389
|
const { stateStdin, statePath } = opts;
|
|
316
390
|
log.debug("Loading state...");
|
|
317
391
|
if (stateStdin) {
|
|
@@ -329,7 +403,7 @@ var load_state_default = async (opts, log) => {
|
|
|
329
403
|
}
|
|
330
404
|
if (statePath) {
|
|
331
405
|
try {
|
|
332
|
-
const str = await
|
|
406
|
+
const str = await fs2.readFile(statePath, "utf8");
|
|
333
407
|
const json = JSON.parse(str);
|
|
334
408
|
log.success(`Loaded state from ${statePath}`);
|
|
335
409
|
log.debug("state:", json);
|
|
@@ -339,6 +413,43 @@ var load_state_default = async (opts, log) => {
|
|
|
339
413
|
log.warn(e);
|
|
340
414
|
}
|
|
341
415
|
}
|
|
416
|
+
if (start) {
|
|
417
|
+
log.info(
|
|
418
|
+
"No state provided to CLI. Will attempt to load state from cache instead"
|
|
419
|
+
);
|
|
420
|
+
log.always(
|
|
421
|
+
`Attempting to load cached input state for starting step "${start}"`
|
|
422
|
+
);
|
|
423
|
+
try {
|
|
424
|
+
const upstreamStepId = getUpstreamStepId(plan, start);
|
|
425
|
+
if (upstreamStepId) {
|
|
426
|
+
log.debug(`Input step for "${start}" is "${upstreamStepId}"`);
|
|
427
|
+
const cachedStatePath = await getCachePath(plan, opts, upstreamStepId);
|
|
428
|
+
log.debug("Loading cached state from", cachedStatePath);
|
|
429
|
+
try {
|
|
430
|
+
await fs2.access(cachedStatePath);
|
|
431
|
+
const str = await fs2.readFile(cachedStatePath, "utf8");
|
|
432
|
+
const json = JSON.parse(str);
|
|
433
|
+
log.success(
|
|
434
|
+
`Loaded cached state for step "${start}" from ${cachedStatePath}`
|
|
435
|
+
);
|
|
436
|
+
log.info(` To force disable the cache, run again with --no-cache`);
|
|
437
|
+
return json;
|
|
438
|
+
} catch (e) {
|
|
439
|
+
log.warn(`No cached state found for step "${start}"`);
|
|
440
|
+
log.warn(
|
|
441
|
+
"Re-run this workflow with --cache to save the output of each step"
|
|
442
|
+
);
|
|
443
|
+
log.break();
|
|
444
|
+
}
|
|
445
|
+
} else {
|
|
446
|
+
log.warn(`Could not find an input step for step "${start}"`);
|
|
447
|
+
}
|
|
448
|
+
} catch (e) {
|
|
449
|
+
log.warn("Error loading cached state");
|
|
450
|
+
log.warn(e);
|
|
451
|
+
}
|
|
452
|
+
}
|
|
342
453
|
log.info(
|
|
343
454
|
"No state provided - using default state { data: {}, configuration: {} }"
|
|
344
455
|
);
|
|
@@ -375,8 +486,8 @@ var validateAdaptors = async (options, logger) => {
|
|
|
375
486
|
var validate_adaptors_default = validateAdaptors;
|
|
376
487
|
|
|
377
488
|
// src/util/load-plan.ts
|
|
378
|
-
import
|
|
379
|
-
import
|
|
489
|
+
import fs3 from "node:fs/promises";
|
|
490
|
+
import path3 from "node:path";
|
|
380
491
|
import { isPath } from "@openfn/compiler";
|
|
381
492
|
|
|
382
493
|
// src/util/expand-adaptors.ts
|
|
@@ -406,7 +517,7 @@ var expand_adaptors_default = (input) => {
|
|
|
406
517
|
|
|
407
518
|
// src/util/map-adaptors-to-monorepo.ts
|
|
408
519
|
import { readFile } from "node:fs/promises";
|
|
409
|
-
import
|
|
520
|
+
import path2 from "node:path";
|
|
410
521
|
import assert from "node:assert";
|
|
411
522
|
import { getNameAndVersion as getNameAndVersion2 } from "@openfn/runtime";
|
|
412
523
|
var validateMonoRepo = async (repoPath, log) => {
|
|
@@ -430,7 +541,7 @@ var updatePath = (adaptor, repoPath, log) => {
|
|
|
430
541
|
);
|
|
431
542
|
}
|
|
432
543
|
const shortName = name.replace("@openfn/language-", "");
|
|
433
|
-
const abspath =
|
|
544
|
+
const abspath = path2.resolve(repoPath, "packages", shortName);
|
|
434
545
|
log.info(`Mapped adaptor ${name} to monorepo: ${abspath}`);
|
|
435
546
|
return `${name}=${abspath}`;
|
|
436
547
|
};
|
|
@@ -461,10 +572,10 @@ var loadPlan = async (options, logger) => {
|
|
|
461
572
|
}
|
|
462
573
|
const jsonPath = planPath || workflowPath;
|
|
463
574
|
if (!options.baseDir) {
|
|
464
|
-
options.baseDir =
|
|
575
|
+
options.baseDir = path3.dirname(jsonPath);
|
|
465
576
|
}
|
|
466
577
|
const json = await loadJson(jsonPath, logger);
|
|
467
|
-
const defaultName =
|
|
578
|
+
const defaultName = path3.parse(jsonPath).name;
|
|
468
579
|
if (json.workflow) {
|
|
469
580
|
return loadXPlan(json, options, logger, defaultName);
|
|
470
581
|
} else {
|
|
@@ -475,7 +586,7 @@ var load_plan_default = loadPlan;
|
|
|
475
586
|
var loadJson = async (workflowPath, logger) => {
|
|
476
587
|
let text;
|
|
477
588
|
try {
|
|
478
|
-
text = await
|
|
589
|
+
text = await fs3.readFile(workflowPath, "utf8");
|
|
479
590
|
logger.debug("Loaded workflow from", workflowPath);
|
|
480
591
|
} catch (e) {
|
|
481
592
|
return abort_default(
|
|
@@ -509,8 +620,8 @@ var loadExpression = async (options, logger) => {
|
|
|
509
620
|
const expressionPath = options.expressionPath;
|
|
510
621
|
logger.debug(`Loading expression from ${expressionPath}`);
|
|
511
622
|
try {
|
|
512
|
-
const expression = await
|
|
513
|
-
const name =
|
|
623
|
+
const expression = await fs3.readFile(expressionPath, "utf8");
|
|
624
|
+
const name = path3.parse(expressionPath).name;
|
|
514
625
|
const step = { expression };
|
|
515
626
|
if (options.adaptors) {
|
|
516
627
|
const [adaptor] = options.adaptors;
|
|
@@ -557,8 +668,8 @@ var loadOldWorkflow = async (workflow, options, logger, defaultName = "") => {
|
|
|
557
668
|
};
|
|
558
669
|
var fetchFile = async (jobId, rootDir = "", filePath, log) => {
|
|
559
670
|
try {
|
|
560
|
-
const fullPath = filePath.startsWith("~") ? filePath :
|
|
561
|
-
const result = await
|
|
671
|
+
const fullPath = filePath.startsWith("~") ? filePath : path3.resolve(rootDir, filePath);
|
|
672
|
+
const result = await fs3.readFile(fullPath, "utf8");
|
|
562
673
|
log.debug("Loaded file", fullPath);
|
|
563
674
|
return result;
|
|
564
675
|
} catch (e) {
|
|
@@ -620,8 +731,8 @@ var loadXPlan = async (plan, options, logger, defaultName = "") => {
|
|
|
620
731
|
};
|
|
621
732
|
|
|
622
733
|
// src/util/assert-path.ts
|
|
623
|
-
var assert_path_default = (
|
|
624
|
-
if (!
|
|
734
|
+
var assert_path_default = (path8) => {
|
|
735
|
+
if (!path8) {
|
|
625
736
|
console.error("ERROR: no path provided!");
|
|
626
737
|
console.error("\nUsage:");
|
|
627
738
|
console.error(" open path/to/job");
|
|
@@ -631,12 +742,58 @@ var assert_path_default = (path7) => {
|
|
|
631
742
|
}
|
|
632
743
|
};
|
|
633
744
|
|
|
745
|
+
// src/util/fuzzy-match-step.ts
|
|
746
|
+
var fuzzy_match_step_default = (plan, stepPattern) => {
|
|
747
|
+
if (stepPattern) {
|
|
748
|
+
const { steps } = plan.workflow;
|
|
749
|
+
const exact = steps.find((step) => step.id === stepPattern);
|
|
750
|
+
if (exact)
|
|
751
|
+
return exact.id;
|
|
752
|
+
const matches = {};
|
|
753
|
+
steps.forEach((step) => {
|
|
754
|
+
if (step.id?.includes(stepPattern) || step.name?.includes(stepPattern)) {
|
|
755
|
+
matches[step.id] = true;
|
|
756
|
+
}
|
|
757
|
+
});
|
|
758
|
+
const results = Object.keys(matches);
|
|
759
|
+
if (results.length === 1) {
|
|
760
|
+
return results[0];
|
|
761
|
+
}
|
|
762
|
+
if (results.length > 1) {
|
|
763
|
+
throw new Error("AMBIGUOUS_INPUT");
|
|
764
|
+
}
|
|
765
|
+
throw new Error("NOT_FOUND");
|
|
766
|
+
}
|
|
767
|
+
};
|
|
768
|
+
|
|
634
769
|
// src/execute/handler.ts
|
|
770
|
+
var matchStep = (plan, stepPattern, stepName, logger) => {
|
|
771
|
+
try {
|
|
772
|
+
return fuzzy_match_step_default(plan, stepPattern) ?? stepPattern;
|
|
773
|
+
} catch (err) {
|
|
774
|
+
let message;
|
|
775
|
+
let help;
|
|
776
|
+
if (err.message === "AMBIGUOUS_INPUT") {
|
|
777
|
+
message = `${stepName} pattern matched multiple steps`;
|
|
778
|
+
help = `The ${stepName} option can contain an exact match of a step id, or a partial match if a name or id so long as it is unique.`;
|
|
779
|
+
} else if (err.message === "NOT_FOUND") {
|
|
780
|
+
message = `${stepName} step not found`;
|
|
781
|
+
help = `The step "${stepPattern}" could not be be found in the workflow`;
|
|
782
|
+
} else {
|
|
783
|
+
message = `Error parsing ${stepName} option`;
|
|
784
|
+
}
|
|
785
|
+
abort_default(logger, `Error: ${message}`, void 0, help);
|
|
786
|
+
}
|
|
787
|
+
return "";
|
|
788
|
+
};
|
|
635
789
|
var executeHandler = async (options, logger) => {
|
|
636
790
|
const start = (/* @__PURE__ */ new Date()).getTime();
|
|
637
791
|
assert_path_default(options.path);
|
|
638
792
|
await validate_adaptors_default(options, logger);
|
|
639
793
|
let plan = await load_plan_default(options, logger);
|
|
794
|
+
if (options.cacheSteps) {
|
|
795
|
+
await clearCache(plan, options, logger);
|
|
796
|
+
}
|
|
640
797
|
const { repoDir, monorepoPath, autoinstall } = options;
|
|
641
798
|
if (autoinstall) {
|
|
642
799
|
if (monorepoPath) {
|
|
@@ -649,14 +806,55 @@ var executeHandler = async (options, logger) => {
|
|
|
649
806
|
}
|
|
650
807
|
}
|
|
651
808
|
}
|
|
652
|
-
|
|
809
|
+
let customStart;
|
|
810
|
+
let customEnd;
|
|
811
|
+
if (options.only) {
|
|
812
|
+
const step = matchStep(plan, options.only, "only", logger);
|
|
813
|
+
customStart = step;
|
|
814
|
+
customEnd = step;
|
|
815
|
+
logger.always(`Only running workflow step "${options.start}"`);
|
|
816
|
+
} else {
|
|
817
|
+
if (options.start) {
|
|
818
|
+
customStart = matchStep(
|
|
819
|
+
plan,
|
|
820
|
+
options.start ?? plan.options.start,
|
|
821
|
+
"start",
|
|
822
|
+
logger
|
|
823
|
+
);
|
|
824
|
+
logger.info(`Starting workflow from step "${options.start}"`);
|
|
825
|
+
}
|
|
826
|
+
if (options.end) {
|
|
827
|
+
customEnd = matchStep(
|
|
828
|
+
plan,
|
|
829
|
+
options.end ?? plan.options.end,
|
|
830
|
+
"end",
|
|
831
|
+
logger
|
|
832
|
+
);
|
|
833
|
+
logger.always(`Ending workflow at step "${options.end}"`);
|
|
834
|
+
}
|
|
835
|
+
}
|
|
836
|
+
const state = await load_state_default(plan, options, logger, customStart);
|
|
653
837
|
if (options.compile) {
|
|
654
838
|
plan = await compile_default(plan, options, logger);
|
|
655
839
|
} else {
|
|
656
840
|
logger.info("Skipping compilation as noCompile is set");
|
|
657
841
|
}
|
|
842
|
+
const finalPlan = {
|
|
843
|
+
...plan,
|
|
844
|
+
options: {
|
|
845
|
+
...plan.options,
|
|
846
|
+
start: customStart || plan.options.start,
|
|
847
|
+
end: customEnd
|
|
848
|
+
},
|
|
849
|
+
workflow: plan.workflow
|
|
850
|
+
};
|
|
658
851
|
try {
|
|
659
|
-
const result = await execute_default(
|
|
852
|
+
const result = await execute_default(finalPlan, state, options, logger);
|
|
853
|
+
if (options.cacheSteps) {
|
|
854
|
+
logger.success(
|
|
855
|
+
"Cached output written to ./cli-cache (see info logs for details)"
|
|
856
|
+
);
|
|
857
|
+
}
|
|
660
858
|
await serialize_output_default(options, result, logger);
|
|
661
859
|
const duration = printDuration((/* @__PURE__ */ new Date()).getTime() - start);
|
|
662
860
|
if (result?.errors) {
|
|
@@ -743,9 +941,9 @@ var testHandler = async (options, logger) => {
|
|
|
743
941
|
);
|
|
744
942
|
logger.debug('eg: -S "{ "data": { "answer": 33 } }"');
|
|
745
943
|
}
|
|
746
|
-
const state = await load_state_default(opts, createNullLogger());
|
|
944
|
+
const state = await load_state_default(plan, opts, createNullLogger());
|
|
747
945
|
const compiledPlan = await compile_default(plan, opts, logger);
|
|
748
|
-
const result = await execute_default(compiledPlan, state, opts);
|
|
946
|
+
const result = await execute_default(compiledPlan, state, opts, logger);
|
|
749
947
|
logger.success(`Result: ${result.data.answer}`);
|
|
750
948
|
return result;
|
|
751
949
|
};
|
|
@@ -807,28 +1005,28 @@ var handler_default4 = deployHandler;
|
|
|
807
1005
|
// src/docgen/handler.ts
|
|
808
1006
|
import { writeFile as writeFile3 } from "node:fs/promises";
|
|
809
1007
|
import { readFileSync, writeFileSync, mkdirSync, rmSync } from "node:fs";
|
|
810
|
-
import
|
|
1008
|
+
import path4 from "node:path";
|
|
811
1009
|
import { describePackage } from "@openfn/describe-package";
|
|
812
1010
|
import { getNameAndVersion as getNameAndVersion3 } from "@openfn/runtime";
|
|
813
1011
|
var RETRY_DURATION = 500;
|
|
814
1012
|
var RETRY_COUNT = 20;
|
|
815
1013
|
var TIMEOUT_MS = 1e3 * 60;
|
|
816
1014
|
var actualDocGen = (specifier) => describePackage(specifier, {});
|
|
817
|
-
var ensurePath = (filePath) => mkdirSync(
|
|
818
|
-
var generatePlaceholder = (
|
|
819
|
-
writeFileSync(
|
|
1015
|
+
var ensurePath = (filePath) => mkdirSync(path4.dirname(filePath), { recursive: true });
|
|
1016
|
+
var generatePlaceholder = (path8) => {
|
|
1017
|
+
writeFileSync(path8, `{ "loading": true, "timestamp": ${Date.now()}}`);
|
|
820
1018
|
};
|
|
821
1019
|
var finish = (logger, resultPath) => {
|
|
822
1020
|
logger.success("Done! Docs can be found at:\n");
|
|
823
|
-
logger.print(` ${
|
|
1021
|
+
logger.print(` ${path4.resolve(resultPath)}`);
|
|
824
1022
|
};
|
|
825
|
-
var generateDocs = async (specifier,
|
|
1023
|
+
var generateDocs = async (specifier, path8, docgen, logger) => {
|
|
826
1024
|
const result = await docgen(specifier);
|
|
827
|
-
await writeFile3(
|
|
828
|
-
finish(logger,
|
|
829
|
-
return
|
|
1025
|
+
await writeFile3(path8, JSON.stringify(result, null, 2));
|
|
1026
|
+
finish(logger, path8);
|
|
1027
|
+
return path8;
|
|
830
1028
|
};
|
|
831
|
-
var waitForDocs = async (docs,
|
|
1029
|
+
var waitForDocs = async (docs, path8, logger, retryDuration = RETRY_DURATION) => {
|
|
832
1030
|
try {
|
|
833
1031
|
if (docs.hasOwnProperty("loading")) {
|
|
834
1032
|
logger.info("Docs are being loaded by another process. Waiting.");
|
|
@@ -840,19 +1038,19 @@ var waitForDocs = async (docs, path7, logger, retryDuration = RETRY_DURATION) =>
|
|
|
840
1038
|
clearInterval(i);
|
|
841
1039
|
reject(new Error("Timed out waiting for docs to load"));
|
|
842
1040
|
}
|
|
843
|
-
const updated = JSON.parse(readFileSync(
|
|
1041
|
+
const updated = JSON.parse(readFileSync(path8, "utf8"));
|
|
844
1042
|
if (!updated.hasOwnProperty("loading")) {
|
|
845
1043
|
logger.info("Docs found!");
|
|
846
1044
|
clearInterval(i);
|
|
847
|
-
resolve(
|
|
1045
|
+
resolve(path8);
|
|
848
1046
|
}
|
|
849
1047
|
count++;
|
|
850
1048
|
}, retryDuration);
|
|
851
1049
|
});
|
|
852
1050
|
} else {
|
|
853
|
-
logger.info(`Docs already written to cache at ${
|
|
854
|
-
finish(logger,
|
|
855
|
-
return
|
|
1051
|
+
logger.info(`Docs already written to cache at ${path8}`);
|
|
1052
|
+
finish(logger, path8);
|
|
1053
|
+
return path8;
|
|
856
1054
|
}
|
|
857
1055
|
} catch (e) {
|
|
858
1056
|
logger.error("Existing doc JSON corrupt. Aborting");
|
|
@@ -869,28 +1067,28 @@ var docgenHandler = (options, logger, docgen = actualDocGen, retryDuration = RET
|
|
|
869
1067
|
process.exit(9);
|
|
870
1068
|
}
|
|
871
1069
|
logger.success(`Generating docs for ${specifier}`);
|
|
872
|
-
const
|
|
873
|
-
ensurePath(
|
|
1070
|
+
const path8 = `${repoDir}/docs/${specifier}.json`;
|
|
1071
|
+
ensurePath(path8);
|
|
874
1072
|
const handleError = () => {
|
|
875
1073
|
logger.info("Removing placeholder");
|
|
876
|
-
rmSync(
|
|
1074
|
+
rmSync(path8);
|
|
877
1075
|
};
|
|
878
1076
|
try {
|
|
879
|
-
const existing = readFileSync(
|
|
1077
|
+
const existing = readFileSync(path8, "utf8");
|
|
880
1078
|
const json = JSON.parse(existing);
|
|
881
1079
|
if (json && json.timeout && Date.now() - json.timeout >= TIMEOUT_MS) {
|
|
882
1080
|
logger.info(`Expired placeholder found. Removing.`);
|
|
883
|
-
rmSync(
|
|
1081
|
+
rmSync(path8);
|
|
884
1082
|
throw new Error("TIMEOUT");
|
|
885
1083
|
}
|
|
886
|
-
return waitForDocs(json,
|
|
1084
|
+
return waitForDocs(json, path8, logger, retryDuration);
|
|
887
1085
|
} catch (e) {
|
|
888
1086
|
if (e.message !== "TIMEOUT") {
|
|
889
|
-
logger.info(`Docs JSON not found at ${
|
|
1087
|
+
logger.info(`Docs JSON not found at ${path8}`);
|
|
890
1088
|
}
|
|
891
1089
|
logger.debug("Generating placeholder");
|
|
892
|
-
generatePlaceholder(
|
|
893
|
-
return generateDocs(specifier,
|
|
1090
|
+
generatePlaceholder(path8);
|
|
1091
|
+
return generateDocs(specifier, path8, docgen, logger).catch((e2) => {
|
|
894
1092
|
logger.error("Error generating documentation");
|
|
895
1093
|
logger.error(e2);
|
|
896
1094
|
handleError();
|
|
@@ -937,7 +1135,7 @@ var docsHandler = async (options, logger) => {
|
|
|
937
1135
|
logger.success(`Showing docs for ${adaptorName} v${version}`);
|
|
938
1136
|
}
|
|
939
1137
|
logger.info("Generating/loading documentation...");
|
|
940
|
-
const
|
|
1138
|
+
const path8 = await handler_default5(
|
|
941
1139
|
{
|
|
942
1140
|
specifier: `${name}@${version}`,
|
|
943
1141
|
repoDir
|
|
@@ -946,8 +1144,8 @@ var docsHandler = async (options, logger) => {
|
|
|
946
1144
|
createNullLogger()
|
|
947
1145
|
);
|
|
948
1146
|
let didError = false;
|
|
949
|
-
if (
|
|
950
|
-
const source = await readFile2(
|
|
1147
|
+
if (path8) {
|
|
1148
|
+
const source = await readFile2(path8, "utf8");
|
|
951
1149
|
const data = JSON.parse(source);
|
|
952
1150
|
let desc;
|
|
953
1151
|
if (operation) {
|
|
@@ -985,7 +1183,7 @@ var handler_default6 = docsHandler;
|
|
|
985
1183
|
// src/metadata/cache.ts
|
|
986
1184
|
import { createHash } from "node:crypto";
|
|
987
1185
|
import { readFileSync as readFileSync2 } from "node:fs";
|
|
988
|
-
import
|
|
1186
|
+
import path5 from "node:path";
|
|
989
1187
|
import { writeFile as writeFile4, mkdir } from "node:fs/promises";
|
|
990
1188
|
var getPath = (repoDir, key) => `${repoDir}/meta/${key}.json`;
|
|
991
1189
|
var sortKeys = (obj) => {
|
|
@@ -1016,7 +1214,7 @@ var get = (repoPath, key) => {
|
|
|
1016
1214
|
};
|
|
1017
1215
|
var set = async (repoPath, key, data) => {
|
|
1018
1216
|
const fullPath = getPath(repoPath, key);
|
|
1019
|
-
await mkdir(
|
|
1217
|
+
await mkdir(path5.dirname(fullPath), { recursive: true });
|
|
1020
1218
|
await writeFile4(fullPath, JSON.stringify(data));
|
|
1021
1219
|
};
|
|
1022
1220
|
var cache_default = { get, set, generateKey, getPath, sortKeys };
|
|
@@ -1057,7 +1255,7 @@ var getAdaptorPath = async (adaptor, logger, repoDir) => {
|
|
|
1057
1255
|
var metadataHandler = async (options, logger) => {
|
|
1058
1256
|
const { repoDir, adaptors } = options;
|
|
1059
1257
|
const adaptor = adaptors[0];
|
|
1060
|
-
const state = await load_state_default(options, logger);
|
|
1258
|
+
const state = await load_state_default({}, options, logger);
|
|
1061
1259
|
logger.success(`Generating metadata`);
|
|
1062
1260
|
logger.info("config:", state);
|
|
1063
1261
|
const config = state.configuration;
|
|
@@ -1100,8 +1298,8 @@ var metadataHandler = async (options, logger) => {
|
|
|
1100
1298
|
var handler_default7 = metadataHandler;
|
|
1101
1299
|
|
|
1102
1300
|
// src/pull/handler.ts
|
|
1103
|
-
import
|
|
1104
|
-
import
|
|
1301
|
+
import path6 from "path";
|
|
1302
|
+
import fs4 from "node:fs/promises";
|
|
1105
1303
|
import {
|
|
1106
1304
|
getConfig as getConfig2,
|
|
1107
1305
|
getProject,
|
|
@@ -1123,8 +1321,8 @@ async function pullHandler(options, logger) {
|
|
|
1123
1321
|
process.exit(1);
|
|
1124
1322
|
}
|
|
1125
1323
|
const state = getStateFromProjectPayload(project);
|
|
1126
|
-
await
|
|
1127
|
-
|
|
1324
|
+
await fs4.writeFile(
|
|
1325
|
+
path6.resolve(config.statePath),
|
|
1128
1326
|
JSON.stringify(state, null, 2)
|
|
1129
1327
|
);
|
|
1130
1328
|
logger.always(
|
|
@@ -1149,9 +1347,9 @@ async function pullHandler(options, logger) {
|
|
|
1149
1347
|
process.exitCode = 1;
|
|
1150
1348
|
process.exit(1);
|
|
1151
1349
|
}
|
|
1152
|
-
const resolvedPath =
|
|
1350
|
+
const resolvedPath = path6.resolve(config.specPath);
|
|
1153
1351
|
logger.debug("reading spec from", resolvedPath);
|
|
1154
|
-
await
|
|
1352
|
+
await fs4.writeFile(resolvedPath, res.body);
|
|
1155
1353
|
const spec = await getSpec(resolvedPath);
|
|
1156
1354
|
if (spec.errors.length > 0) {
|
|
1157
1355
|
logger.error("ERROR: invalid spec");
|
|
@@ -1182,7 +1380,7 @@ var handler_default8 = pullHandler;
|
|
|
1182
1380
|
|
|
1183
1381
|
// src/util/print-versions.ts
|
|
1184
1382
|
import { readFileSync as readFileSync3 } from "node:fs";
|
|
1185
|
-
import
|
|
1383
|
+
import path7 from "node:path";
|
|
1186
1384
|
import url from "node:url";
|
|
1187
1385
|
import { getNameAndVersion as getNameAndVersion5 } from "@openfn/runtime";
|
|
1188
1386
|
import { mainSymbols } from "figures";
|
|
@@ -1194,7 +1392,7 @@ var { triangleRightSmall: t } = mainSymbols;
|
|
|
1194
1392
|
var loadVersionFromPath = (adaptorPath) => {
|
|
1195
1393
|
try {
|
|
1196
1394
|
const pkg = JSON.parse(
|
|
1197
|
-
readFileSync3(
|
|
1395
|
+
readFileSync3(path7.resolve(adaptorPath, "package.json"), "utf8")
|
|
1198
1396
|
);
|
|
1199
1397
|
return pkg.version;
|
|
1200
1398
|
} catch (e) {
|
|
@@ -1227,7 +1425,7 @@ var printVersions = async (logger, options = {}, includeComponents = false) => {
|
|
|
1227
1425
|
...[NODE, CLI2, RUNTIME2, COMPILER2, adaptorName].map((s) => s.length)
|
|
1228
1426
|
);
|
|
1229
1427
|
const prefix = (str) => ` ${t} ${str.padEnd(longest + 4, " ")}`;
|
|
1230
|
-
const dirname =
|
|
1428
|
+
const dirname = path7.dirname(url.fileURLToPath(import.meta.url));
|
|
1231
1429
|
const pkg = JSON.parse(readFileSync3(`${dirname}/../../package.json`, "utf8"));
|
|
1232
1430
|
const { version, dependencies } = pkg;
|
|
1233
1431
|
const compilerVersion = dependencies["@openfn/compiler"];
|
package/package.json
CHANGED
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
{
|
|
2
2
|
"name": "@openfn/cli",
|
|
3
|
-
"version": "1.1
|
|
3
|
+
"version": "1.2.1",
|
|
4
4
|
"description": "CLI devtools for the openfn toolchain.",
|
|
5
5
|
"engines": {
|
|
6
6
|
"node": ">=18",
|
|
@@ -45,11 +45,11 @@
|
|
|
45
45
|
"rimraf": "^3.0.2",
|
|
46
46
|
"treeify": "^1.1.0",
|
|
47
47
|
"yargs": "^17.7.2",
|
|
48
|
-
"@openfn/compiler": "0.1.1",
|
|
49
48
|
"@openfn/deploy": "0.4.4",
|
|
50
|
-
"@openfn/
|
|
51
|
-
"@openfn/
|
|
52
|
-
"@openfn/logger": "1.0.1"
|
|
49
|
+
"@openfn/compiler": "0.1.2",
|
|
50
|
+
"@openfn/describe-package": "0.0.19",
|
|
51
|
+
"@openfn/logger": "1.0.1",
|
|
52
|
+
"@openfn/runtime": "1.1.2"
|
|
53
53
|
},
|
|
54
54
|
"files": [
|
|
55
55
|
"dist",
|