@openfn/cli 1.18.6 → 1.20.0
This diff represents the content of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the two versions as they appear in their respective public registries.
- package/dist/index.js +135 -51
- package/dist/process/runner.js +1061 -558
- package/package.json +10 -8
package/dist/process/runner.js
CHANGED
@@ -1,3 +1,9 @@
+var __defProp = Object.defineProperty;
+var __export = (target, all) => {
+  for (var name in all)
+    __defProp(target, name, { get: all[name], enumerable: true });
+};
+
 // src/apollo/handler.ts
 import { WebSocket } from "ws";
 import { readFile, writeFile, mkdir } from "node:fs/promises";
@@ -15,13 +21,13 @@ var urlMap = {
   ["local"]: LOCAL_URL
 };
 var DEFAULT_ENV = "staging";
-var getURL = (options) => {
-  if (options.apolloUrl) {
-    if (options.apolloUrl in urlMap) {
-      return urlMap[options.apolloUrl];
+var getURL = (options6) => {
+  if (options6.apolloUrl) {
+    if (options6.apolloUrl in urlMap) {
+      return urlMap[options6.apolloUrl];
     }
-    if (options.apolloUrl.startsWith("http")) {
-      return options.apolloUrl;
+    if (options6.apolloUrl.startsWith("http")) {
+      return options6.apolloUrl;
     }
     throw new Error(`Unrecognised apollo URL`);
   }
@@ -46,14 +52,14 @@ var outputFiles = (files, logger) => {
 };

 // src/apollo/handler.ts
-var apolloHandler = async (options, logger) => {
-  logger.always(`Calling Apollo service: ${options.service}`);
-  const json = await loadPayload(logger, options.payload);
-  const url2 = getURL(options);
+var apolloHandler = async (options6, logger) => {
+  logger.always(`Calling Apollo service: ${options6.service}`);
+  const json = await loadPayload(logger, options6.payload);
+  const url2 = getURL(options6);
   logger.success(`Using apollo server at`, url2);
-  const result = await callApollo(url2, options.service, json, logger);
+  const result = await callApollo(url2, options6.service, json, logger);
   if (result) {
-    await serializeOutput(options, result, logger);
+    await serializeOutput(options6, result, logger);
   } else {
     logger.warn("No output returned from Apollo");
   }
@@ -73,15 +79,15 @@ var write = async (basePath, filePath, content, logger) => {
   await writeFile(dest, content);
   logger.success(`Wrote content to ${dest}`);
 };
-var serializeOutput = async (options, result, logger) => {
-  if (options.outputPath) {
-    if (result.files && !options.outputPath.endsWith(".json")) {
+var serializeOutput = async (options6, result, logger) => {
+  if (options6.outputPath) {
+    if (result.files && !options6.outputPath.endsWith(".json")) {
       for (const p in result.files) {
-        await write(options.outputPath, p, result.files[p], logger);
+        await write(options6.outputPath, p, result.files[p], logger);
       }
     } else {
       await write(
-        options.outputPath,
+        options6.outputPath,
         "",
         JSON.stringify(result, null, 2),
         logger
@@ -127,14 +133,14 @@ var callApollo = async (apolloBaseUrl, serviceName, payload, logger) => {
     });
   });
 };
-var loadPayload = async (logger, path16) => {
-  if (!path16) {
+var loadPayload = async (logger, path15) => {
+  if (!path15) {
     logger.warn("No JSON payload provided");
     logger.warn("Most apollo services require JSON to be uploaded");
     return {};
   }
-  if (path16.endsWith(".json")) {
-    const str = await readFile(path16, "utf8");
+  if (path15.endsWith(".json")) {
+    const str = await readFile(path15, "utf8");
     const json = JSON.parse(str);
     logger.debug("Loaded JSON payload");
     return json;
@@ -158,17 +164,17 @@ var namespaces = {
   [COMPILER]: "CMP",
   [JOB]: "JOB"
 };
-var createLogger2 = (name = "", options) => {
-  const logOptions = options.log || {};
+var createLogger2 = (name = "", options6) => {
+  const logOptions = options6.log || {};
   let json = false;
   let level = logOptions[name] || logOptions.default || "default";
-  if (options.logJson) {
+  if (options6.logJson) {
     json = true;
   }
   return actualCreateLogger(namespaces[name] || name, {
     level,
     json,
-    sanitize: options.sanitize || "none",
+    sanitize: options6.sanitize || "none",
     ...logOptions
   });
 };
@@ -179,8 +185,8 @@ var createNullLogger = () => createLogger2(void 0, { log: { default: "none" } })
 import fs from "node:fs";
 import path2 from "node:path";
 import { rmdir } from "node:fs/promises";
-var getCachePath = async (plan, options, stepId) => {
-  const { baseDir } = options;
+var getCachePath = async (plan, options6, stepId) => {
+  const { baseDir } = options6;
   const { name } = plan.workflow;
   const basePath = `${baseDir}/.cli-cache/${name}`;
   if (stepId) {
@@ -188,10 +194,10 @@ var getCachePath = async (plan, options, stepId) => {
   }
   return path2.resolve(basePath);
 };
-var ensureGitIgnore = (options) => {
-  if (!options._hasGitIgnore) {
+var ensureGitIgnore = (options6) => {
+  if (!options6._hasGitIgnore) {
     const ignorePath = path2.resolve(
-      options.baseDir,
+      options6.baseDir,
       ".cli-cache",
       ".gitignore"
     );
@@ -201,19 +207,19 @@ var ensureGitIgnore = (options) => {
       fs.writeFileSync(ignorePath, "*");
     }
   }
-  options._hasGitIgnore = true;
+  options6._hasGitIgnore = true;
 };
-var saveToCache = async (plan, stepId, output, options, logger) => {
-  if (options.cacheSteps) {
-    const cachePath = await getCachePath(plan, options, stepId);
+var saveToCache = async (plan, stepId, output, options6, logger) => {
+  if (options6.cacheSteps) {
+    const cachePath = await getCachePath(plan, options6, stepId);
     fs.mkdirSync(path2.dirname(cachePath), { recursive: true });
-    ensureGitIgnore(options);
+    ensureGitIgnore(options6);
     logger.info(`Writing ${stepId} output to ${cachePath}`);
     fs.writeFileSync(cachePath, JSON.stringify(output));
   }
 };
-var clearCache = async (plan, options, logger) => {
-  const cacheDir = await getCachePath(plan, options);
+var clearCache = async (plan, options6, logger) => {
+  const cacheDir = await getCachePath(plan, options6);
   try {
     await rmdir(cacheDir, { recursive: true });
     logger.info(`Cleared cache at ${cacheDir}`);
@@ -256,13 +262,13 @@ var execute_default = async (plan, input, opts, logger) => {
 };
 function parseAdaptors(plan) {
   const extractInfo = (specifier) => {
-    const [module, path16] = specifier.split("=");
+    const [module, path15] = specifier.split("=");
     const { name, version } = getNameAndVersion(module);
     const info = {
       name
     };
-    if (path16) {
-      info.path = path16;
+    if (path15) {
+      info.path = path15;
     }
     if (version) {
       info.version = version;
@@ -283,7 +289,7 @@ function parseAdaptors(plan) {
 // src/execute/serialize-output.ts
 import { mkdir as mkdir2, writeFile as writeFile2 } from "node:fs/promises";
 import { dirname } from "node:path";
-var serializeOutput2 = async (options, result, logger) => {
+var serializeOutput2 = async (options6, result, logger) => {
   let output = result;
   if (output && (output.configuration || output.data)) {
     const { configuration, ...rest } = result;
@@ -294,14 +300,14 @@ var serializeOutput2 = async (options, result, logger) => {
   } else {
     output = JSON.stringify(output, void 0, 2);
   }
-  if (options.outputStdout) {
+  if (options6.outputStdout) {
     logger.success(`Result: `);
     logger.always(output);
-  } else if (options.outputPath) {
-    await mkdir2(dirname(options.outputPath), { recursive: true });
-    logger.debug(`Writing output to ${options.outputPath}`);
-    await writeFile2(options.outputPath, output);
-    logger.success(`State written to ${options.outputPath}`);
+  } else if (options6.outputPath) {
+    await mkdir2(dirname(options6.outputPath), { recursive: true });
+    logger.debug(`Writing output to ${options6.outputPath}`);
+    await writeFile2(options6.outputPath, output);
+    logger.success(`State written to ${options6.outputPath}`);
   }
   return output;
 };
@@ -328,16 +334,16 @@ import {
   loadRepoPkg,
   getNameAndVersion as getNameAndVersion2
 } from "@openfn/runtime";
-var install = async (opts, log = defaultLogger) => {
+var install = async (opts, log2 = defaultLogger) => {
   let { packages, adaptors, repoDir } = opts;
   const targets = [].concat(packages ?? [], adaptors ?? []);
   if (targets) {
-    log.timer("install");
-    log.success("Installing packages...");
-    log.debug("repoDir is set to:", repoDir);
-    const result = await rtInstall(targets, repoDir, log);
-    const duration = log.timer("install");
-    log.success(`Installation complete in ${duration}`);
+    log2.timer("install");
+    log2.success("Installing packages...");
+    log2.debug("repoDir is set to:", repoDir);
+    const result = await rtInstall(targets, repoDir, log2);
+    const duration = log2.timer("install");
+    log2.success(`Installation complete in ${duration}`);
     return result;
   }
   return [];
@@ -365,16 +371,16 @@ var removePackage = async (packageSpecifier, repoDir, logger) => {
     logger.warn(`Failed to remove ${aliasedName}: ${error.message}`);
   }
 };
-var clean = async (options, logger) => {
-  if (options.repoDir) {
+var clean = async (options6, logger) => {
+  if (options6.repoDir) {
     const doIt = await logger.confirm(
-      `This will remove everything at ${options.repoDir}. Do you wish to proceed?`,
-      options.force
+      `This will remove everything at ${options6.repoDir}. Do you wish to proceed?`,
+      options6.force
     );
     if (doIt) {
       return new Promise((resolve) => {
-        logger.info(`Cleaning repo at ${options.repoDir} `);
-        exec(`npm exec rimraf ${options.repoDir}`, () => {
+        logger.info(`Cleaning repo at ${options6.repoDir} `);
+        exec(`npm exec rimraf ${options6.repoDir}`, () => {
           logger.success("Repo cleaned");
           resolve();
         });
@@ -385,12 +391,12 @@ var clean = async (options, logger) => {
     logger.error("No repoDir path detected");
   }
 };
-var pwd = async (options, logger) => {
+var pwd = async (options6, logger) => {
   logger.info(`OPENFN_REPO_DIR is set to ${process.env.OPENFN_REPO_DIR}`);
-  logger.success(`Repo working directory is: ${options.repoDir}`);
+  logger.success(`Repo working directory is: ${options6.repoDir}`);
 };
-var getDependencyList = async (options, _logger) => {
-  const pkg = await loadRepoPkg(options.repoDir);
+var getDependencyList = async (options6, _logger) => {
+  const pkg = await loadRepoPkg(options6.repoDir);
   const result = {};
   if (pkg) {
     Object.keys(pkg.dependencies).forEach((key) => {
@@ -403,9 +409,9 @@ var getDependencyList = async (options, _logger) => {
   }
   return result;
 };
-var list = async (options, logger) => {
-  const tree = await getDependencyList(options, logger);
-  await pwd(options, logger);
+var list = async (options6, logger) => {
+  const tree = await getDependencyList(options6, logger);
+  await pwd(options6, logger);
   const output = {};
   Object.keys(tree).forEach((key) => {
     const versions = tree[key];
@@ -463,27 +469,27 @@ var throwAbortableError = (message, help) => {
 };

 // src/compile/compile.ts
-async function compile_default(planOrPath, opts, log) {
+async function compile_default(planOrPath, opts, log2) {
   if (typeof planOrPath === "string") {
-    const result = await compileJob(planOrPath, opts, log);
-    log.success(`Compiled expression from ${opts.expressionPath}`);
+    const result = await compileJob(planOrPath, opts, log2);
+    log2.success(`Compiled expression from ${opts.expressionPath}`);
     return result;
   }
   const compiledPlan = await compileWorkflow(
     planOrPath,
     opts,
-    log
+    log2
   );
-  log.success("Compiled all expressions in workflow");
+  log2.success("Compiled all expressions in workflow");
   return compiledPlan;
 }
-var compileJob = async (job, opts, log, jobName) => {
+var compileJob = async (job, opts, log2, jobName) => {
   try {
-    const compilerOptions = await loadTransformOptions(opts, log);
+    const compilerOptions = await loadTransformOptions(opts, log2);
     return compile(job, compilerOptions);
   } catch (e) {
     abort_default(
-      log,
+      log2,
       `Failed to compile job ${jobName ?? ""}`.trim(),
       e,
       "Check the syntax of the job expression:\n\n" + job
@@ -491,7 +497,7 @@ var compileJob = async (job, opts, log, jobName) => {
     return { code: job };
   }
 };
-var compileWorkflow = async (plan, opts, log) => {
+var compileWorkflow = async (plan, opts, log2) => {
   let globalsIgnoreList = getExports(plan.workflow.globals);
   for (const step of plan.workflow.steps) {
     const job = step;
@@ -505,7 +511,7 @@ var compileWorkflow = async (plan, opts, log) => {
     const { code, map } = await compileJob(
       job.expression,
       jobOpts,
-      log,
+      log2,
       job.id
     );
     job.expression = code;
@@ -521,21 +527,21 @@ var stripVersionSpecifier = (specifier) => {
   }
   return specifier;
 };
-var resolveSpecifierPath = async (pattern, repoDir, log) => {
-  const [specifier, path16] = pattern.split("=");
-  if (path16) {
-    log.debug(`Resolved ${specifier} to path: ${path16}`);
-    return path16;
+var resolveSpecifierPath = async (pattern, repoDir, log2) => {
+  const [specifier, path15] = pattern.split("=");
+  if (path15) {
+    log2.debug(`Resolved ${specifier} to path: ${path15}`);
+    return path15;
   }
-  const repoPath = await getModulePath(specifier, repoDir, log);
+  const repoPath = await getModulePath(specifier, repoDir, log2);
   if (repoPath) {
     return repoPath;
   }
   return null;
 };
-var loadTransformOptions = async (opts, log) => {
-  const options = {
-    logger: log || logger_default(COMPILER, opts),
+var loadTransformOptions = async (opts, log2) => {
+  const options6 = {
+    logger: log2 || logger_default(COMPILER, opts),
     trace: opts.trace
   };
   if (opts.adaptors?.length && opts.ignoreImports != true) {
@@ -543,18 +549,18 @@ var loadTransformOptions = async (opts, log) => {
     for (const adaptorInput of opts.adaptors) {
       let exports;
       const [specifier] = adaptorInput.split("=");
-      log.debug(`Trying to preload types for ${specifier}`);
-      const path16 = await resolveSpecifierPath(adaptorInput, opts.repoDir, log);
-      if (path16) {
+      log2.debug(`Trying to preload types for ${specifier}`);
+      const path15 = await resolveSpecifierPath(adaptorInput, opts.repoDir, log2);
+      if (path15) {
        try {
-          exports = await preloadAdaptorExports(path16, log);
+          exports = await preloadAdaptorExports(path15, log2);
        } catch (e) {
-          log.error(`Failed to load adaptor typedefs from path ${path16}`);
-          log.error(e);
+          log2.error(`Failed to load adaptor typedefs from path ${path15}`);
+          log2.error(e);
        }
      }
      if (!exports || exports.length === 0) {
-        log.debug(`No module exports found for ${adaptorInput}`);
+        log2.debug(`No module exports found for ${adaptorInput}`);
      }
      adaptorsConfig.push({
        name: stripVersionSpecifier(specifier),
@@ -562,12 +568,12 @@ var loadTransformOptions = async (opts, log) => {
         exportAll: true
       });
     }
-    options["add-imports"] = {
+    options6["add-imports"] = {
       ignore: opts.ignoreImports,
       adaptors: adaptorsConfig
     };
   }
-  return options;
+  return options6;
 };

 // src/util/load-state.ts
@@ -585,72 +591,72 @@ var getUpstreamStepId = (plan, stepId) => {
     return typeof upstreamStep === "string" ? upstreamStep : upstreamStep.id;
   }
 };
-var load_state_default = async (plan, opts, log, start) => {
-  const { stateStdin, statePath } = opts;
-  log.debug("Loading state...");
+var load_state_default = async (plan, opts, log2, start) => {
+  const { stateStdin, statePath: statePath2 } = opts;
+  log2.debug("Loading state...");
   if (stateStdin) {
     try {
       const json = JSON.parse(stateStdin);
-      log.success("Read state from stdin");
-      log.debug("state:", json);
+      log2.success("Read state from stdin");
+      log2.debug("state:", json);
       return json;
     } catch (e) {
-      log.error("Failed to load state from stdin");
-      log.error(stateStdin);
-      log.error(e);
+      log2.error("Failed to load state from stdin");
+      log2.error(stateStdin);
+      log2.error(e);
       process.exit(1);
     }
   }
-  if (statePath) {
+  if (statePath2) {
     try {
-      const str = await fs2.readFile(statePath, "utf8");
+      const str = await fs2.readFile(statePath2, "utf8");
       const json = JSON.parse(str);
-      log.success(`Loaded state from ${statePath}`);
-      log.debug("state:", json);
+      log2.success(`Loaded state from ${statePath2}`);
+      log2.debug("state:", json);
       return json;
     } catch (e) {
-      log.warn(`Error loading state from ${statePath}`);
-      log.warn(e);
+      log2.warn(`Error loading state from ${statePath2}`);
+      log2.warn(e);
     }
   }
   if (start) {
-    log.info(
+    log2.info(
       "No state provided to CLI. Will attempt to load state from cache instead"
     );
-    log.always(
+    log2.always(
       `Attempting to load cached input state for starting step "${start}"`
     );
     try {
       const upstreamStepId = getUpstreamStepId(plan, start);
       if (upstreamStepId) {
-        log.debug(`Input step for "${start}" is "${upstreamStepId}"`);
+        log2.debug(`Input step for "${start}" is "${upstreamStepId}"`);
         const cachedStatePath = await getCachePath(plan, opts, upstreamStepId);
-        log.debug("Loading cached state from", cachedStatePath);
+        log2.debug("Loading cached state from", cachedStatePath);
         try {
           await fs2.access(cachedStatePath);
           const str = await fs2.readFile(cachedStatePath, "utf8");
           const json = JSON.parse(str);
-          log.success(
+          log2.success(
             `Loaded cached state for step "${start}" from ${cachedStatePath}`
           );
-          log.info(` To force disable the cache, run again with --no-cache`);
+          log2.info(` To force disable the cache, run again with --no-cache`);
           return json;
         } catch (e) {
-          log.warn(`No cached state found for step "${start}"`);
-          log.warn(
+          log2.warn(`No cached state found for step "${start}"`);
+          log2.warn(
             "Re-run this workflow with --cache to save the output of each step"
           );
-          log.break();
+          log2.break();
         }
       } else {
-        log.warn(`Could not find an input step for step "${start}"`);
+        log2.warn(`Could not find an input step for step "${start}"`);
       }
     } catch (e) {
-      log.warn("Error loading cached state");
-      log.warn(e);
+      log2.warn("Error loading cached state");
+      log2.warn(e);
     }
   }
-  log.info(
+  log2.info(
     "No state provided - using default state { data: {}, configuration: {} }"
   );
   return {
@@ -660,12 +666,12 @@ var load_state_default = async (plan, opts, log, start) => {
 };

 // src/util/validate-adaptors.ts
-var validateAdaptors = async (options, logger) => {
-  if (options.skipAdaptorValidation) {
+var validateAdaptors = async (options6, logger) => {
+  if (options6.skipAdaptorValidation) {
     return;
   }
-  const isPlan = options.planPath || options.workflowPath || options.workflow;
-  const hasDeclaredAdaptors = options.adaptors && options.adaptors.length > 0;
+  const isPlan = options6.planPath || options6.workflowPath || options6.workflow;
+  const hasDeclaredAdaptors = options6.adaptors && options6.adaptors.length > 0;
   if (isPlan && hasDeclaredAdaptors) {
     logger.error("ERROR: adaptor and workflow provided");
     logger.error(
@@ -721,43 +727,43 @@ import { readFile as readFile2 } from "node:fs/promises";
 import path3 from "node:path";
 import assert from "node:assert";
 import { getNameAndVersion as getNameAndVersion3 } from "@openfn/runtime";
-var validateMonoRepo = async (repoPath, log) => {
+var validateMonoRepo = async (repoPath, log2) => {
   try {
     const raw = await readFile2(`${repoPath}/package.json`, "utf8");
     const pkg = JSON.parse(raw);
     assert(pkg.name === "adaptors");
   } catch (e) {
-    log.error(`ERROR: Adaptors Monorepo not found at ${repoPath}`);
+    log2.error(`ERROR: Adaptors Monorepo not found at ${repoPath}`);
     process.exit(9);
   }
 };
-var updatePath = (adaptor, repoPath, log) => {
+var updatePath = (adaptor, repoPath, log2) => {
   if (adaptor.match("=")) {
     return adaptor;
   }
   const { name, version } = getNameAndVersion3(adaptor);
   if (version) {
-    log.warn(
+    log2.warn(
       `Warning: Ignoring version specifier on ${adaptor} as loading from the adaptors monorepo`
     );
   }
   const shortName = name.replace("@openfn/language-", "");
   const abspath = path3.resolve(repoPath, "packages", shortName);
-  log.info(`Mapped adaptor ${name} to monorepo: ${abspath}`);
+  log2.info(`Mapped adaptor ${name} to monorepo: ${abspath}`);
   return `${name}=${abspath}`;
 };
-var mapAdaptorsToMonorepo = (monorepoPath = "", input = [], log) => {
+var mapAdaptorsToMonorepo = (monorepoPath = "", input = [], log2) => {
   if (monorepoPath) {
     if (Array.isArray(input)) {
       const adaptors = input;
-      return adaptors.map((a) => updatePath(a, monorepoPath, log));
+      return adaptors.map((a) => updatePath(a, monorepoPath, log2));
     }
     const plan = input;
     Object.values(plan.workflow.steps).forEach((step) => {
       const job = step;
       if (job.adaptors) {
         job.adaptors = job.adaptors.map(
-          (a) => updatePath(a, monorepoPath, log)
+          (a) => updatePath(a, monorepoPath, log2)
         );
       }
     });
@@ -768,47 +774,47 @@ var mapAdaptorsToMonorepo = (monorepoPath = "", input = [], log) => {
 var map_adaptors_to_monorepo_default = mapAdaptorsToMonorepo;

 // src/util/load-plan.ts
-var loadPlan = async (options, logger) => {
-  const { workflowPath, planPath, expressionPath } = options;
-  if (options.path && /ya?ml$/.test(options.path)) {
-    const content = await fs3.readFile(path4.resolve(options.path), "utf-8");
-    const workflow = yamlToJson(content);
-    options.baseDir = dirname2(options.path);
-    return loadXPlan({ workflow }, options, logger);
-  }
-  if (options.path && options.workflow) {
-    options.baseDir = options.path;
-    return fromProject(options.path, options.workflow, options, logger);
-  }
-  if (!expressionPath && !workflowPath && !/\.(js|json|yaml)+$/.test(options.path || "") && !options.workflow) {
-    const workflow = options.path;
-    return fromProject(path4.resolve("."), workflow, options, logger);
+var loadPlan = async (options6, logger) => {
+  const { workflowPath, planPath, expressionPath } = options6;
+  if (options6.path && /ya?ml$/.test(options6.path)) {
+    const content = await fs3.readFile(path4.resolve(options6.path), "utf-8");
+    const workflow2 = yamlToJson(content);
+    options6.baseDir = dirname2(options6.path);
+    return loadXPlan({ workflow: workflow2 }, options6, logger);
+  }
+  if (options6.path && options6.workflow) {
+    options6.baseDir = options6.path;
+    return fromProject(options6.path, options6.workflow, options6, logger);
+  }
+  if (!expressionPath && !workflowPath && !/\.(js|json|yaml)+$/.test(options6.path || "") && !options6.workflow) {
+    const workflow2 = options6.path;
+    return fromProject(path4.resolve("."), workflow2, options6, logger);
   }
   if (expressionPath) {
-    return loadExpression(options, logger);
+    return loadExpression(options6, logger);
   }
   const jsonPath = planPath || workflowPath;
-  if (!options.baseDir) {
-    options.baseDir = path4.dirname(jsonPath);
+  if (!options6.baseDir) {
+    options6.baseDir = path4.dirname(jsonPath);
   }
   const json = await loadJson(jsonPath, logger);
   const defaultName = path4.parse(jsonPath).name;
   if (json.workflow) {
-    return loadXPlan(json, options, logger, defaultName);
+    return loadXPlan(json, options6, logger, defaultName);
   } else {
-    return loadOldWorkflow(json, options, logger, defaultName);
+    return loadOldWorkflow(json, options6, logger, defaultName);
   }
 };
 var load_plan_default = loadPlan;
-var fromProject = async (rootDir, workflowName, options, logger) => {
+var fromProject = async (rootDir, workflowName, options6, logger) => {
   logger.debug("Loading Repo from ", path4.resolve(rootDir));
   const project = await Project.from("fs", { root: rootDir });
   logger.debug("Loading workflow ", workflowName);
-  const workflow = project.getWorkflow(workflowName);
-  if (!workflow) {
+  const workflow2 = project.getWorkflow(workflowName);
+  if (!workflow2) {
     throw new Error(`Workflow "${workflowName}" not found`);
   }
-  return loadXPlan({ workflow }, options, logger);
+  return loadXPlan({ workflow: workflow2 }, options6, logger);
 };
 var loadJson = async (workflowPath, logger) => {
   let text;
@@ -843,8 +849,8 @@ var maybeAssign = (a, b, keys) => {
     }
   });
 };
-var loadExpression = async (options, logger) => {
-  const expressionPath = options.expressionPath;
+var loadExpression = async (options6, logger) => {
+  const expressionPath = options6.expressionPath;
   logger.debug(`Loading expression from ${expressionPath}`);
   try {
     const expression = await fs3.readFile(expressionPath, "utf8");
@@ -852,19 +858,19 @@ var loadExpression = async (options, logger) => {
     const step = {
       expression,
       // The adaptor should have been expanded nicely already, so we don't need intervene here
-      adaptors: options.adaptors ?? []
+      adaptors: options6.adaptors ?? []
     };
     const wfOptions = {};
-    maybeAssign(options, wfOptions, ["timeout"]);
+    maybeAssign(options6, wfOptions, ["timeout"]);
     const plan = {
       workflow: {
         name,
         steps: [step],
-        globals: options.globals
+        globals: options6.globals
       },
       options: wfOptions
     };
-    return loadXPlan(plan, options, logger);
+    return loadXPlan(plan, options6, logger);
   } catch (e) {
     abort_default(
       logger,
@@ -875,33 +881,33 @@ var loadExpression = async (options, logger) => {
     return {};
   }
 };
-var loadOldWorkflow = async (workflow, options, logger, defaultName = "") => {
+var loadOldWorkflow = async (workflow2, options6, logger, defaultName = "") => {
   const plan = {
     workflow: {
-      steps: workflow.jobs
+      steps: workflow2.jobs
     },
     options: {
-      start: workflow.start
+      start: workflow2.start
     }
   };
-  if (workflow.id) {
-    plan.id = workflow.id;
+  if (workflow2.id) {
+    plan.id = workflow2.id;
   }
-  const final = await loadXPlan(plan, options, logger, defaultName);
+  const final = await loadXPlan(plan, options6, logger, defaultName);
   logger.warn("Converted workflow into new format:");
   logger.warn(final);
   return final;
 };
-var fetchFile = async (fileInfo, log) => {
+var fetchFile = async (fileInfo, log2) => {
   const { rootDir = "", filePath, name } = fileInfo;
   try {
     const fullPath = filePath.startsWith("~") ? filePath : path4.resolve(rootDir, filePath);
     const result = await fs3.readFile(fullPath, "utf8");
-    log.debug("Loaded file", fullPath);
+    log2.debug("Loaded file", fullPath);
     return result;
   } catch (e) {
     abort_default(
-      log,
+      log2,
       `File not found for ${name}: ${filePath}`,
       void 0,
       `This workflow references a file which cannot be found at ${filePath}
@@ -911,20 +917,20 @@ Paths inside the workflow are relative to the workflow.json`
     return ".";
   }
 };
-var importGlobals = async (plan, rootDir, log) => {
+var importGlobals = async (plan, rootDir, log2) => {
   const fnStr = plan.workflow?.globals;
   if (fnStr) {
     if (isPath(fnStr)) {
       plan.workflow.globals = await fetchFile(
         { name: "globals", rootDir, filePath: fnStr },
-        log
+        log2
       );
     } else {
       plan.workflow.globals = fnStr;
     }
   }
 };
-var importExpressions = async (plan, rootDir, log) => {
+var importExpressions = async (plan, rootDir, log2) => {
   let idx = 0;
   for (const step of plan.workflow.steps) {
     const job = step;
@@ -942,7 +948,7 @@ var importExpressions = async (plan, rootDir, log) => {
           rootDir,
           filePath: expressionStr
         },
-        log
+        log2
       );
     }
     if (configurationStr && isPath(configurationStr)) {
@@ -952,7 +958,7 @@ var importExpressions = async (plan, rootDir, log) => {
           rootDir,
          filePath: configurationStr
        },
-        log
+        log2
      );
      job.configuration = JSON.parse(configString);
    }
@@ -963,7 +969,7 @@ var importExpressions = async (plan, rootDir, log) => {
           rootDir,
          filePath: stateStr
        },
-        log
+        log2
      );
      job.state = JSON.parse(stateString);
    }
@@ -979,7 +985,7 @@ var ensureAdaptors = (plan) => {
     job.adaptors ??= [];
   });
 };
-var loadXPlan = async (plan, options, logger, defaultName = "") => {
+var loadXPlan = async (plan, options6, logger, defaultName = "") => {
   if (!plan.options) {
     plan.options = {};
   }
@@ -987,22 +993,22 @@ var loadXPlan = async (plan, options, logger, defaultName = "") => {
     plan.workflow.name = defaultName;
   }
   ensureAdaptors(plan);
-  if (options.globals)
-    plan.workflow.globals = options.globals;
-  await importGlobals(plan, options.baseDir, logger);
-  await importExpressions(plan, options.baseDir, logger);
-  if (options.expandAdaptors) {
+  if (options6.globals)
+    plan.workflow.globals = options6.globals;
+  await importGlobals(plan, options6.baseDir, logger);
+  await importExpressions(plan, options6.baseDir, logger);
+  if (options6.expandAdaptors) {
     expand_adaptors_default(plan);
   }
-  await map_adaptors_to_monorepo_default(options.monorepoPath, plan, logger);
-  maybeAssign(options, plan.options, ["timeout", "start"]);
+  await map_adaptors_to_monorepo_default(options6.monorepoPath, plan, logger);
+  maybeAssign(options6, plan.options, ["timeout", "start"]);
   logger.info(`Loaded workflow ${plan.workflow.name ?? ""}`);
   return plan;
 };

 // src/util/assert-path.ts
-var assert_path_default = (path16) => {
-  if (!path16) {
+var assert_path_default = (path15) => {
+  if (!path15) {
     console.error("ERROR: no path provided!");
     console.error("\nUsage:");
     console.error("  open path/to/job");
@@ -1038,20 +1044,20 @@ var fuzzy_match_step_default = (plan, stepPattern) => {

 // src/util/validate-plan.ts
 var assertWorkflowStructure = (plan, logger) => {
-  const { workflow, options } = plan;
-  if (!workflow || typeof workflow !== "object") {
+  const { workflow: workflow2, options: options6 } = plan;
+  if (!workflow2 || typeof workflow2 !== "object") {
     throw new Error(`Missing or invalid "workflow" key in execution plan`);
   }
-  if (!Array.isArray(workflow.steps)) {
+  if (!Array.isArray(workflow2.steps)) {
     throw new Error("The workflow.steps key must be an array");
   }
-  if (workflow.steps.length === 0) {
+  if (workflow2.steps.length === 0) {
     logger.warn("The workflow.steps array is empty");
   }
-  workflow.steps.forEach((step, index) => {
+  workflow2.steps.forEach((step, index) => {
     assertStepStructure(step, index);
   });
-  assertOptionsStructure(options, logger);
+  assertOptionsStructure(options6, logger);
 };
 var assertStepStructure = (step, index) => {
   const allowedKeys = [
@@ -1078,9 +1084,9 @@ var assertStepStructure = (step, index) => {
     );
   }
 };
-var assertOptionsStructure = (options = {}, logger) => {
+var assertOptionsStructure = (options6 = {}, logger) => {
   const allowedKeys = ["timeout", "stepTimeout", "start", "end", "sanitize"];
-  for (const key in options) {
+  for (const key in options6) {
     if (!allowedKeys.includes(key)) {
       logger.warn(`Unrecognized option "${key}" in options object`);
     }
@@ -1136,17 +1142,17 @@ var matchStep = (plan, stepPattern, stepName, logger) => {
   }
   return "";
 };
-var executeHandler = async (options, logger) => {
+var executeHandler = async (options6, logger) => {
   const start = (/* @__PURE__ */ new Date()).getTime();
-  assert_path_default(options.path);
-  await validate_adaptors_default(options, logger);
-  let plan = await load_plan_default(options, logger);
+  assert_path_default(options6.path);
+  await validate_adaptors_default(options6, logger);
+  let plan = await load_plan_default(options6, logger);
   validate_plan_default(plan, logger);
-  if (options.cacheSteps) {
-    await clearCache(plan, options, logger);
+  if (options6.cacheSteps) {
+    await clearCache(plan, options6, logger);
   }
   const moduleResolutions = {};
-  const { repoDir, monorepoPath, autoinstall } = options;
+  const { repoDir, monorepoPath, autoinstall } = options6;
   if (autoinstall) {
     if (monorepoPath) {
       logger.warn("Skipping auto-install as monorepo is being used");
@@ -1154,13 +1160,13 @@ var executeHandler = async (options, logger) => {
       const autoInstallTargets = get_autoinstall_targets_default(plan);
       if (autoInstallTargets.length) {
         logger.info("Auto-installing language adaptors");
-        options.adaptors = await install(
+        options6.adaptors = await install(
           { packages: autoInstallTargets, repoDir },
           logger
         );
-        if (autoInstallTargets.length === options.adaptors.length) {
+        if (autoInstallTargets.length === options6.adaptors.length) {
           for (let i = 0; i < autoInstallTargets.length; i++) {
-            moduleResolutions[autoInstallTargets[i]] = options.adaptors[i];
+            moduleResolutions[autoInstallTargets[i]] = options6.adaptors[i];
           }
         }
       }
@@ -1168,35 +1174,35 @@ var executeHandler = async (options, logger) => {
   }
   let customStart;
   let customEnd;
-  if (options.only) {
-    const step = matchStep(plan, options.only, "only", logger);
+  if (options6.only) {
+    const step = matchStep(plan, options6.only, "only", logger);
     customStart = step;
     customEnd = step;
-    logger.always(`Only running workflow step "${options.start}"`);
+    logger.always(`Only running workflow step "${options6.start}"`);
   } else {
-    if (options.start) {
+    if (options6.start) {
       customStart = matchStep(
         plan,
-        options.start ?? plan.options.start,
+        options6.start ?? plan.options.start,
         "start",
         logger
       );
-      logger.info(`Starting workflow from step "${options.start}"`);
+      logger.info(`Starting workflow from step "${options6.start}"`);
     }
-    if (options.end) {
+    if (options6.end) {
       customEnd = matchStep(
         plan,
-        options.end ?? plan.options.end,
+        options6.end ?? plan.options.end,
         "end",
         logger
      );
-      logger.always(`Ending workflow at step "${options.end}"`);
+      logger.always(`Ending workflow at step "${options6.end}"`);
    }
  }
-  const state = await load_state_default(plan, options, logger, customStart);
+  const state = await load_state_default(plan, options6, logger, customStart);
   plan = override_plan_adaptors_default(plan, moduleResolutions);
-  if (options.compile) {
-    plan = await compile_default(plan, options, logger);
+  if (options6.compile) {
+    plan = await compile_default(plan, options6, logger);
   } else {
     logger.info("Skipping compilation as noCompile is set");
   }
@@ -1210,13 +1216,13 @@ var executeHandler = async (options, logger) => {
     workflow: plan.workflow
   };
   try {
-    const result = await execute_default(finalPlan, state, options, logger);
-    if (options.cacheSteps) {
+    const result = await execute_default(finalPlan, state, options6, logger);
+    if (options6.cacheSteps) {
       logger.success(
         "Cached output written to ./cli-cache (see info logs for details)"
       );
     }
-    await serialize_output_default(options, result, logger);
+    await serialize_output_default(options6, result, logger);
     const duration = printDuration((/* @__PURE__ */ new Date()).getTime() - start);
     if (result?.errors) {
       logger.warn(
@@ -1239,22 +1245,22 @@ var handler_default2 = executeHandler;

 // src/compile/handler.ts
 import { writeFile as writeFile3 } from "node:fs/promises";
-var compileHandler = async (options, logger) => {
-  assert_path_default(options.path);
+var compileHandler = async (options6, logger) => {
+  assert_path_default(options6.path);
   let result;
-  if (options.expressionPath) {
-    const { code } = await compile_default(options.expressionPath, options, logger);
+  if (options6.expressionPath) {
+    const { code } = await compile_default(options6.expressionPath, options6, logger);
     result = code;
   } else {
-    const plan = await load_plan_default(options, logger);
-    const compiledPlan = await compile_default(plan, options, logger);
+    const plan = await load_plan_default(options6, logger);
+    const compiledPlan = await compile_default(plan, options6, logger);
     result = JSON.stringify(compiledPlan, null, 2);
   }
-  if (options.outputStdout) {
+  if (options6.outputStdout) {
     logger.success("Result:\n\n" + result);
   } else {
-    await writeFile3(options.outputPath, result);
-    logger.success(`Compiled to ${options.outputPath}`);
+    await writeFile3(options6.outputPath, result);
+    logger.success(`Compiled to ${options6.outputPath}`);
   }
 };
 var handler_default3 = compileHandler;
@@ -1267,27 +1273,27 @@ import { readFile as readFile3, writeFile as writeFile4 } from "node:fs/promises
 import path5 from "node:path";
 import { request } from "undici";
 var DEFAULT_PAGE_SIZE = 1e3;
-var request_default = async (method, options, logger) => {
-  const base = options.lightning || process.env.OPENFN_ENDPOINT || "https://app.openfn.org";
-  const url2 = path5.join(base, "/collections", options.collectionName);
+var request_default = async (method, options6, logger) => {
+  const base = options6.lightning || process.env.OPENFN_ENDPOINT || "https://app.openfn.org";
+  const url2 = path5.join(base, "/collections", options6.collectionName);
   logger.debug("Calling Collections server at ", url2);
   const headers = {
-    Authorization: `Bearer ${options.token}`
+    Authorization: `Bearer ${options6.token}`
   };
   const query = Object.assign(
     {
-      key: options.key,
-      limit: options.pageSize || DEFAULT_PAGE_SIZE
+      key: options6.key,
+      limit: options6.pageSize || DEFAULT_PAGE_SIZE
     },
-    options.query
+    options6.query
   );
   const args = {
     headers,
     method,
     query
   };
-  if (options.data) {
-    args.body = JSON.stringify(options.data);
+  if (options6.data) {
+    args.body = JSON.stringify(options6.data);
     headers["content-type"] = "application/json";
   }
   let result = {};
@@ -1298,11 +1304,11 @@ var request_default = async (method, options, logger) => {
     if (cursor) {
       query.cursor = cursor;
     }
-    if (options.limit) {
-      limit = options.limit;
+    if (options6.limit) {
+      limit = options6.limit;
       query.limit = Math.min(
-        options.pageSize || DEFAULT_PAGE_SIZE,
-        options.limit - count
+        options6.pageSize || DEFAULT_PAGE_SIZE,
+        options6.limit - count
       );
     }
     try {
@@ -1396,7 +1402,7 @@ var ensureToken = (opts, logger) => {
     }
   }
 };
-var buildQuery = (options) => {
+var buildQuery = (options6) => {
   const map = {
     createdBefore: "created_before",
     createdAfter: "created_after",
@@ -1405,34 +1411,34 @@ var buildQuery = (options) => {
   };
   const query = {};
   Object.keys(map).forEach((key) => {
-    if (options[key]) {
-      query[map[key]] = options[key];
+    if (options6[key]) {
+      query[map[key]] = options6[key];
     }
   });
   return query;
 };
-var get = async (options, logger) => {
-  ensureToken(options, logger);
-  const multiMode = options.key.includes("*");
+var get = async (options6, logger) => {
+  ensureToken(options6, logger);
+  const multiMode = options6.key.includes("*");
   if (multiMode) {
     logger.info(
-      `Fetching multiple items from collection "${options.collectionName}" with pattern ${options.key}`
+      `Fetching multiple items from collection "${options6.collectionName}" with pattern ${options6.key}`
     );
   } else {
     logger.info(
-      `Fetching "${options.key}" from collection "${options.collectionName}"`
+      `Fetching "${options6.key}" from collection "${options6.collectionName}"`
     );
   }
   let result = await request_default(
     "GET",
     {
-      lightning: options.lightning,
-      token: options.token,
-      pageSize: options.pageSize,
-      limit: options.limit,
-      key: options.key,
-      collectionName: options.collectionName,
-      query: buildQuery(options)
+      lightning: options6.lightning,
+      token: options6.token,
+      pageSize: options6.pageSize,
+      limit: options6.limit,
+      key: options6.key,
+      collectionName: options6.collectionName,
+      query: buildQuery(options6)
     },
     logger
   );
@@ -1440,32 +1446,32 @@ var get = async (options, logger) => {
     logger.success(`Fetched ${Object.keys(result).length} items!`);
   } else {
     result = Object.values(result)[0];
-    logger.success(`Fetched ${options.key}`);
+    logger.success(`Fetched ${options6.key}`);
   }
-  if (options.outputPath) {
+  if (options6.outputPath) {
     const content = JSON.stringify(
       result,
       null,
-      options.pretty ? 2 : void 0
+      options6.pretty ? 2 : void 0
     );
-    await writeFile4(options.outputPath, content);
-    logger.always(`Wrote items to ${options.outputPath}`);
+    await writeFile4(options6.outputPath, content);
+    logger.always(`Wrote items to ${options6.outputPath}`);
   } else {
     logger.print(result);
   }
 };
-var set = async (options, logger) => {
-  if (options.key && options.items) {
+var set = async (options6, logger) => {
+  if (options6.key && options6.items) {
     throwAbortableError(
       "ARGUMENT_ERROR: arguments for key and items were provided",
       "If upserting multiple items with --items, do not pass a key"
     );
   }
-  ensureToken(options, logger);
-  logger.info(`Upserting items to collection "${options.collectionName}"`);
+  ensureToken(options6, logger);
+  logger.info(`Upserting items to collection "${options6.collectionName}"`);
   const items = [];
-  if (options.items) {
-    const resolvedPath = path6.resolve(options.items);
+  if (options6.items) {
+    const resolvedPath = path6.resolve(options6.items);
     logger.debug("Loading items from ", resolvedPath);
     const data = await readFile3(resolvedPath, "utf8");
     const obj = JSON.parse(data);
@@ -1473,43 +1479,43 @@ var set = async (options, logger) => {
       items.push({ key, value: JSON.stringify(value) });
     });
     logger.info(`Upserting ${items.length} items`);
-  } else if (options.key && options.value) {
-    const resolvedPath = path6.resolve(options.value);
+  } else if (options6.key && options6.value) {
+    const resolvedPath = path6.resolve(options6.value);
     logger.debug("Loading value from ", resolvedPath);
-    const data = await readFile3(path6.resolve(options.value), "utf8");
+    const data = await readFile3(path6.resolve(options6.value), "utf8");
     const value = JSON.stringify(JSON.parse(data));
-    items.push({ key: options.key, value });
-    logger.info(`Upserting data to "${options.key}"`);
+    items.push({ key: options6.key, value });
+    logger.info(`Upserting data to "${options6.key}"`);
   } else {
     throw new Error("INVALID_ARGUMENTS");
   }
   const result = await request_default(
     "POST",
     {
-      lightning: options.lightning,
-      token: options.token,
-      key: options.key,
-      collectionName: options.collectionName,
+      lightning: options6.lightning,
+      token: options6.token,
+      key: options6.key,
+      collectionName: options6.collectionName,
       data: { items }
     },
     logger
   );
   logger.success(`Upserted ${result.upserted} items!`);
 };
-var remove = async (options, logger) => {
-  ensureToken(options, logger);
+var remove = async (options6, logger) => {
+  ensureToken(options6, logger);
   logger.info(
-    `Removing "${options.key}" from collection "${options.collectionName}"`
+    `Removing "${options6.key}" from collection "${options6.collectionName}"`
   );
-  if (options.dryRun) {
+  if (options6.dryRun) {
     logger.info("--dry-run passed: fetching affected items");
     let result = await request_default(
       "GET",
       {
-        lightning: options.lightning,
-        token: options.token,
-        key: options.key,
-        collectionName: options.collectionName
+        lightning: options6.lightning,
+        token: options6.token,
+        key: options6.key,
+        collectionName: options6.collectionName
       },
       logger
     );
@@ -1521,11 +1527,11 @@ var remove = async (options, logger) => {
   let result = await request_default(
     "DELETE",
     {
-      lightning: options.lightning,
-      token: options.token,
-      key: options.key,
-      collectionName: options.collectionName,
-      query: buildQuery(options)
+      lightning: options6.lightning,
+      token: options6.token,
+      key: options6.key,
+      collectionName: options6.collectionName,
+      query: buildQuery(options6)
     },
     logger
   );
@@ -1539,9 +1545,9 @@ var handler_default4 = {
 };

 // src/test/handler.ts
-var testHandler = async (options, logger) => {
+var testHandler = async (options6, logger) => {
   logger.log("Running test workflow...");
-  const opts = { ...options };
+  const opts = { ...options6 };
   opts.compile = true;
   opts.adaptors = [];
   const plan = {
@@ -1606,50 +1612,51 @@ import {
 // src/deploy/beta.ts
 import Project2 from "@openfn/project";
 import { deployProject } from "@openfn/deploy";
-async function handler(options, logger) {
+async function handler(options6, logger) {
   const { OPENFN_API_KEY } = process.env;
-  const { endpoint } = options;
-  const config = {
-    apiKey: options.apiKey
+  const { endpoint: endpoint2 } = options6;
+  const config2 = {
+    apiKey: options6.apiKey
   };
-  if (!options.apiKey && OPENFN_API_KEY) {
+  if (!options6.apiKey && OPENFN_API_KEY) {
     logger.info("Using OPENFN_API_KEY environment variable");
-    config.apiKey = OPENFN_API_KEY;
+    config2.apiKey = OPENFN_API_KEY;
   }
-  const project = await Project2.from("fs", { root: options.path || "." });
+  const project = await Project2.from("fs", { root: options6.path || "." });
+  console.log({ openfn: project.openfn });
   const state = project.serialize("state", { format: "json" });
   logger.debug("Converted local project to app state:");
   logger.debug(JSON.stringify(state, null, 2));
-  config.endpoint = endpoint || project.openfn?.endpoint;
+  config2.endpoint = endpoint2 || project.openfn?.endpoint;
   logger.info("Sending project to app...");
-  await deployProject(config, state);
-  logger.success("Updated project at", config.endpoint);
+  await deployProject(config2, state);
+  logger.success("Updated project at", config2.endpoint);
 }

 // src/deploy/handler.ts
 var actualDeploy = deploy;
-async function deployHandler(options, logger, deployFn = actualDeploy) {
-  if (options.beta) {
-    return handler(options, logger);
+async function deployHandler(options6, logger, deployFn = actualDeploy) {
+  if (options6.beta) {
+    return handler(options6, logger);
   }
   try {
-    const config = mergeOverrides(await getConfig(options.configPath), options);
-    logger.debug("Deploying with config", JSON.stringify(config, null, 2));
-    if (options.confirm === false) {
-      config.requireConfirmation = options.confirm;
+    const config2 = mergeOverrides(await getConfig(options6.configPath), options6);
+    logger.debug("Deploying with config", JSON.stringify(config2, null, 2));
+    if (options6.confirm === false) {
+      config2.requireConfirmation = options6.confirm;
     }
     if (process.env["OPENFN_API_KEY"]) {
       logger.info("Using OPENFN_API_KEY environment variable");
-      config.apiKey = process.env["OPENFN_API_KEY"];
+      config2.apiKey = process.env["OPENFN_API_KEY"];
     }
     if (process.env["OPENFN_ENDPOINT"]) {
       logger.info("Using OPENFN_ENDPOINT environment variable");
-      config.endpoint = process.env["OPENFN_ENDPOINT"];
+      config2.endpoint = process.env["OPENFN_ENDPOINT"];
     }
-    logger.debug("Deploying with config", config);
+    logger.debug("Deploying with config", config2);
     logger.info(`Deploying`);
-    validateConfig(config);
-    const isOk = await deployFn(config, logger);
+    validateConfig(config2);
+    const isOk = await deployFn(config2, logger);
     process.exitCode = isOk ? 0 : 1;
     return isOk;
   } catch (error) {
@@ -1661,15 +1668,15 @@ async function deployHandler(options, logger, deployFn = actualDeploy) {
     throw error;
   }
 }
-function mergeOverrides(config, options) {
+function mergeOverrides(config2, options6) {
   return {
-    ...config,
-    apiKey: pickFirst(process.env["OPENFN_API_KEY"], config.apiKey),
-    endpoint: pickFirst(process.env["OPENFN_ENDPOINT"], config.endpoint),
-    statePath: pickFirst(options.statePath, config.statePath),
-    specPath: pickFirst(options.projectPath, config.specPath),
-    configPath: options.configPath,
-    requireConfirmation: pickFirst(options.confirm, config.requireConfirmation)
+    ...config2,
+    apiKey: pickFirst(process.env["OPENFN_API_KEY"], config2.apiKey),
+    endpoint: pickFirst(process.env["OPENFN_ENDPOINT"], config2.endpoint),
+    statePath: pickFirst(options6.statePath, config2.statePath),
+    specPath: pickFirst(options6.projectPath, config2.specPath),
+    configPath: options6.configPath,
+    requireConfirmation: pickFirst(options6.confirm, config2.requireConfirmation)
   };
 }
 function pickFirst(...args) {
@@ -1688,20 +1695,20 @@ var RETRY_COUNT = 20;
 var TIMEOUT_MS = 1e3 * 60;
 var actualDocGen = (specifier) => describePackage(specifier, {});
 var ensurePath = (filePath) => mkdirSync(path7.dirname(filePath), { recursive: true });
-var generatePlaceholder = (path16) => {
-  writeFileSync(path16, `{ "loading": true, "timestamp": ${Date.now()}}`);
+var generatePlaceholder = (path15) => {
+  writeFileSync(path15, `{ "loading": true, "timestamp": ${Date.now()}}`);
 };
 var finish = (logger, resultPath) => {
   logger.success("Done! Docs can be found at:\n");
   logger.print(` ${path7.resolve(resultPath)}`);
 };
-var generateDocs = async (specifier, path16, docgen, logger) => {
+var generateDocs = async (specifier, path15, docgen, logger) => {
   const result = await docgen(specifier);
-  await writeFile5(path16, JSON.stringify(result, null, 2));
-  finish(logger, path16);
-  return path16;
+  await writeFile5(path15, JSON.stringify(result, null, 2));
+  finish(logger, path15);
+  return path15;
 };
-var waitForDocs = async (docs, path16, logger, retryDuration = RETRY_DURATION) => {
+var waitForDocs = async (docs, path15, logger, retryDuration = RETRY_DURATION) => {
   try {
     if (docs.hasOwnProperty("loading")) {
       logger.info("Docs are being loaded by another process. Waiting.");
@@ -1713,27 +1720,27 @@ var waitForDocs = async (docs, path16, logger, retryDuration = RETRY_DURATION) =
           clearInterval(i);
           reject(new Error("Timed out waiting for docs to load"));
         }
-        const updated = JSON.parse(readFileSync(path16, "utf8"));
+        const updated = JSON.parse(readFileSync(path15, "utf8"));
         if (!updated.hasOwnProperty("loading")) {
           logger.info("Docs found!");
           clearInterval(i);
-          resolve(path16);
+          resolve(path15);
         }
         count++;
       }, retryDuration);
     });
   } else {
-      logger.info(`Docs already written to cache at ${path16}`);
-      finish(logger, path16);
-      return path16;
+      logger.info(`Docs already written to cache at ${path15}`);
+      finish(logger, path15);
+      return path15;
     }
   } catch (e) {
     logger.error("Existing doc JSON corrupt. Aborting");
     throw e;
   }
 };
-var docgenHandler = (options, logger, docgen = actualDocGen, retryDuration = RETRY_DURATION) => {
-  const { specifier, repoDir } = options;
+var docgenHandler = (options6, logger, docgen = actualDocGen, retryDuration = RETRY_DURATION) => {
+  const { specifier, repoDir } = options6;
   const { version } = getNameAndVersion4(specifier);
   if (!version) {
     logger.error("Error: No version number detected");
@@ -1742,28 +1749,28 @@ var docgenHandler = (options, logger, docgen = actualDocGen, retryDuration = RET
     process.exit(9);
   }
   logger.success(`Generating docs for ${specifier}`);
-  const
-  ensurePath(
+  const path15 = `${repoDir}/docs/${specifier}.json`;
+  ensurePath(path15);
   const handleError2 = () => {
     logger.info("Removing placeholder");
-    rmSync(
+    rmSync(path15);
   };
   try {
-    const existing = readFileSync(
+    const existing = readFileSync(path15, "utf8");
     const json = JSON.parse(existing);
     if (json && json.timeout && Date.now() - json.timeout >= TIMEOUT_MS) {
       logger.info(`Expired placeholder found. Removing.`);
-      rmSync(
+      rmSync(path15);
       throw new Error("TIMEOUT");
     }
-    return waitForDocs(json,
+    return waitForDocs(json, path15, logger, retryDuration);
  } catch (e) {
    if (e.message !== "TIMEOUT") {
-      logger.info(`Docs JSON not found at ${
+      logger.info(`Docs JSON not found at ${path15}`);
    }
    logger.debug("Generating placeholder");
-    generatePlaceholder(
-    return generateDocs(specifier,
+    generatePlaceholder(path15);
+    return generateDocs(specifier, path15, docgen, logger).catch((e2) => {
      logger.error("Error generating documentation");
      logger.error(e2);
      handleError2();
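The docgen handler above coordinates concurrent doc generation through a placeholder file: the first process writes `{ "loading": true, "timestamp": ... }`, later processes poll via `waitForDocs` until real JSON replaces it, and a placeholder older than `TIMEOUT_MS` is treated as stale and removed. (Note the expiry check reads `json.timeout` while `generatePlaceholder` writes a `timestamp` field.) A condensed sketch of the lock-file pattern — simplified and illustrative, not the package's exact code:

    import { existsSync, readFileSync, writeFileSync, rmSync } from "node:fs";

    const TIMEOUT_MS = 1e3 * 60;

    // Returns true if this process should generate the docs itself,
    // false if a fresh placeholder says another process is already on it.
    const claimDocs = (path) => {
      if (existsSync(path)) {
        const json = JSON.parse(readFileSync(path, "utf8"));
        if (json.loading && Date.now() - json.timestamp < TIMEOUT_MS) {
          return false; // someone else is generating; poll instead
        }
        rmSync(path); // expired placeholder
      }
      writeFileSync(path, JSON.stringify({ loading: true, timestamp: Date.now() }));
      return true;
    };
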
@@ -1802,8 +1809,8 @@ ${data.functions.map(
   (fn) => ` ${c.yellow(fn.name)} (${fn.parameters.map((p) => p.name).join(", ")})`
 ).sort().join("\n")}
 `;
-var docsHandler = async (
-  const { adaptor, operation, repoDir } =
+var docsHandler = async (options6, logger) => {
+  const { adaptor, operation, repoDir } = options6;
   const adaptors = expand_adaptors_default([adaptor]);
   const [adaptorName] = adaptors;
   let { name, version } = getNameAndVersion5(adaptorName);
@@ -1814,7 +1821,7 @@ var docsHandler = async (options, logger) => {
     logger.success(`Showing docs for ${adaptorName} v${version}`);
   }
   logger.info("Generating/loading documentation...");
-  const
+  const path15 = await handler_default7(
     {
       specifier: `${name}@${version}`,
       repoDir
@@ -1823,8 +1830,8 @@ var docsHandler = async (options, logger) => {
     createNullLogger()
   );
   let didError = false;
-  if (
-    const source = await readFile4(
+  if (path15) {
+    const source = await readFile4(path15, "utf8");
     const data = JSON.parse(source);
     let desc;
     if (operation) {
@@ -1894,8 +1901,8 @@ var sortKeys = (obj) => {
   });
   return newObj;
 };
-var generateKey = (
-  const sorted = sortKeys(
+var generateKey = (config2, adaptor) => {
+  const sorted = sortKeys(config2);
   const key = `${JSON.stringify(sorted)}-${adaptor}`;
   return createHash("sha256").update(key).digest("hex");
 };
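`generateKey` builds a deterministic cache key: config keys are sorted by `sortKeys` before serializing, so two configs with the same values but different key order hash identically. A usage sketch (the argument values are illustrative):

    // Same values, different key order: identical sha256 cache keys.
    const a = generateKey({ host: "h", user: "u" }, "@openfn/language-http@6.0.0");
    const b = generateKey({ user: "u", host: "h" }, "@openfn/language-http@6.0.0");
    // a === b
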
@@ -2008,8 +2015,8 @@ var getAdaptorPath = async (adaptor, logger, repoDir) => {
   return adaptorPath;
 };
 var shouldAutoinstall = (adaptor) => adaptor?.length > 0 && !adaptor.startsWith("/") && !adaptor.includes("=");
-var metadataHandler = async (
-  const { repoDir, adaptors, keepUnsupported } =
+var metadataHandler = async (options6, logger) => {
+  const { repoDir, adaptors, keepUnsupported } = options6;
   let adaptor = adaptors[0];
   if (await isAdaptorUnsupported(adaptor, repoDir)) {
     logger.info(
@@ -2018,11 +2025,11 @@ var metadataHandler = async (options, logger) => {
     logger.error("No metadata helper found");
     process.exit(1);
   }
-  const state = await load_state_default({},
+  const state = await load_state_default({}, options6, logger);
   logger.success(`Generating metadata`);
   logger.info("config:", state);
-  const
-  if (!
+  const config2 = state.configuration;
+  if (!config2 || Object.keys(config2).length === 0) {
     logger.error("ERROR: Invalid configuration passed");
     process.exit(1);
   }
@@ -2030,8 +2037,8 @@ var metadataHandler = async (options, logger) => {
     logger.success("Done!");
     logger.print(getCachePath2(repoDir, id));
   };
-  const id = generateKey(
-  if (!
+  const id = generateKey(config2, adaptor);
+  if (!options6.force) {
     logger.debug("config hash: ", id);
     const cached = await get2(repoDir, id);
     if (cached) {
@@ -2049,14 +2056,14 @@ var metadataHandler = async (options, logger) => {
     wasAutoInstalled = true;
     adaptor = autoinstallResult[0];
   }
-  const adaptorPath = await getAdaptorPath(adaptor, logger,
+  const adaptorPath = await getAdaptorPath(adaptor, logger, options6.repoDir);
   if (!adaptorPath) {
     throw new Error(`Could not resolve adaptor path for ${adaptor}`);
   }
   const mod = await import(adaptorPath);
   if (mod.metadata && typeof mod.metadata === "function") {
     logger.info("Metadata function found. Generating metadata...");
-    const result = await mod.metadata(
+    const result = await mod.metadata(config2);
     decorateMetadata(result);
     await set2(
       repoDir,
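Pulling these pieces together, the metadata flow is: hash the credential config, reuse a cached result unless `force` is set, otherwise dynamically import the adaptor and call its `metadata()` export. A control-flow sketch only — the `get2`/`set2` argument lists beyond what appears at the call sites above are assumptions:

    const id = generateKey(config, adaptor);     // sha256 of sorted config + adaptor
    if (!options.force) {
      const cached = await get2(repoDir, id);    // previously generated metadata?
      if (cached) return id;                     // reuse without touching the adaptor
    }
    const mod = await import(adaptorPath);       // load the adaptor module
    if (typeof mod.metadata === "function") {
      const result = await mod.metadata(config); // adaptor-provided metadata helper
      await set2(repoDir, id, result);           // cache under the config hash (args assumed)
    }
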
@@ -2091,7 +2098,7 @@ var metadataHandler = async (options, logger) => {
 var handler_default9 = metadataHandler;
 
 // src/pull/handler.ts
-import
+import path12 from "path";
 import fs5 from "node:fs/promises";
 import {
   getConfig as getConfig2,
@@ -2101,106 +2108,566 @@ import {
   syncRemoteSpec
 } from "@openfn/deploy";
 
-// src/
-import
-import path9 from "path";
-import fs4 from "node:fs/promises";
-import { getProject } from "@openfn/deploy";
+// src/projects/fetch.ts
+import path10 from "node:path";
 import Project3, { Workspace } from "@openfn/project";
-
-
+
+// src/util/command-builders.ts
+import c2 from "chalk";
+var expandYargs = (y) => {
+  if (typeof y === "function") {
+    return y();
+  }
+  return y;
+};
+function build(opts, yargs) {
+  return opts.reduce(
+    (_y, o) => yargs.option(o.name, expandYargs(o.yargs)),
+    yargs
+  );
+}
+var ensure = (command6, opts) => (yargs) => {
+  yargs.command = command6;
+  opts.filter((opt) => opt.ensure).forEach((opt) => {
+    try {
+      opt.ensure(yargs);
+    } catch (e) {
+      console.error(
+        c2.red(`
+Error parsing command arguments: ${command6}.${opt.name}
+`)
+      );
+      console.error(c2.red("Aborting"));
+      console.error();
+      process.exit(9);
+    }
+  });
+};
+var override = (command6, yargs) => {
+  return {
+    ...command6,
+    yargs: {
+      ...command6.yargs || {},
+      ...yargs
+    }
+  };
+};
+
+// src/options.ts
+import nodePath from "node:path";
+
+// src/util/ensure-log-opts.ts
+var defaultLoggerOptions = {
+  default: "default",
+  // TODO fix to lower case
+  job: "debug"
+};
+var ERROR_MESSAGE_LOG_LEVEL = "Unknown log level. Valid levels are none, debug, info and default.";
+var ERROR_MESSAGE_LOG_COMPONENT = "Unknown log component. Valid components are cli, compiler, runtime and job.";
+var componentShorthands = {
+  cmp: "compiler",
+  rt: "runtime",
+  "r/t": "runtime"
+};
+var ensureLogOpts = (opts) => {
+  const components = {};
+  const outgoingOpts = opts;
+  if (!opts.log && /^(version|test)$/.test(opts.command)) {
+    outgoingOpts.log = { default: "info" };
+    return outgoingOpts;
+  }
+  if (opts.log) {
+    const parts = opts.log.split(",");
+    parts.forEach((l) => {
+      let component = "";
+      let level = "";
+      if (l.match(/=/)) {
+        const parts2 = l.split("=");
+        component = parts2[0].toLowerCase();
+        if (componentShorthands[component]) {
+          component = componentShorthands[component];
+        }
+        level = parts2[1].toLowerCase();
+      } else {
+        component = "default";
+        level = l.toLowerCase();
+        if (level === "none" && !parts.find((p) => p.startsWith("job"))) {
+          components["job"] = "none";
+        }
+      }
+      if (!/^(cli|runtime|compiler|job|default)$/i.test(component)) {
+        throw new Error(ERROR_MESSAGE_LOG_COMPONENT);
+      }
+      level = level.toLowerCase();
+      if (!isValidLogLevel(level)) {
+        throw new Error(ERROR_MESSAGE_LOG_LEVEL);
+      }
+      components[component] = level;
+    });
+  }
+  outgoingOpts.log = {
+    ...defaultLoggerOptions,
+    ...components
+  };
+  return outgoingOpts;
+};
+var ensure_log_opts_default = ensureLogOpts;
+
+// src/util/get-cli-option-object.ts
+function getCLIOptionObject(arg) {
+  if (isObject(arg)) {
+    return arg;
+  } else if (typeof arg === "string") {
+    try {
+      const p = JSON.parse(arg);
+      if (isObject(p))
+        return p;
+    } catch (e) {
+    }
+    return Object.fromEntries(
+      arg.split(",").map((pair) => {
+        const [k, v] = pair.split("=");
+        return [k.trim(), v.trim()];
+      })
+    );
+  }
+}
+function isObject(arg) {
+  return typeof arg === "object" && arg !== null && !Array.isArray(arg);
+}
+
+// src/options.ts
+var setDefaultValue = (opts, key, value) => {
+  const v = opts[key];
+  if (isNaN(v) && !v) {
+    opts[key] = value;
+  }
+};
+var apikey = {
+  name: "apikey",
+  yargs: {
+    alias: ["key", "pat", "token"],
+    description: "[beta only] API Key, Personal Access Token (Pat), or other access token"
+  }
+};
+var configPath = {
+  name: "config",
+  yargs: {
+    alias: ["c", "config-path"],
+    description: "The location of your config file",
+    default: "./.config.json"
+  }
+};
+var endpoint = {
+  name: "endpoint",
+  yargs: {
+    alias: ["lightning"],
+    description: "[beta only] URL to Lightning endpoint"
+  }
+};
+var env = {
+  name: "env",
+  yargs: {
+    description: "[beta only] Environment name (eg staging, prod, branch)"
+  }
+};
+var force = {
+  name: "force",
+  yargs: {
+    alias: ["f"],
+    boolean: true,
+    description: "Force metadata to be regenerated",
+    default: false
+  }
+};
+var getBaseDir = (opts) => {
+  const basePath = opts.path ?? ".";
+  if (/\.(jso?n?|ya?ml)$/.test(basePath)) {
+    return nodePath.dirname(basePath);
+  }
+  return basePath;
+};
+var projectId = {
+  name: "project-id",
+  yargs: {
+    description: "The id or UUID of an openfn project",
+    string: true
+  },
+  ensure: (opts) => {
+    return opts.projectName;
+  }
+};
+var log = {
+  name: "log",
+  yargs: {
+    alias: ["l"],
+    description: "Set the log level",
+    string: true
+  },
+  ensure: (opts) => {
+    ensure_log_opts_default(opts);
+  }
+};
+var logJson = {
+  name: "log-json",
+  yargs: {
+    description: "Output all logs as JSON objects",
+    boolean: true
+  }
+};
+var outputPath = {
+  name: "output-path",
+  yargs: {
+    alias: "o",
+    description: "Path to the output file"
+  },
+  ensure: (opts) => {
+    if (/^(compile|apollo)$/.test(opts.command)) {
+      if (opts.outputPath) {
+        delete opts.outputStdout;
+      }
+    } else {
+      if (opts.outputStdout) {
+        delete opts.outputPath;
+      } else {
+        const base = getBaseDir(opts);
+        setDefaultValue(opts, "outputPath", nodePath.join(base, "output.json"));
+      }
+    }
+    delete opts.o;
+  }
+};
+var snapshots = {
+  name: "snapshots",
+  yargs: {
+    description: "List of snapshot ids to pull",
+    array: true
+  }
+};
+var statePath = {
+  name: "state-path",
+  yargs: {
+    alias: ["s"],
+    description: "Path to the state file"
+  },
+  ensure: (opts) => {
+    delete opts.s;
+  }
+};
+var timeout = {
+  name: "timeout",
+  yargs: {
+    alias: ["t"],
+    number: true,
+    description: "Set the timeout duration (ms). Defaults to 5 minutes.",
+    default: 5 * 60 * 1e3
+  }
+};
+var workflow = {
+  name: "workflow",
+  yargs: {
+    string: true,
+    description: "Name of the workflow to execute"
+  }
+};
+var removeUnmapped = {
+  name: "remove-unmapped",
+  yargs: {
+    boolean: true,
+    description: "Removes all workflows that didn't get mapped from the final project after merge"
+  }
+};
+var workflowMappings = {
+  name: "workflow-mappings",
+  yargs: {
+    type: "string",
+    coerce: getCLIOptionObject,
+    description: "A manual object mapping of which workflows in source and target should be matched for a merge."
+  }
+};
+var workspace = {
+  name: "workspace",
+  yargs: {
+    alias: ["w"],
+    description: "Path to the project workspace (ie, path to openfn.yaml)"
+  },
+  ensure: (opts) => {
+    const ws = opts.workspace ?? process.env.OPENFN_WORKSPACE;
+    if (!ws) {
+      opts.workspace = process.cwd();
+    } else {
+      opts.workspace = nodePath.resolve(ws);
+    }
+  }
+};
+
+// src/projects/util.ts
+import path9 from "node:path";
+import { mkdir as mkdir4, writeFile as writeFile7 } from "node:fs/promises";
+
+// src/errors.ts
+var CLIError = class extends Error {
+  constructor(message) {
+    super(message);
+  }
+};
+
+// src/projects/util.ts
+var loadAppAuthConfig = (options6, logger) => {
   const { OPENFN_API_KEY, OPENFN_ENDPOINT } = process.env;
-  const
-  apiKey:
-  endpoint:
+  const config2 = {
+    apiKey: options6.apiKey,
+    endpoint: options6.endpoint
   };
-  if (!
+  if (!options6.apiKey && OPENFN_API_KEY) {
     logger.info("Using OPENFN_API_KEY environment variable");
-
+    config2.apiKey = OPENFN_API_KEY;
   }
-  if (!
+  if (!options6.endpoint && OPENFN_ENDPOINT) {
     logger.info("Using OPENFN_ENDPOINT environment variable");
-
+    config2.endpoint = OPENFN_ENDPOINT;
+  }
+  return config2;
+};
+var ensureExt = (filePath, ext) => {
+  if (!filePath.endsWith(ext)) {
+    return `${filePath}.${ext}`;
+  }
+  return filePath;
+};
+var serialize = async (project, outputPath2, formatOverride, dryRun = false) => {
+  const root = path9.dirname(outputPath2);
+  await mkdir4(root, { recursive: true });
+  const format = formatOverride ?? project.config?.formats.project;
+  const output = project?.serialize("project", { format });
+  const maybeWriteFile = (filePath, output2) => {
+    if (!dryRun) {
+      return writeFile7(filePath, output2);
+    }
+  };
+  let finalPath;
+  if (format === "yaml") {
+    finalPath = ensureExt(outputPath2, "yaml");
+    await maybeWriteFile(finalPath, output);
+  } else {
+    finalPath = ensureExt(outputPath2, "json");
+    await maybeWriteFile(finalPath, JSON.stringify(output, null, 2));
   }
-
-
-
-  const
-
+  return finalPath;
+};
+var getLightningUrl = (config2, path15 = "", snapshots2) => {
+  const params = new URLSearchParams();
+  snapshots2?.forEach((snapshot) => params.append("snapshots[]", snapshot));
+  return new URL(
+    `/api/provision/${path15}?${params.toString()}`,
+    config2.endpoint
+  );
+};
+async function getProject(logger, config2, projectId2, snapshots2) {
+  const url2 = getLightningUrl(config2, projectId2, snapshots2);
+  logger.info(`Checking ${url2} for existing project`);
+  try {
+    const response = await fetch(url2, {
+      headers: {
+        Authorization: `Bearer ${config2.apiKey}`,
+        Accept: "application/json"
+      }
+    });
+    if (!response.ok) {
+      if (response.status === 401 || response.status === 403) {
+        throw new CLIError(
+          `Failed to authorize request with endpoint ${config2.endpoint}, got ${response.status} ${response.statusText}`
+        );
+      }
+      if (response.status === 404) {
+        throw new CLIError(`Project not found: ${projectId2}`);
+      }
+      throw new CLIError(
+        `Failed to fetch project ${projectId2}: ${response.statusText}`
+      );
+    }
+    logger.info("Project found");
+    return response.json();
+  } catch (error) {
+    handleCommonErrors(config2, error);
+    throw error;
+  }
+}
+function handleCommonErrors(config2, error) {
+  if (error.cause?.code === "ECONNREFUSED") {
+    throw new DeployError2(
+      `Failed to connect to endpoint ${config2.endpoint}, got ECONNREFUSED.`
+    );
+  }
+}
+var DeployError2 = class extends Error {
+  constructor(message) {
+    super(message);
+  }
+};
+
+// src/projects/fetch.ts
+var options = [
+  apikey,
+  configPath,
+  endpoint,
+  env,
+  log,
+  override(outputPath, {
+    description: "Path to output the fetched project to"
+  }),
+  logJson,
+  workspace,
+  snapshots,
+  statePath,
+  override(force, {
+    description: "Overwrite local file contents with the fetched contents"
+  })
+];
+var command = {
+  command: "fetch [projectId]",
+  describe: `Fetch a project's state and spec from a Lightning Instance to the local state file without expanding to the filesystem.`,
+  builder: (yargs) => build(options, yargs).positional("projectId", {
+    describe: "The id of the project that should be fetched, should be a UUID",
+    demandOption: true
+  }).example(
+    "fetch 57862287-23e6-4650-8d79-e1dd88b24b1c",
+    "Fetch an updated copy of a the above spec and state from a Lightning Instance"
+  ),
+  handler: ensure("project-fetch", options)
+};
+var handler2 = async (options6, logger) => {
+  const workspacePath = path10.resolve(options6.workspace ?? process.cwd());
+  const workspace2 = new Workspace(workspacePath);
+  const { projectId: projectId2, outputPath: outputPath2 } = options6;
+  const config2 = loadAppAuthConfig(options6, logger);
+  const { data } = await getProject(logger, config2, projectId2);
   const project = await Project3.from(
     "state",
     data,
     {
-      endpoint:
-      env:
-      fetched_at: (/* @__PURE__ */ new Date()).toISOString()
+      endpoint: config2.endpoint,
+      env: options6.env || "project"
     },
-
+    workspace2.getConfig()
   );
+  const outputRoot = path10.resolve(outputPath2 || workspacePath);
   const projectFileName = project.getIdentifier();
-
-
-
-
-
+  const projectsDir = project.config.dirs.projects ?? ".projects";
+  const finalOutputPath = outputPath2 ?? `${outputRoot}/${projectsDir}/${projectFileName}`;
+  let format = void 0;
+  if (outputPath2) {
+    const ext = path10.extname(outputPath2).substring(1);
+    if (ext.length) {
+      format = ext;
+    }
+  }
+  const finalOutput = await serialize(
+    project,
+    finalOutputPath,
+    format,
+    true
+    // dry run - this won't trigger an actual write!
   );
-
-
-
-
-
-
-
-
-
+  let current = null;
+  try {
+    current = await Project3.from("path", finalOutput);
+  } catch (e) {
+  }
+  const hasAnyHistory = project.workflows.find(
+    (w) => w.workflow.history?.length
+  );
+  const skipVersionCheck = options6.force || // The user forced the checkout
+  !current || // there is no project on disk
+  !hasAnyHistory;
+  if (!skipVersionCheck && !project.canMergeInto(current)) {
+    throw new Error("Error! An incompatible project exists at this location");
   }
-  await
-
-
-
+  await serialize(project, finalOutputPath, format);
+  logger.success(`Fetched project file to ${finalOutput}`);
+  return project;
+};
+
+// src/projects/checkout.ts
+import Project4, { Workspace as Workspace2 } from "@openfn/project";
+import path11 from "path";
+import fs4 from "fs";
+import { rimraf } from "rimraf";
+var options2 = [projectId, workspace, log];
+var command2 = {
+  command: "checkout <project-id>",
+  describe: "Switch to a different OpenFn project in the same workspace",
+  handler: ensure("project-checkout", options2),
+  builder: (yargs) => build(options2, yargs)
+};
+var handler3 = async (options6, logger) => {
+  const projectId2 = options6.projectId;
+  const workspacePath = options6.workspace ?? process.cwd();
+  const workspace2 = new Workspace2(workspacePath, logger);
+  const { project: _, ...config2 } = workspace2.getConfig();
+  let switchProject;
+  if (/\.(yaml|json)$/.test(projectId2)) {
+    const filePath = projectId2.startsWith("/") ? projectId2 : path11.join(workspacePath, projectId2);
+    logger.debug("Loading project from path ", filePath);
+    switchProject = await Project4.from("path", filePath, config2);
   } else {
-
-
-
-  );
+    switchProject = workspace2.get(projectId2);
+  }
+  if (!switchProject) {
+    throw new Error(`Project with id ${projectId2} not found in the workspace`);
   }
-
-  const files =
+  await rimraf(path11.join(workspacePath, config2.workflowRoot ?? "workflows"));
+  const files = switchProject.serialize("fs");
   for (const f in files) {
     if (files[f]) {
-
+      fs4.mkdirSync(path11.join(workspacePath, path11.dirname(f)), {
         recursive: true
       });
-
+      fs4.writeFileSync(path11.join(workspacePath, f), files[f]);
     } else {
-
+      logger.warn("WARNING! No content for file", f);
     }
   }
-  logger.success(`Expanded project to ${
+  logger.success(`Expanded project to ${workspacePath}`);
+};
+
+// src/projects/pull.ts
+async function handler4(options6, logger) {
+  const project = await handler2(options6, logger);
+  logger.success(`Downloaded latest project version`);
+  await handler3(
+    {
+      ...options6,
+      projectId: project.id
+    },
+    logger
+  );
+  logger.success(`Checked out project locally`);
 }
+var pull_default = handler4;
 
 // src/pull/handler.ts
-async function pullHandler(
-  if (
-    return
+async function pullHandler(options6, logger) {
+  if (options6.beta) {
+    return pull_default(options6, logger);
   }
   try {
-    const
+    const config2 = mergeOverrides2(await getConfig2(options6.configPath), options6);
     if (process.env["OPENFN_API_KEY"]) {
       logger.info("Using OPENFN_API_KEY environment variable");
-
+      config2.apiKey = process.env["OPENFN_API_KEY"];
     }
     if (process.env["OPENFN_ENDPOINT"]) {
       logger.info("Using OPENFN_ENDPOINT environment variable");
-
+      config2.endpoint = process.env["OPENFN_ENDPOINT"];
     }
     logger.always(
       "Downloading existing project state (as JSON) from the server."
     );
     const { data: project } = await getProject2(
-
-
-
+      config2,
+      options6.projectId,
+      options6.snapshots
     );
     if (!project) {
       logger.error("ERROR: Project not found.");
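The new command modules above share a small declarative pattern: each CLI option is an object with a `name`, raw `yargs` config, and an optional `ensure` post-processor; `build` folds a list of these into a yargs instance, `ensure` tags the parsed argv with the command name and runs the post-processors, and `override` clones an option with tweaked yargs settings. A sketch of defining and composing an option in the same style — the `retries` flag and `my-cmd` command are invented for illustration and rely on the `build`/`ensure`/`override` helpers shown above:

    // Hypothetical option in the style of src/options.ts; not a real CLI flag.
    const retries = {
      name: "retries",
      yargs: { number: true, description: "How many times to retry", default: 0 },
      ensure: (opts) => {
        if (opts.retries < 0) throw new Error("retries must be >= 0");
      }
    };

    const opts = [override(retries, { default: 3 }), log, workspace];
    const cmd = {
      command: "my-cmd",
      builder: (yargs) => build(opts, yargs), // registers each option on yargs
      handler: ensure("my-cmd", opts)         // sets argv.command, runs ensure() hooks
    };
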
@@ -2213,18 +2680,18 @@ async function pullHandler(options, logger) {
     const state = getStateFromProjectPayload(project);
     logger.always("Downloading the project spec (as YAML) from the server.");
     const queryParams = new URLSearchParams();
-    queryParams.append("id",
-
+    queryParams.append("id", options6.projectId);
+    options6.snapshots?.forEach(
       (snapshot) => queryParams.append("snapshots[]", snapshot)
     );
     const url2 = new URL(
       `api/provision/yaml?${queryParams.toString()}`,
-
+      config2.endpoint
     );
     logger.debug("Fetching project spec from", url2);
     const res = await fetch(url2, {
       headers: {
-        Authorization: `Bearer ${
+        Authorization: `Bearer ${config2.apiKey}`,
         Accept: "application/json"
       }
     });
@@ -2236,16 +2703,16 @@ async function pullHandler(options, logger) {
       process.exitCode = 1;
       process.exit(1);
     }
-    const resolvedPath =
+    const resolvedPath = path12.resolve(config2.specPath);
     logger.debug("reading spec from", resolvedPath);
     const updatedSpec = await syncRemoteSpec(
       await res.text(),
       state,
-
+      config2,
       logger
     );
     await fs5.writeFile(
-
+      path12.resolve(config2.statePath),
       JSON.stringify(state, null, 2)
     );
     await fs5.writeFile(resolvedPath, updatedSpec);
@@ -2263,13 +2730,13 @@ async function pullHandler(options, logger) {
     throw error;
   }
 }
-function mergeOverrides2(
+function mergeOverrides2(config2, options6) {
   return {
-    ...
-    apiKey: pickFirst2(process.env["OPENFN_API_KEY"],
-    endpoint: pickFirst2(process.env["OPENFN_ENDPOINT"],
-    configPath:
-    requireConfirmation: pickFirst2(
+    ...config2,
+    apiKey: pickFirst2(process.env["OPENFN_API_KEY"], config2.apiKey),
+    endpoint: pickFirst2(process.env["OPENFN_ENDPOINT"], config2.endpoint),
+    configPath: options6.configPath,
+    requireConfirmation: pickFirst2(options6.confirm, config2.requireConfirmation)
   };
 }
 function pickFirst2(...args) {
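With the new modules in place, `pullHandler` becomes a thin switch: the `beta` option routes to the workspace-based implementation (`pull_default`, i.e. fetch then checkout), while the legacy spec/state download path is kept unchanged for everyone else. The gate, reduced to its shape:

    // One flag selects the old vs new implementation.
    async function pullHandler(options, logger) {
      if (options.beta) {
        return pull_default(options, logger); // new: fetch project, then check it out
      }
      // legacy path: download state JSON + YAML spec and sync them to disk
    }
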
@@ -2278,18 +2745,36 @@ function pickFirst2(...args) {
 var handler_default10 = pullHandler;
 
 // src/projects/handler.ts
-
-
-
-
-
-
-
-
+var handler_exports = {};
+__export(handler_exports, {
+  checkout: () => handler3,
+  fetch: () => handler2,
+  list: () => handler5,
+  merge: () => handler7,
+  version: () => handler6
+});
+
+// src/projects/list.ts
+import { Workspace as Workspace3 } from "@openfn/project";
+var options3 = [log, workspace];
+var command3 = {
+  command: "list [project-path]",
+  describe: "List all the openfn projects available in the current directory",
+  aliases: ["project", "$0"],
+  handler: ensure("project-list", options3),
+  builder: (yargs) => build(options3, yargs)
+};
+var handler5 = async (options6, logger) => {
+  logger.info("Searching for projects in workspace at:");
+  logger.info(" ", options6.workspace);
+  logger.break();
+  const workspace2 = new Workspace3(options6.workspace);
+  if (!workspace2.valid) {
+    throw new Error("No OpenFn projects found");
   }
-  logger.
+  logger.always(`Available openfn projects
 
-${
+${workspace2.list().map((p) => describeProject(p, p.id === workspace2.activeProjectId)).join("\n\n")}
 `);
 };
 function describeProject(project, active = false) {
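`handler_exports` replaces the old per-file default exports with a bundler-generated namespace: `__export` installs one enumerable `Object.defineProperty` getter per key, so each handler binding is resolved lazily on property access. In miniature, with a stand-in helper so the snippet runs on its own:

    // Stand-in with the same behaviour as the bundler helper used above.
    const exportNs = (target, all) => {
      for (const name in all)
        Object.defineProperty(target, name, { get: all[name], enumerable: true });
    };

    const ns = {};
    exportNs(ns, { answer: () => 42 });
    console.log(ns.answer); // 42 — resolved through the getter on each access
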
@@ -2299,27 +2784,31 @@ function describeProject(project, active = false) {
 workflows:
 ${project.workflows.map((w) => " - " + w.id).join("\n")}`;
 }
-var handler_default11 = projectsHandler;
 
-// src/version
-import { Workspace as
-
-var
-
-
-
+// src/projects/version.ts
+import { Workspace as Workspace4 } from "@openfn/project";
+var options4 = [workflow, workspace, workflowMappings];
+var command4 = {
+  command: "version [workflow]",
+  describe: "Returns the version hash of a given workflow in a workspace",
+  handler: ensure("project-version", options4),
+  builder: (yargs) => build(options4, yargs)
+};
+var handler6 = async (options6, logger) => {
+  const workspace2 = new Workspace4(options6.workspace);
+  if (!workspace2.valid) {
     logger.error("Command was run in an invalid openfn workspace");
     return;
   }
   const output = /* @__PURE__ */ new Map();
-  const activeProject =
-  if (
-  const
-  if (!
-  logger.error(`No workflow found with id ${
+  const activeProject = workspace2.getActiveProject();
+  if (options6.workflow) {
+    const workflow2 = activeProject?.getWorkflow(options6.workflow);
+    if (!workflow2) {
+      logger.error(`No workflow found with id ${options6.workflow}`);
       return;
     }
-  output.set(
+    output.set(workflow2.name || workflow2.id, workflow2.getVersionHash());
   } else {
     for (const wf of activeProject?.workflows || []) {
       output.set(wf.name || wf.id, wf.getVersionHash());
@@ -2330,7 +2819,7 @@ var workflowVersionHandler = async (options, logger) => {
     return;
   }
   let final;
-  if (
+  if (options6.json) {
     final = JSON.stringify(Object.fromEntries(output), void 0, 2);
   } else {
     final = Array.from(output.entries()).map(([key, value]) => key + "\n" + value).join("\n\n");
@@ -2339,68 +2828,56 @@ var workflowVersionHandler = async (options, logger) => {
 
 ${final}`);
 };
-var handler_default12 = workflowVersionHandler;
-
-// src/checkout/handler.ts
-import Project5, { Workspace as Workspace4 } from "@openfn/project";
-import path13 from "path";
-import fs6 from "fs";
-import { rimraf as rimraf2 } from "rimraf";
-var checkoutHandler = async (options, logger) => {
-  const commandPath = path13.resolve(options.projectPath ?? ".");
-  const workspace = new Workspace4(commandPath);
-  if (!workspace.valid) {
-    logger.error("Command was run in an invalid openfn workspace");
-    return;
-  }
-  const { project: _, ...config } = workspace.getConfig();
-  let switchProject;
-  if (/\.(yaml|json)$/.test(options.projectId)) {
-    const filePath = options.projectId.startsWith("/") ? options.projectId : path13.join(commandPath, options.projectId);
-    logger.debug("Loading project from path ", filePath);
-    switchProject = await Project5.from("path", filePath, config);
-  } else {
-    switchProject = workspace.get(options.projectId);
-  }
-  if (!switchProject) {
-    throw new Error(
-      `Project with id ${options.projectId} not found in the workspace`
-    );
-  }
-  await rimraf2(path13.join(commandPath, config.workflowRoot ?? "workflows"));
-  const files = switchProject.serialize("fs");
-  for (const f in files) {
-    if (files[f]) {
-      fs6.mkdirSync(path13.join(commandPath, path13.dirname(f)), {
-        recursive: true
-      });
-      fs6.writeFileSync(path13.join(commandPath, f), files[f]);
-    } else {
-      logger.warn("WARNING! No content for file", f);
-    }
-  }
-  logger.success(`Expanded project to ${commandPath}`);
-};
-var handler_default13 = checkoutHandler;
 
-// src/merge
+// src/projects/merge.ts
 import Project6, { Workspace as Workspace5 } from "@openfn/project";
-import
-import
-var
-
-
-
+import path13 from "node:path";
+import fs6 from "node:fs/promises";
+var options5 = [
+  projectId,
+  removeUnmapped,
+  workflowMappings,
+  log,
+  workspace,
+  // custom output because we don't want defaults or anything
+  {
+    name: "output-path",
+    yargs: {
+      alias: "o",
+      description: "Optionally write the merged project file to a custom location"
+    }
+  },
+  {
+    name: "base",
+    yargs: {
+      alias: "target",
+      description: "Path to the base (target) state file to merge into (ie, what main should be)"
+    }
+  },
+  override(force, {
+    description: "Force a merge even when workflows are incompatible"
+  })
+];
+var command5 = {
+  command: "merge <project-id>",
+  describe: "Merges the specified project into the currently checked out project",
+  handler: ensure("project-merge", options5),
+  builder: (yargs) => build(options5, yargs)
+};
+var handler7 = async (options6, logger) => {
+  const commandPath = options6.workspace;
+  const workspace2 = new Workspace5(commandPath);
+  if (!workspace2.valid) {
    logger.error("Command was run in an invalid openfn workspace");
    return;
  }
  let targetProject;
-  if (
-    const basePath =
+  if (options6.base) {
+    const basePath = path13.resolve(options6.base);
    logger.debug("Loading target project from path", basePath);
    targetProject = await Project6.from("path", basePath);
  } else {
-    targetProject =
+    targetProject = workspace2.getActiveProject();
    if (!targetProject) {
      logger.error(`No project currently checked out`);
      return;
@@ -2408,16 +2885,16 @@ var mergeHandler = async (options, logger) => {
     logger.debug(`Loading target project from workspace (${targetProject.id})`);
   }
   let sourceProject;
-  if (/\.(yaml|json)$/.test(
-    const filePath =
+  if (/\.(yaml|json)$/.test(options6.projectId)) {
+    const filePath = path13.join(commandPath, options6.projectId);
     logger.debug("Loading source project from path ", filePath);
     sourceProject = await Project6.from("path", filePath);
   } else {
-    logger.debug(`Loading source project from workspace ${
-    sourceProject =
+    logger.debug(`Loading source project from workspace ${options6.projectId}`);
+    sourceProject = workspace2.get(options6.projectId);
   }
   if (!sourceProject) {
-    logger.error(`Project "${
+    logger.error(`Project "${options6.projectId}" not found in the workspace`);
     return;
   }
   if (targetProject.id === sourceProject.id) {
@@ -2428,20 +2905,20 @@ var mergeHandler = async (options, logger) => {
     logger.error("The checked out project has no id");
     return;
   }
-  const finalPath =
+  const finalPath = options6.outputPath ?? workspace2.getProjectPath(targetProject.id);
   if (!finalPath) {
     logger.error("Path to checked out project not found.");
     return;
   }
   const final = Project6.merge(sourceProject, targetProject, {
-    removeUnmapped:
-    workflowMappings:
-    force:
+    removeUnmapped: options6.removeUnmapped,
+    workflowMappings: options6.workflowMappings,
+    force: options6.force
   });
-  let outputFormat =
-  if (
+  let outputFormat = workspace2.config.formats.project;
+  if (options6.outputPath?.endsWith(".json")) {
     outputFormat = "json";
-  } else if (
+  } else if (options6.outputPath?.endsWith(".yaml")) {
     outputFormat = "yaml";
   }
   let finalState = final.serialize("state", {
@@ -2450,27 +2927,26 @@ var mergeHandler = async (options, logger) => {
   if (outputFormat === "json") {
     finalState = JSON.stringify(finalState, null, 2);
   }
-  await
+  await fs6.writeFile(finalPath, finalState);
   logger.info(`Updated statefile at `, finalPath);
   logger.info("Checking out merged project to filesystem");
-  await
+  await handler3(
     {
-      command: "checkout",
-
-      projectId:
-      log:
+      command: "project-checkout",
+      workspace: commandPath,
+      projectId: options6.outputPath ? finalPath : final.id,
+      log: options6.log
     },
     logger
   );
   logger.success(
-    `Project ${sourceProject.id} has been merged into Project ${targetProject.id}
+    `Project ${sourceProject.id} has been merged into Project ${targetProject.id}`
   );
 };
-var handler_default14 = mergeHandler;
 
 // src/util/print-versions.ts
 import { readFileSync as readFileSync2 } from "node:fs";
-import
+import path14 from "node:path";
 import url from "node:url";
 import { getNameAndVersion as getNameAndVersion7 } from "@openfn/runtime";
 import { mainSymbols } from "figures";
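Taken together, `handler7` merges a source project into the checked-out target, writes the merged state file, and re-expands it to the filesystem via the checkout handler. A reduced sketch of the essential calls, using only the APIs visible in this diff (the option values shown are illustrative):

    const final = Project6.merge(sourceProject, targetProject, {
      removeUnmapped: false, // drop target workflows that matched no source workflow
      workflowMappings: {},  // optional manual source -> target workflow pairing
      force: false           // merge even when workflows look incompatible
    });
    // Serialize to the workspace's configured format and write the state file...
    let finalState = final.serialize("state", { format: "yaml" });
    await fs6.writeFile(finalPath, finalState);
    // ...then re-checkout so the filesystem reflects the merged project.
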
@@ -2482,15 +2958,15 @@ var { triangleRightSmall: t } = mainSymbols;
 var loadVersionFromPath = (adaptorPath) => {
   try {
     const pkg = JSON.parse(
-      readFileSync2(
+      readFileSync2(path14.resolve(adaptorPath, "package.json"), "utf8")
     );
     return pkg.version;
   } catch (e) {
     return "unknown";
   }
 };
-var printVersions = async (logger,
-  const { adaptors, logJson } =
+var printVersions = async (logger, options6 = {}, includeComponents = false) => {
+  const { adaptors, logJson: logJson2 } = options6;
   let longestAdaptorName = "";
   const adaptorList = [];
   adaptors?.forEach((adaptor) => {
@@ -2500,7 +2976,7 @@ var printVersions = async (logger, options = {}, includeComponents = false) => {
       const [namePart, pathPart] = adaptor.split("=");
       adaptorVersion = loadVersionFromPath(pathPart);
       adaptorName = getNameAndVersion7(namePart).name;
-    } else if (
+    } else if (options6.monorepoPath) {
       adaptorName = getNameAndVersion7(adaptor).name;
       adaptorVersion = "monorepo";
     } else {
@@ -2517,13 +2993,13 @@ var printVersions = async (logger, options = {}, includeComponents = false) => {
     ...[NODE, CLI2, RUNTIME2, COMPILER2, longestAdaptorName].map((s) => s.length)
   );
   const prefix = (str) => ` ${t} ${str.padEnd(longest + 4, " ")}`;
-  const dirname3 =
+  const dirname3 = path14.dirname(url.fileURLToPath(import.meta.url));
   const pkg = JSON.parse(readFileSync2(`${dirname3}/../../package.json`, "utf8"));
   const { version, dependencies } = pkg;
   const compilerVersion = dependencies["@openfn/compiler"];
   const runtimeVersion = dependencies["@openfn/runtime"];
   let output;
-  if (
+  if (logJson2) {
     output = {
       versions: {
         "node.js": process.version.substring(1),
@@ -2559,6 +3035,25 @@ ${prefix(name)}${version2}`;
 };
 var print_versions_default = printVersions;
 
+// src/env.ts
+import { config } from "dotenv";
+import { expand as expand2 } from "dotenv-expand";
+var env2;
+var report = (logger) => {
+  let envs = [];
+  if (process.env.$DOT_ENV_OVERRIDES) {
+    envs = process.env.$DOT_ENV_OVERRIDES.split(",").map((s) => s.trim());
+  } else {
+    envs = Object.keys(env2?.parsed ?? {});
+  }
+  if (envs.length) {
+    logger?.always(`Imported ${envs.length} env vars from .env file`);
+    logger?.debug("Envs set from .env: ", envs.join(", "));
+  } else if (env2 && env2.error) {
+    logger?.debug(".env not found");
+  }
+};
+
 // src/commands.ts
 var handlers = {
   apollo: handler_default,
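The new `src/env.ts` module loads a local `.env` file through dotenv and dotenv-expand; only `report` appears in this hunk, reading the parse result from `env2`. The assignment to `env2` falls outside the lines shown — presumably something like the following sketch, which is an assumption rather than the package's exact code:

    import { config } from "dotenv";
    import { expand } from "dotenv-expand";

    // expand() resolves ${VAR} references inside .env values; the result
    // carries .parsed (vars that were set) or .error (e.g. .env missing).
    const env2 = expand(config());
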
@@ -2570,10 +3065,8 @@ var handlers = {
   docs: handler_default8,
   metadata: handler_default9,
   pull: handler_default10,
-  projects:
-
-  merge: handler_default14,
-  project: handler_default12,
+  projects: handler_exports,
+  project: handler_exports,
   ["collections-get"]: handler_default4.get,
   ["collections-set"]: handler_default4.set,
   ["collections-remove"]: handler_default4.remove,
@@ -2581,14 +3074,20 @@ var handlers = {
   ["repo-install"]: install,
   ["repo-pwd"]: pwd,
   ["repo-list"]: list,
+  ["project-list"]: handler5,
+  ["project-version"]: handler6,
+  ["project-merge"]: handler7,
+  ["project-checkout"]: handler3,
+  ["project-fetch"]: handler2,
   version: async (opts, logger) => print_versions_default(logger, opts, true)
 };
-var parse = async (
-  const logger =
-  if (
-  await print_versions_default(logger,
+var parse = async (options6, log2) => {
+  const logger = log2 || logger_default(CLI, options6);
+  if (options6.command === "execute" || options6.command === "test") {
+    await print_versions_default(logger, options6);
   }
-
+  report(logger);
+  const { monorepoPath } = options6;
   if (monorepoPath) {
     if (monorepoPath === "ERR") {
       logger.error(
@@ -2599,19 +3098,19 @@ var parse = async (options, log) => {
     }
     await validateMonoRepo(monorepoPath, logger);
     logger.success(`Loading adaptors from monorepo at ${monorepoPath}`);
-
+    options6.adaptors = map_adaptors_to_monorepo_default(
       monorepoPath,
-
+      options6.adaptors,
       logger
     );
   }
-  const
-  if (!
-  logger.error(`Unrecognised command: ${
+  const handler8 = handlers[options6.command];
+  if (!handler8) {
+    logger.error(`Unrecognised command: ${options6.command}`);
     process.exit(1);
   }
   try {
-    return await
+    return await handler8(options6, logger);
   } catch (e) {
     if (!process.exitCode) {
       process.exitCode = e.exitCode || 1;
@@ -2625,7 +3124,11 @@ var parse = async (options, log) => {
     } else {
       logger.break();
       logger.error("Command failed!");
-
+      if (e instanceof CLIError) {
+        logger.error(e.message);
+      } else {
+        logger.error(e);
+      }
     }
   }
 };