@openfn/cli 1.19.0 → 1.20.1
This diff shows the changes between publicly available package versions as released to a supported registry. It is provided for informational purposes only and reflects the packages as they appear in their public registries.
- package/dist/index.js +121 -51
- package/dist/process/runner.js +1017 -537
- package/package.json +3 -3
package/dist/process/runner.js CHANGED
@@ -1,3 +1,9 @@
+var __defProp = Object.defineProperty;
+var __export = (target, all) => {
+  for (var name in all)
+    __defProp(target, name, { get: all[name], enumerable: true });
+};
+
 // src/apollo/handler.ts
 import { WebSocket } from "ws";
 import { readFile, writeFile, mkdir } from "node:fs/promises";
@@ -15,13 +21,13 @@ var urlMap = {
   ["local"]: LOCAL_URL
 };
 var DEFAULT_ENV = "staging";
-var getURL = (
-  if (
-    if (
-      return urlMap[
+var getURL = (options6) => {
+  if (options6.apolloUrl) {
+    if (options6.apolloUrl in urlMap) {
+      return urlMap[options6.apolloUrl];
     }
-    if (
-      return
+    if (options6.apolloUrl.startsWith("http")) {
+      return options6.apolloUrl;
     }
     throw new Error(`Unrecognised apollo URL`);
   }
@@ -46,14 +52,14 @@ var outputFiles = (files, logger) => {
 };
 
 // src/apollo/handler.ts
-var apolloHandler = async (
-  logger.always(`Calling Apollo service: ${
-  const json = await loadPayload(logger,
-  const url2 = getURL(
+var apolloHandler = async (options6, logger) => {
+  logger.always(`Calling Apollo service: ${options6.service}`);
+  const json = await loadPayload(logger, options6.payload);
+  const url2 = getURL(options6);
   logger.success(`Using apollo server at`, url2);
-  const result = await callApollo(url2,
+  const result = await callApollo(url2, options6.service, json, logger);
   if (result) {
-    await serializeOutput(
+    await serializeOutput(options6, result, logger);
   } else {
     logger.warn("No output returned from Apollo");
   }
@@ -73,15 +79,15 @@ var write = async (basePath, filePath, content, logger) => {
   await writeFile(dest, content);
   logger.success(`Wrote content to ${dest}`);
 };
-var serializeOutput = async (
-  if (
-    if (result.files && !
+var serializeOutput = async (options6, result, logger) => {
+  if (options6.outputPath) {
+    if (result.files && !options6.outputPath.endsWith(".json")) {
       for (const p in result.files) {
-        await write(
+        await write(options6.outputPath, p, result.files[p], logger);
       }
     } else {
       await write(
-
+        options6.outputPath,
         "",
         JSON.stringify(result, null, 2),
         logger
@@ -127,14 +133,14 @@ var callApollo = async (apolloBaseUrl, serviceName, payload, logger) => {
     });
   });
 };
-var loadPayload = async (logger,
-  if (!
+var loadPayload = async (logger, path15) => {
+  if (!path15) {
     logger.warn("No JSON payload provided");
     logger.warn("Most apollo services require JSON to be uploaded");
     return {};
   }
-  if (
-    const str = await readFile(
+  if (path15.endsWith(".json")) {
+    const str = await readFile(path15, "utf8");
     const json = JSON.parse(str);
     logger.debug("Loaded JSON payload");
     return json;
@@ -158,17 +164,17 @@ var namespaces = {
   [COMPILER]: "CMP",
   [JOB]: "JOB"
 };
-var createLogger2 = (name = "",
-  const logOptions =
+var createLogger2 = (name = "", options6) => {
+  const logOptions = options6.log || {};
   let json = false;
   let level = logOptions[name] || logOptions.default || "default";
-  if (
+  if (options6.logJson) {
     json = true;
   }
   return actualCreateLogger(namespaces[name] || name, {
     level,
     json,
-    sanitize:
+    sanitize: options6.sanitize || "none",
     ...logOptions
   });
 };
@@ -179,8 +185,8 @@ var createNullLogger = () => createLogger2(void 0, { log: { default: "none" } })
 import fs from "node:fs";
 import path2 from "node:path";
 import { rmdir } from "node:fs/promises";
-var getCachePath = async (plan,
-  const { baseDir } =
+var getCachePath = async (plan, options6, stepId) => {
+  const { baseDir } = options6;
   const { name } = plan.workflow;
   const basePath = `${baseDir}/.cli-cache/${name}`;
   if (stepId) {
@@ -188,10 +194,10 @@ var getCachePath = async (plan, options, stepId) => {
   }
   return path2.resolve(basePath);
 };
-var ensureGitIgnore = (
-  if (!
+var ensureGitIgnore = (options6) => {
+  if (!options6._hasGitIgnore) {
     const ignorePath = path2.resolve(
-
+      options6.baseDir,
       ".cli-cache",
       ".gitignore"
     );
@@ -201,19 +207,19 @@ var ensureGitIgnore = (options) => {
       fs.writeFileSync(ignorePath, "*");
     }
   }
-
+  options6._hasGitIgnore = true;
 };
-var saveToCache = async (plan, stepId, output,
-  if (
-    const cachePath = await getCachePath(plan,
+var saveToCache = async (plan, stepId, output, options6, logger) => {
+  if (options6.cacheSteps) {
+    const cachePath = await getCachePath(plan, options6, stepId);
     fs.mkdirSync(path2.dirname(cachePath), { recursive: true });
-    ensureGitIgnore(
+    ensureGitIgnore(options6);
     logger.info(`Writing ${stepId} output to ${cachePath}`);
     fs.writeFileSync(cachePath, JSON.stringify(output));
   }
 };
-var clearCache = async (plan,
-  const cacheDir = await getCachePath(plan,
+var clearCache = async (plan, options6, logger) => {
+  const cacheDir = await getCachePath(plan, options6);
   try {
     await rmdir(cacheDir, { recursive: true });
     logger.info(`Cleared cache at ${cacheDir}`);
@@ -256,13 +262,13 @@ var execute_default = async (plan, input, opts, logger) => {
 };
 function parseAdaptors(plan) {
   const extractInfo = (specifier) => {
-    const [module,
+    const [module, path15] = specifier.split("=");
     const { name, version } = getNameAndVersion(module);
     const info = {
       name
     };
-    if (
-      info.path =
+    if (path15) {
+      info.path = path15;
     }
     if (version) {
       info.version = version;
@@ -283,7 +289,7 @@ function parseAdaptors(plan) {
 // src/execute/serialize-output.ts
 import { mkdir as mkdir2, writeFile as writeFile2 } from "node:fs/promises";
 import { dirname } from "node:path";
-var serializeOutput2 = async (
+var serializeOutput2 = async (options6, result, logger) => {
   let output = result;
   if (output && (output.configuration || output.data)) {
     const { configuration, ...rest } = result;
@@ -294,14 +300,14 @@ var serializeOutput2 = async (options, result, logger) => {
   } else {
     output = JSON.stringify(output, void 0, 2);
   }
-  if (
+  if (options6.outputStdout) {
     logger.success(`Result: `);
     logger.always(output);
-  } else if (
-    await mkdir2(dirname(
-    logger.debug(`Writing output to ${
-    await writeFile2(
-    logger.success(`State written to ${
+  } else if (options6.outputPath) {
+    await mkdir2(dirname(options6.outputPath), { recursive: true });
+    logger.debug(`Writing output to ${options6.outputPath}`);
+    await writeFile2(options6.outputPath, output);
+    logger.success(`State written to ${options6.outputPath}`);
   }
   return output;
 };
@@ -328,16 +334,16 @@ import {
   loadRepoPkg,
   getNameAndVersion as getNameAndVersion2
 } from "@openfn/runtime";
-var install = async (opts,
+var install = async (opts, log2 = defaultLogger) => {
   let { packages, adaptors, repoDir } = opts;
   const targets = [].concat(packages ?? [], adaptors ?? []);
   if (targets) {
-
-
-
-    const result = await rtInstall(targets, repoDir,
-    const duration =
-
+    log2.timer("install");
+    log2.success("Installing packages...");
+    log2.debug("repoDir is set to:", repoDir);
+    const result = await rtInstall(targets, repoDir, log2);
+    const duration = log2.timer("install");
+    log2.success(`Installation complete in ${duration}`);
     return result;
   }
   return [];
@@ -365,16 +371,16 @@ var removePackage = async (packageSpecifier, repoDir, logger) => {
     logger.warn(`Failed to remove ${aliasedName}: ${error.message}`);
   }
 };
-var clean = async (
-  if (
+var clean = async (options6, logger) => {
+  if (options6.repoDir) {
     const doIt = await logger.confirm(
-      `This will remove everything at ${
-
+      `This will remove everything at ${options6.repoDir}. Do you wish to proceed?`,
+      options6.force
     );
     if (doIt) {
       return new Promise((resolve) => {
-        logger.info(`Cleaning repo at ${
-        exec(`npm exec rimraf ${
+        logger.info(`Cleaning repo at ${options6.repoDir} `);
+        exec(`npm exec rimraf ${options6.repoDir}`, () => {
          logger.success("Repo cleaned");
          resolve();
        });
@@ -385,12 +391,12 @@ var clean = async (options, logger) => {
     logger.error("No repoDir path detected");
   }
 };
-var pwd = async (
+var pwd = async (options6, logger) => {
   logger.info(`OPENFN_REPO_DIR is set to ${process.env.OPENFN_REPO_DIR}`);
-  logger.success(`Repo working directory is: ${
+  logger.success(`Repo working directory is: ${options6.repoDir}`);
 };
-var getDependencyList = async (
-  const pkg = await loadRepoPkg(
+var getDependencyList = async (options6, _logger) => {
+  const pkg = await loadRepoPkg(options6.repoDir);
   const result = {};
   if (pkg) {
     Object.keys(pkg.dependencies).forEach((key) => {
@@ -403,9 +409,9 @@ var getDependencyList = async (options, _logger) => {
   }
   return result;
 };
-var list = async (
-  const tree = await getDependencyList(
-  await pwd(
+var list = async (options6, logger) => {
+  const tree = await getDependencyList(options6, logger);
+  await pwd(options6, logger);
   const output = {};
   Object.keys(tree).forEach((key) => {
     const versions = tree[key];
@@ -463,27 +469,27 @@ var throwAbortableError = (message, help) => {
 };
 
 // src/compile/compile.ts
-async function compile_default(planOrPath, opts,
+async function compile_default(planOrPath, opts, log2) {
   if (typeof planOrPath === "string") {
-    const result = await compileJob(planOrPath, opts,
-
+    const result = await compileJob(planOrPath, opts, log2);
+    log2.success(`Compiled expression from ${opts.expressionPath}`);
     return result;
   }
   const compiledPlan = await compileWorkflow(
     planOrPath,
     opts,
-
+    log2
   );
-
+  log2.success("Compiled all expressions in workflow");
   return compiledPlan;
 }
-var compileJob = async (job, opts,
+var compileJob = async (job, opts, log2, jobName) => {
   try {
-    const compilerOptions = await loadTransformOptions(opts,
+    const compilerOptions = await loadTransformOptions(opts, log2);
     return compile(job, compilerOptions);
   } catch (e) {
     abort_default(
-
+      log2,
       `Failed to compile job ${jobName ?? ""}`.trim(),
       e,
       "Check the syntax of the job expression:\n\n" + job
@@ -491,7 +497,7 @@ var compileJob = async (job, opts, log, jobName) => {
     return { code: job };
   }
 };
-var compileWorkflow = async (plan, opts,
+var compileWorkflow = async (plan, opts, log2) => {
   let globalsIgnoreList = getExports(plan.workflow.globals);
   for (const step of plan.workflow.steps) {
     const job = step;
@@ -505,7 +511,7 @@ var compileWorkflow = async (plan, opts, log) => {
     const { code, map } = await compileJob(
       job.expression,
       jobOpts,
-
+      log2,
       job.id
     );
     job.expression = code;
@@ -521,21 +527,21 @@ var stripVersionSpecifier = (specifier) => {
   }
   return specifier;
 };
-var resolveSpecifierPath = async (pattern, repoDir,
-  const [specifier,
-  if (
-
-    return
+var resolveSpecifierPath = async (pattern, repoDir, log2) => {
+  const [specifier, path15] = pattern.split("=");
+  if (path15) {
+    log2.debug(`Resolved ${specifier} to path: ${path15}`);
+    return path15;
   }
-  const repoPath = await getModulePath(specifier, repoDir,
+  const repoPath = await getModulePath(specifier, repoDir, log2);
   if (repoPath) {
     return repoPath;
   }
   return null;
 };
-var loadTransformOptions = async (opts,
-  const
-    logger:
+var loadTransformOptions = async (opts, log2) => {
+  const options6 = {
+    logger: log2 || logger_default(COMPILER, opts),
     trace: opts.trace
   };
   if (opts.adaptors?.length && opts.ignoreImports != true) {
@@ -543,18 +549,18 @@ var loadTransformOptions = async (opts, log) => {
     for (const adaptorInput of opts.adaptors) {
       let exports;
       const [specifier] = adaptorInput.split("=");
-
-      const
-      if (
+      log2.debug(`Trying to preload types for ${specifier}`);
+      const path15 = await resolveSpecifierPath(adaptorInput, opts.repoDir, log2);
+      if (path15) {
        try {
-          exports = await preloadAdaptorExports(
+          exports = await preloadAdaptorExports(path15, log2);
        } catch (e) {
-
-
+          log2.error(`Failed to load adaptor typedefs from path ${path15}`);
+          log2.error(e);
        }
      }
      if (!exports || exports.length === 0) {
-
+        log2.debug(`No module exports found for ${adaptorInput}`);
      }
      adaptorsConfig.push({
        name: stripVersionSpecifier(specifier),
@@ -562,12 +568,12 @@ var loadTransformOptions = async (opts, log) => {
        exportAll: true
      });
    }
-
+    options6["add-imports"] = {
      ignore: opts.ignoreImports,
      adaptors: adaptorsConfig
    };
  }
-  return
+  return options6;
 };
 
 // src/util/load-state.ts
@@ -585,72 +591,72 @@ var getUpstreamStepId = (plan, stepId) => {
     return typeof upstreamStep === "string" ? upstreamStep : upstreamStep.id;
   }
 };
-var load_state_default = async (plan, opts,
-  const { stateStdin, statePath } = opts;
-
+var load_state_default = async (plan, opts, log2, start) => {
+  const { stateStdin, statePath: statePath2 } = opts;
+  log2.debug("Loading state...");
   if (stateStdin) {
     try {
       const json = JSON.parse(stateStdin);
-
-
+      log2.success("Read state from stdin");
+      log2.debug("state:", json);
       return json;
     } catch (e) {
-
-
-
+      log2.error("Failed to load state from stdin");
+      log2.error(stateStdin);
+      log2.error(e);
       process.exit(1);
     }
   }
-  if (
+  if (statePath2) {
     try {
-      const str = await fs2.readFile(
+      const str = await fs2.readFile(statePath2, "utf8");
       const json = JSON.parse(str);
-
-
+      log2.success(`Loaded state from ${statePath2}`);
+      log2.debug("state:", json);
       return json;
     } catch (e) {
-
-
+      log2.warn(`Error loading state from ${statePath2}`);
+      log2.warn(e);
     }
   }
   if (start) {
-
+    log2.info(
       "No state provided to CLI. Will attempt to load state from cache instead"
     );
-
+    log2.always(
      `Attempting to load cached input state for starting step "${start}"`
    );
    try {
      const upstreamStepId = getUpstreamStepId(plan, start);
      if (upstreamStepId) {
-
+        log2.debug(`Input step for "${start}" is "${upstreamStepId}"`);
        const cachedStatePath = await getCachePath(plan, opts, upstreamStepId);
-
+        log2.debug("Loading cached state from", cachedStatePath);
        try {
          await fs2.access(cachedStatePath);
          const str = await fs2.readFile(cachedStatePath, "utf8");
          const json = JSON.parse(str);
-
+          log2.success(
            `Loaded cached state for step "${start}" from ${cachedStatePath}`
          );
-
+          log2.info(` To force disable the cache, run again with --no-cache`);
          return json;
        } catch (e) {
-
-
+          log2.warn(`No cached state found for step "${start}"`);
+          log2.warn(
            "Re-run this workflow with --cache to save the output of each step"
          );
-
+          log2.break();
        }
      } else {
-
+        log2.warn(`Could not find an input step for step "${start}"`);
      }
    } catch (e) {
-
-
+      log2.warn("Error loading cached state");
+      log2.warn(e);
    }
  }
-
+  log2.info(
    "No state provided - using default state { data: {}, configuration: {} }"
  );
  return {
@@ -660,12 +666,12 @@ var load_state_default = async (plan, opts, log, start) => {
 };
 
 // src/util/validate-adaptors.ts
-var validateAdaptors = async (
-  if (
+var validateAdaptors = async (options6, logger) => {
+  if (options6.skipAdaptorValidation) {
     return;
   }
-  const isPlan =
-  const hasDeclaredAdaptors =
+  const isPlan = options6.planPath || options6.workflowPath || options6.workflow;
+  const hasDeclaredAdaptors = options6.adaptors && options6.adaptors.length > 0;
   if (isPlan && hasDeclaredAdaptors) {
     logger.error("ERROR: adaptor and workflow provided");
     logger.error(
@@ -721,43 +727,43 @@ import { readFile as readFile2 } from "node:fs/promises";
 import path3 from "node:path";
 import assert from "node:assert";
 import { getNameAndVersion as getNameAndVersion3 } from "@openfn/runtime";
-var validateMonoRepo = async (repoPath,
+var validateMonoRepo = async (repoPath, log2) => {
   try {
     const raw = await readFile2(`${repoPath}/package.json`, "utf8");
     const pkg = JSON.parse(raw);
     assert(pkg.name === "adaptors");
   } catch (e) {
-
+    log2.error(`ERROR: Adaptors Monorepo not found at ${repoPath}`);
     process.exit(9);
   }
 };
-var updatePath = (adaptor, repoPath,
+var updatePath = (adaptor, repoPath, log2) => {
   if (adaptor.match("=")) {
     return adaptor;
   }
   const { name, version } = getNameAndVersion3(adaptor);
   if (version) {
-
+    log2.warn(
       `Warning: Ignoring version specifier on ${adaptor} as loading from the adaptors monorepo`
     );
   }
   const shortName = name.replace("@openfn/language-", "");
   const abspath = path3.resolve(repoPath, "packages", shortName);
-
+  log2.info(`Mapped adaptor ${name} to monorepo: ${abspath}`);
   return `${name}=${abspath}`;
 };
-var mapAdaptorsToMonorepo = (monorepoPath = "", input = [],
+var mapAdaptorsToMonorepo = (monorepoPath = "", input = [], log2) => {
   if (monorepoPath) {
     if (Array.isArray(input)) {
       const adaptors = input;
-      return adaptors.map((a) => updatePath(a, monorepoPath,
+      return adaptors.map((a) => updatePath(a, monorepoPath, log2));
     }
     const plan = input;
     Object.values(plan.workflow.steps).forEach((step) => {
       const job = step;
      if (job.adaptors) {
        job.adaptors = job.adaptors.map(
-          (a) => updatePath(a, monorepoPath,
+          (a) => updatePath(a, monorepoPath, log2)
        );
      }
    });
@@ -768,47 +774,47 @@ var mapAdaptorsToMonorepo = (monorepoPath = "", input = [], log) => {
 var map_adaptors_to_monorepo_default = mapAdaptorsToMonorepo;
 
 // src/util/load-plan.ts
-var loadPlan = async (
-  const { workflowPath, planPath, expressionPath } =
-  if (
-    const content = await fs3.readFile(path4.resolve(
-    const
-
-    return loadXPlan({ workflow },
-  }
-  if (
-
-    return fromProject(
-  }
-  if (!expressionPath && !workflowPath && !/\.(js|json|yaml)+$/.test(
-    const
-    return fromProject(path4.resolve("."),
+var loadPlan = async (options6, logger) => {
+  const { workflowPath, planPath, expressionPath } = options6;
+  if (options6.path && /ya?ml$/.test(options6.path)) {
+    const content = await fs3.readFile(path4.resolve(options6.path), "utf-8");
+    const workflow2 = yamlToJson(content);
+    options6.baseDir = dirname2(options6.path);
+    return loadXPlan({ workflow: workflow2 }, options6, logger);
+  }
+  if (options6.path && options6.workflow) {
+    options6.baseDir = options6.path;
+    return fromProject(options6.path, options6.workflow, options6, logger);
+  }
+  if (!expressionPath && !workflowPath && !/\.(js|json|yaml)+$/.test(options6.path || "") && !options6.workflow) {
+    const workflow2 = options6.path;
+    return fromProject(path4.resolve("."), workflow2, options6, logger);
   }
   if (expressionPath) {
-    return loadExpression(
+    return loadExpression(options6, logger);
   }
   const jsonPath = planPath || workflowPath;
-  if (!
-
+  if (!options6.baseDir) {
+    options6.baseDir = path4.dirname(jsonPath);
   }
   const json = await loadJson(jsonPath, logger);
   const defaultName = path4.parse(jsonPath).name;
   if (json.workflow) {
-    return loadXPlan(json,
+    return loadXPlan(json, options6, logger, defaultName);
   } else {
-    return loadOldWorkflow(json,
+    return loadOldWorkflow(json, options6, logger, defaultName);
   }
 };
 var load_plan_default = loadPlan;
-var fromProject = async (rootDir, workflowName,
+var fromProject = async (rootDir, workflowName, options6, logger) => {
   logger.debug("Loading Repo from ", path4.resolve(rootDir));
   const project = await Project.from("fs", { root: rootDir });
   logger.debug("Loading workflow ", workflowName);
-  const
-  if (!
+  const workflow2 = project.getWorkflow(workflowName);
+  if (!workflow2) {
     throw new Error(`Workflow "${workflowName}" not found`);
   }
-  return loadXPlan({ workflow },
+  return loadXPlan({ workflow: workflow2 }, options6, logger);
 };
 var loadJson = async (workflowPath, logger) => {
   let text;
@@ -843,8 +849,8 @@ var maybeAssign = (a, b, keys) => {
     }
   });
 };
-var loadExpression = async (
-  const expressionPath =
+var loadExpression = async (options6, logger) => {
+  const expressionPath = options6.expressionPath;
   logger.debug(`Loading expression from ${expressionPath}`);
   try {
     const expression = await fs3.readFile(expressionPath, "utf8");
@@ -852,19 +858,19 @@ var loadExpression = async (options, logger) => {
     const step = {
       expression,
       // The adaptor should have been expanded nicely already, so we don't need intervene here
-      adaptors:
+      adaptors: options6.adaptors ?? []
     };
     const wfOptions = {};
-    maybeAssign(
+    maybeAssign(options6, wfOptions, ["timeout"]);
     const plan = {
       workflow: {
         name,
         steps: [step],
-        globals:
+        globals: options6.globals
       },
       options: wfOptions
     };
-    return loadXPlan(plan,
+    return loadXPlan(plan, options6, logger);
   } catch (e) {
     abort_default(
       logger,
@@ -875,33 +881,33 @@ var loadExpression = async (options, logger) => {
     return {};
   }
 };
-var loadOldWorkflow = async (
+var loadOldWorkflow = async (workflow2, options6, logger, defaultName = "") => {
   const plan = {
     workflow: {
-      steps:
+      steps: workflow2.jobs
     },
     options: {
-      start:
+      start: workflow2.start
     }
   };
-  if (
-    plan.id =
+  if (workflow2.id) {
+    plan.id = workflow2.id;
   }
-  const final = await loadXPlan(plan,
+  const final = await loadXPlan(plan, options6, logger, defaultName);
   logger.warn("Converted workflow into new format:");
   logger.warn(final);
   return final;
 };
-var fetchFile = async (fileInfo,
+var fetchFile = async (fileInfo, log2) => {
   const { rootDir = "", filePath, name } = fileInfo;
   try {
     const fullPath = filePath.startsWith("~") ? filePath : path4.resolve(rootDir, filePath);
     const result = await fs3.readFile(fullPath, "utf8");
-
+    log2.debug("Loaded file", fullPath);
     return result;
   } catch (e) {
     abort_default(
-
+      log2,
       `File not found for ${name}: ${filePath}`,
       void 0,
       `This workflow references a file which cannot be found at ${filePath}
@@ -911,20 +917,20 @@ Paths inside the workflow are relative to the workflow.json`
     return ".";
   }
 };
-var importGlobals = async (plan, rootDir,
+var importGlobals = async (plan, rootDir, log2) => {
   const fnStr = plan.workflow?.globals;
   if (fnStr) {
     if (isPath(fnStr)) {
       plan.workflow.globals = await fetchFile(
         { name: "globals", rootDir, filePath: fnStr },
-
+        log2
       );
     } else {
       plan.workflow.globals = fnStr;
     }
   }
 };
-var importExpressions = async (plan, rootDir,
+var importExpressions = async (plan, rootDir, log2) => {
   let idx = 0;
   for (const step of plan.workflow.steps) {
     const job = step;
@@ -942,7 +948,7 @@ var importExpressions = async (plan, rootDir, log) => {
          rootDir,
          filePath: expressionStr
        },
-
+        log2
      );
    }
    if (configurationStr && isPath(configurationStr)) {
@@ -952,7 +958,7 @@ var importExpressions = async (plan, rootDir, log) => {
          rootDir,
          filePath: configurationStr
        },
-
+        log2
      );
      job.configuration = JSON.parse(configString);
    }
@@ -963,7 +969,7 @@ var importExpressions = async (plan, rootDir, log) => {
          rootDir,
          filePath: stateStr
        },
-
+        log2
      );
      job.state = JSON.parse(stateString);
    }
@@ -979,7 +985,7 @@ var ensureAdaptors = (plan) => {
     job.adaptors ??= [];
   });
 };
-var loadXPlan = async (plan,
+var loadXPlan = async (plan, options6, logger, defaultName = "") => {
   if (!plan.options) {
     plan.options = {};
   }
@@ -987,22 +993,22 @@ var loadXPlan = async (plan, options, logger, defaultName = "") => {
     plan.workflow.name = defaultName;
   }
   ensureAdaptors(plan);
-  if (
-    plan.workflow.globals =
-  await importGlobals(plan,
-  await importExpressions(plan,
-  if (
+  if (options6.globals)
+    plan.workflow.globals = options6.globals;
+  await importGlobals(plan, options6.baseDir, logger);
+  await importExpressions(plan, options6.baseDir, logger);
+  if (options6.expandAdaptors) {
     expand_adaptors_default(plan);
   }
-  await map_adaptors_to_monorepo_default(
-  maybeAssign(
+  await map_adaptors_to_monorepo_default(options6.monorepoPath, plan, logger);
+  maybeAssign(options6, plan.options, ["timeout", "start"]);
   logger.info(`Loaded workflow ${plan.workflow.name ?? ""}`);
   return plan;
 };
 
 // src/util/assert-path.ts
-var assert_path_default = (
-  if (!
+var assert_path_default = (path15) => {
+  if (!path15) {
     console.error("ERROR: no path provided!");
     console.error("\nUsage:");
     console.error("  open path/to/job");
@@ -1038,20 +1044,20 @@ var fuzzy_match_step_default = (plan, stepPattern) => {
 
 // src/util/validate-plan.ts
 var assertWorkflowStructure = (plan, logger) => {
-  const { workflow, options } = plan;
-  if (!
+  const { workflow: workflow2, options: options6 } = plan;
+  if (!workflow2 || typeof workflow2 !== "object") {
     throw new Error(`Missing or invalid "workflow" key in execution plan`);
   }
-  if (!Array.isArray(
+  if (!Array.isArray(workflow2.steps)) {
     throw new Error("The workflow.steps key must be an array");
   }
-  if (
+  if (workflow2.steps.length === 0) {
     logger.warn("The workflow.steps array is empty");
   }
-
+  workflow2.steps.forEach((step, index) => {
     assertStepStructure(step, index);
   });
-  assertOptionsStructure(
+  assertOptionsStructure(options6, logger);
 };
 var assertStepStructure = (step, index) => {
   const allowedKeys = [
@@ -1078,9 +1084,9 @@ var assertStepStructure = (step, index) => {
     );
   }
 };
-var assertOptionsStructure = (
+var assertOptionsStructure = (options6 = {}, logger) => {
   const allowedKeys = ["timeout", "stepTimeout", "start", "end", "sanitize"];
-  for (const key in
+  for (const key in options6) {
     if (!allowedKeys.includes(key)) {
       logger.warn(`Unrecognized option "${key}" in options object`);
     }
@@ -1136,17 +1142,17 @@ var matchStep = (plan, stepPattern, stepName, logger) => {
   }
   return "";
 };
-var executeHandler = async (
+var executeHandler = async (options6, logger) => {
   const start = (/* @__PURE__ */ new Date()).getTime();
-  assert_path_default(
-  await validate_adaptors_default(
-  let plan = await load_plan_default(
+  assert_path_default(options6.path);
+  await validate_adaptors_default(options6, logger);
+  let plan = await load_plan_default(options6, logger);
   validate_plan_default(plan, logger);
-  if (
-    await clearCache(plan,
+  if (options6.cacheSteps) {
+    await clearCache(plan, options6, logger);
  }
  const moduleResolutions = {};
-  const { repoDir, monorepoPath, autoinstall } =
+  const { repoDir, monorepoPath, autoinstall } = options6;
  if (autoinstall) {
    if (monorepoPath) {
      logger.warn("Skipping auto-install as monorepo is being used");
@@ -1154,13 +1160,13 @@ var executeHandler = async (options, logger) => {
      const autoInstallTargets = get_autoinstall_targets_default(plan);
      if (autoInstallTargets.length) {
        logger.info("Auto-installing language adaptors");
-
+        options6.adaptors = await install(
          { packages: autoInstallTargets, repoDir },
          logger
        );
-        if (autoInstallTargets.length ===
+        if (autoInstallTargets.length === options6.adaptors.length) {
          for (let i = 0; i < autoInstallTargets.length; i++) {
-            moduleResolutions[autoInstallTargets[i]] =
+            moduleResolutions[autoInstallTargets[i]] = options6.adaptors[i];
          }
        }
      }
@@ -1168,35 +1174,35 @@ var executeHandler = async (options, logger) => {
   }
   let customStart;
   let customEnd;
-  if (
-    const step = matchStep(plan,
+  if (options6.only) {
+    const step = matchStep(plan, options6.only, "only", logger);
     customStart = step;
     customEnd = step;
-    logger.always(`Only running workflow step "${
+    logger.always(`Only running workflow step "${options6.start}"`);
   } else {
-    if (
+    if (options6.start) {
       customStart = matchStep(
         plan,
-
+        options6.start ?? plan.options.start,
         "start",
         logger
       );
-      logger.info(`Starting workflow from step "${
+      logger.info(`Starting workflow from step "${options6.start}"`);
     }
-    if (
+    if (options6.end) {
       customEnd = matchStep(
         plan,
-
+        options6.end ?? plan.options.end,
         "end",
         logger
      );
-      logger.always(`Ending workflow at step "${
+      logger.always(`Ending workflow at step "${options6.end}"`);
    }
  }
-  const state = await load_state_default(plan,
+  const state = await load_state_default(plan, options6, logger, customStart);
  plan = override_plan_adaptors_default(plan, moduleResolutions);
-  if (
-    plan = await compile_default(plan,
+  if (options6.compile) {
+    plan = await compile_default(plan, options6, logger);
  } else {
    logger.info("Skipping compilation as noCompile is set");
  }
@@ -1210,13 +1216,13 @@ var executeHandler = async (options, logger) => {
     workflow: plan.workflow
   };
   try {
-    const result = await execute_default(finalPlan, state,
-    if (
+    const result = await execute_default(finalPlan, state, options6, logger);
+    if (options6.cacheSteps) {
      logger.success(
        "Cached output written to ./cli-cache (see info logs for details)"
      );
    }
-    await serialize_output_default(
+    await serialize_output_default(options6, result, logger);
    const duration = printDuration((/* @__PURE__ */ new Date()).getTime() - start);
    if (result?.errors) {
      logger.warn(
@@ -1239,22 +1245,22 @@ var handler_default2 = executeHandler;
 
 // src/compile/handler.ts
 import { writeFile as writeFile3 } from "node:fs/promises";
-var compileHandler = async (
-  assert_path_default(
+var compileHandler = async (options6, logger) => {
+  assert_path_default(options6.path);
   let result;
-  if (
-    const { code } = await compile_default(
+  if (options6.expressionPath) {
+    const { code } = await compile_default(options6.expressionPath, options6, logger);
     result = code;
   } else {
-    const plan = await load_plan_default(
-    const compiledPlan = await compile_default(plan,
+    const plan = await load_plan_default(options6, logger);
+    const compiledPlan = await compile_default(plan, options6, logger);
     result = JSON.stringify(compiledPlan, null, 2);
   }
-  if (
+  if (options6.outputStdout) {
     logger.success("Result:\n\n" + result);
   } else {
-    await writeFile3(
-    logger.success(`Compiled to ${
+    await writeFile3(options6.outputPath, result);
+    logger.success(`Compiled to ${options6.outputPath}`);
   }
 };
 var handler_default3 = compileHandler;
@@ -1267,27 +1273,27 @@ import { readFile as readFile3, writeFile as writeFile4 } from "node:fs/promises
 import path5 from "node:path";
 import { request } from "undici";
 var DEFAULT_PAGE_SIZE = 1e3;
-var request_default = async (method,
-  const base =
-  const url2 = path5.join(base, "/collections",
+var request_default = async (method, options6, logger) => {
+  const base = options6.lightning || process.env.OPENFN_ENDPOINT || "https://app.openfn.org";
+  const url2 = path5.join(base, "/collections", options6.collectionName);
   logger.debug("Calling Collections server at ", url2);
   const headers = {
-    Authorization: `Bearer ${
+    Authorization: `Bearer ${options6.token}`
   };
   const query = Object.assign(
     {
-      key:
-      limit:
+      key: options6.key,
+      limit: options6.pageSize || DEFAULT_PAGE_SIZE
     },
-
+    options6.query
   );
   const args = {
     headers,
     method,
     query
   };
-  if (
-    args.body = JSON.stringify(
+  if (options6.data) {
+    args.body = JSON.stringify(options6.data);
     headers["content-type"] = "application/json";
   }
   let result = {};
@@ -1298,11 +1304,11 @@ var request_default = async (method, options, logger) => {
     if (cursor) {
       query.cursor = cursor;
     }
-    if (
-      limit =
+    if (options6.limit) {
+      limit = options6.limit;
      query.limit = Math.min(
-
-
+        options6.pageSize || DEFAULT_PAGE_SIZE,
+        options6.limit - count
      );
    }
    try {
@@ -1338,7 +1344,7 @@ var request_default = async (method, options, logger) => {
      logger.error(e);
      throwAbortableError(
        `CONNECTION_REFUSED: error connecting to server at ${base}`,
-        "Check you have passed the correct URL to --
+        "Check you have passed the correct URL to --endpoint or OPENFN_ENDPOINT"
      );
    }
  } while (cursor && count < limit);
@@ -1396,7 +1402,7 @@ var ensureToken = (opts, logger) => {
    }
  }
 };
-var buildQuery = (
+var buildQuery = (options6) => {
   const map = {
     createdBefore: "created_before",
     createdAfter: "created_after",
@@ -1405,34 +1411,34 @@ var buildQuery = (options) => {
   };
   const query = {};
   Object.keys(map).forEach((key) => {
-    if (
-      query[map[key]] =
+    if (options6[key]) {
+      query[map[key]] = options6[key];
    }
  });
  return query;
 };
-var get = async (
-  ensureToken(
-  const multiMode =
+var get = async (options6, logger) => {
+  ensureToken(options6, logger);
+  const multiMode = options6.key.includes("*");
  if (multiMode) {
    logger.info(
-      `Fetching multiple items from collection "${
+      `Fetching multiple items from collection "${options6.collectionName}" with pattern ${options6.key}`
    );
  } else {
    logger.info(
-      `Fetching "${
+      `Fetching "${options6.key}" from collection "${options6.collectionName}"`
    );
  }
  let result = await request_default(
    "GET",
    {
-      lightning:
-      token:
-      pageSize:
-      limit:
-      key:
-      collectionName:
-      query: buildQuery(
+      lightning: options6.endpoint,
+      token: options6.token,
+      pageSize: options6.pageSize,
+      limit: options6.limit,
+      key: options6.key,
+      collectionName: options6.collectionName,
+      query: buildQuery(options6)
    },
    logger
  );
@@ -1440,32 +1446,32 @@ var get = async (options, logger) => {
     logger.success(`Fetched ${Object.keys(result).length} items!`);
   } else {
     result = Object.values(result)[0];
-    logger.success(`Fetched ${
+    logger.success(`Fetched ${options6.key}`);
   }
-  if (
+  if (options6.outputPath) {
     const content = JSON.stringify(
       result,
       null,
-
+      options6.pretty ? 2 : void 0
    );
-    await writeFile4(
-    logger.always(`Wrote items to ${
+    await writeFile4(options6.outputPath, content);
+    logger.always(`Wrote items to ${options6.outputPath}`);
  } else {
    logger.print(result);
  }
 };
-var set = async (
-  if (
+var set = async (options6, logger) => {
+  if (options6.key && options6.items) {
    throwAbortableError(
      "ARGUMENT_ERROR: arguments for key and items were provided",
      "If upserting multiple items with --items, do not pass a key"
    );
  }
-  ensureToken(
-  logger.info(`Upserting items to collection "${
+  ensureToken(options6, logger);
+  logger.info(`Upserting items to collection "${options6.collectionName}"`);
  const items = [];
-  if (
-    const resolvedPath = path6.resolve(
+  if (options6.items) {
+    const resolvedPath = path6.resolve(options6.items);
    logger.debug("Loading items from ", resolvedPath);
    const data = await readFile3(resolvedPath, "utf8");
    const obj = JSON.parse(data);
@@ -1473,43 +1479,43 @@ var set = async (options, logger) => {
      items.push({ key, value: JSON.stringify(value) });
    });
    logger.info(`Upserting ${items.length} items`);
-  } else if (
-    const resolvedPath = path6.resolve(
+  } else if (options6.key && options6.value) {
+    const resolvedPath = path6.resolve(options6.value);
    logger.debug("Loading value from ", resolvedPath);
-    const data = await readFile3(path6.resolve(
+    const data = await readFile3(path6.resolve(options6.value), "utf8");
    const value = JSON.stringify(JSON.parse(data));
-    items.push({ key:
-    logger.info(`Upserting data to "${
+    items.push({ key: options6.key, value });
+    logger.info(`Upserting data to "${options6.key}"`);
  } else {
    throw new Error("INVALID_ARGUMENTS");
  }
  const result = await request_default(
    "POST",
    {
-      lightning:
-      token:
-      key:
-      collectionName:
+      lightning: options6.endpoint,
+      token: options6.token,
+      key: options6.key,
+      collectionName: options6.collectionName,
      data: { items }
    },
    logger
  );
  logger.success(`Upserted ${result.upserted} items!`);
 };
-var remove = async (
-  ensureToken(
+var remove = async (options6, logger) => {
+  ensureToken(options6, logger);
  logger.info(
-    `Removing "${
+    `Removing "${options6.key}" from collection "${options6.collectionName}"`
  );
-  if (
+  if (options6.dryRun) {
    logger.info("--dry-run passed: fetching affected items");
    let result = await request_default(
      "GET",
      {
-        lightning:
-        token:
-        key:
-        collectionName:
+        lightning: options6.endpoint,
+        token: options6.token,
+        key: options6.key,
+        collectionName: options6.collectionName
      },
      logger
    );
@@ -1521,11 +1527,11 @@ var remove = async (options, logger) => {
   let result = await request_default(
     "DELETE",
     {
-      lightning:
-      token:
-      key:
-      collectionName:
-      query: buildQuery(
+      lightning: options6.endpoint,
+      token: options6.token,
+      key: options6.key,
+      collectionName: options6.collectionName,
+      query: buildQuery(options6)
    },
    logger
  );
@@ -1539,9 +1545,9 @@ var handler_default4 = {
 };
 
 // src/test/handler.ts
-var testHandler = async (
+var testHandler = async (options6, logger) => {
   logger.log("Running test workflow...");
-  const opts = { ...
+  const opts = { ...options6 };
   opts.compile = true;
   opts.adaptors = [];
   const plan = {
@@ -1606,22 +1612,22 @@ import {
 // src/deploy/beta.ts
 import Project2 from "@openfn/project";
 import { deployProject } from "@openfn/deploy";
-async function handler(
+async function handler(options6, logger) {
   const { OPENFN_API_KEY } = process.env;
-  const { endpoint } =
+  const { endpoint: endpoint2 } = options6;
   const config2 = {
-    apiKey:
+    apiKey: options6.apiKey
   };
-  if (!
+  if (!options6.apiKey && OPENFN_API_KEY) {
     logger.info("Using OPENFN_API_KEY environment variable");
     config2.apiKey = OPENFN_API_KEY;
   }
-  const project = await Project2.from("fs", { root:
+  const project = await Project2.from("fs", { root: options6.path || "." });
   console.log({ openfn: project.openfn });
   const state = project.serialize("state", { format: "json" });
   logger.debug("Converted local project to app state:");
   logger.debug(JSON.stringify(state, null, 2));
-  config2.endpoint =
+  config2.endpoint = endpoint2 || project.openfn?.endpoint;
   logger.info("Sending project to app...");
   await deployProject(config2, state);
   logger.success("Updated project at", config2.endpoint);
@@ -1629,15 +1635,15 @@ async function handler(options, logger) {
 
 // src/deploy/handler.ts
 var actualDeploy = deploy;
-async function deployHandler(
-  if (
-    return handler(
+async function deployHandler(options6, logger, deployFn = actualDeploy) {
+  if (options6.beta) {
+    return handler(options6, logger);
   }
   try {
-    const config2 = mergeOverrides(await getConfig(
+    const config2 = mergeOverrides(await getConfig(options6.configPath), options6);
     logger.debug("Deploying with config", JSON.stringify(config2, null, 2));
-    if (
-      config2.requireConfirmation =
+    if (options6.confirm === false) {
+      config2.requireConfirmation = options6.confirm;
    }
    if (process.env["OPENFN_API_KEY"]) {
      logger.info("Using OPENFN_API_KEY environment variable");
@@ -1662,15 +1668,15 @@ async function deployHandler(options, logger, deployFn = actualDeploy) {
     throw error;
   }
 }
-function mergeOverrides(config2,
+function mergeOverrides(config2, options6) {
   return {
     ...config2,
     apiKey: pickFirst(process.env["OPENFN_API_KEY"], config2.apiKey),
     endpoint: pickFirst(process.env["OPENFN_ENDPOINT"], config2.endpoint),
-    statePath: pickFirst(
-    specPath: pickFirst(
-    configPath:
-    requireConfirmation: pickFirst(
+    statePath: pickFirst(options6.statePath, config2.statePath),
+    specPath: pickFirst(options6.projectPath, config2.specPath),
+    configPath: options6.configPath,
+    requireConfirmation: pickFirst(options6.confirm, config2.requireConfirmation)
  };
 }
 function pickFirst(...args) {
@@ -1689,20 +1695,20 @@ var RETRY_COUNT = 20;
|
|
|
1689
1695
|
var TIMEOUT_MS = 1e3 * 60;
|
|
1690
1696
|
var actualDocGen = (specifier) => describePackage(specifier, {});
|
|
1691
1697
|
var ensurePath = (filePath) => mkdirSync(path7.dirname(filePath), { recursive: true });
|
|
1692
|
-
var generatePlaceholder = (
|
|
1693
|
-
writeFileSync(
|
|
1698
|
+
var generatePlaceholder = (path15) => {
|
|
1699
|
+
writeFileSync(path15, `{ "loading": true, "timestamp": ${Date.now()}}`);
|
|
1694
1700
|
};
|
|
1695
1701
|
var finish = (logger, resultPath) => {
|
|
1696
1702
|
logger.success("Done! Docs can be found at:\n");
|
|
1697
1703
|
logger.print(` ${path7.resolve(resultPath)}`);
|
|
1698
1704
|
};
|
|
1699
|
-
var generateDocs = async (specifier,
|
|
1705
|
+
var generateDocs = async (specifier, path15, docgen, logger) => {
|
|
1700
1706
|
const result = await docgen(specifier);
|
|
1701
|
-
await writeFile5(
|
|
1702
|
-
finish(logger,
|
|
1703
|
-
return
|
|
1707
|
+
await writeFile5(path15, JSON.stringify(result, null, 2));
|
|
1708
|
+
finish(logger, path15);
|
|
1709
|
+
return path15;
|
|
1704
1710
|
};
|
|
1705
|
-
var waitForDocs = async (docs,
|
|
1711
|
+
var waitForDocs = async (docs, path15, logger, retryDuration = RETRY_DURATION) => {
|
|
1706
1712
|
try {
|
|
1707
1713
|
if (docs.hasOwnProperty("loading")) {
|
|
1708
1714
|
logger.info("Docs are being loaded by another process. Waiting.");
|
|
@@ -1714,27 +1720,27 @@ var waitForDocs = async (docs, path16, logger, retryDuration = RETRY_DURATION) =
 clearInterval(i);
 reject(new Error("Timed out waiting for docs to load"));
 }
-const updated = JSON.parse(readFileSync(path16, "utf8"));
+const updated = JSON.parse(readFileSync(path15, "utf8"));
 if (!updated.hasOwnProperty("loading")) {
 logger.info("Docs found!");
 clearInterval(i);
-resolve(path16);
+resolve(path15);
 }
 count++;
 }, retryDuration);
 });
 } else {
-logger.info(`Docs already written to cache at ${path16}`);
-finish(logger, path16);
-return path16;
+logger.info(`Docs already written to cache at ${path15}`);
+finish(logger, path15);
+return path15;
 }
 } catch (e) {
 logger.error("Existing doc JSON corrupt. Aborting");
 throw e;
 }
 };
-var docgenHandler = (options, logger, docgen = actualDocGen, retryDuration = RETRY_DURATION) => {
-const { specifier, repoDir } = options;
+var docgenHandler = (options6, logger, docgen = actualDocGen, retryDuration = RETRY_DURATION) => {
+const { specifier, repoDir } = options6;
 const { version } = getNameAndVersion4(specifier);
 if (!version) {
 logger.error("Error: No version number detected");
@@ -1743,28 +1749,28 @@ var docgenHandler = (options, logger, docgen = actualDocGen, retryDuration = RET
 process.exit(9);
 }
 logger.success(`Generating docs for ${specifier}`);
-const path16 = `${repoDir}/docs/${specifier}.json`;
-ensurePath(path16);
+const path15 = `${repoDir}/docs/${specifier}.json`;
+ensurePath(path15);
 const handleError2 = () => {
 logger.info("Removing placeholder");
-rmSync(path16);
+rmSync(path15);
 };
 try {
-const existing = readFileSync(path16, "utf8");
+const existing = readFileSync(path15, "utf8");
 const json = JSON.parse(existing);
 if (json && json.timeout && Date.now() - json.timeout >= TIMEOUT_MS) {
 logger.info(`Expired placeholder found. Removing.`);
-rmSync(path16);
+rmSync(path15);
 throw new Error("TIMEOUT");
 }
-return waitForDocs(json, path16, logger, retryDuration);
+return waitForDocs(json, path15, logger, retryDuration);
 } catch (e) {
 if (e.message !== "TIMEOUT") {
-logger.info(`Docs JSON not found at ${path16}`);
+logger.info(`Docs JSON not found at ${path15}`);
 }
 logger.debug("Generating placeholder");
-generatePlaceholder(path16);
-return generateDocs(specifier, path16, docgen, logger).catch((e2) => {
+generatePlaceholder(path15);
+return generateDocs(specifier, path15, docgen, logger).catch((e2) => {
 logger.error("Error generating documentation");
 logger.error(e2);
 handleError2();
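The docgen hunks above are a mechanical `path16` → `path15` rename in the rebundled output, but they expose the caching protocol clearly: the handler writes a `{ "loading": true, "timestamp": … }` placeholder file as a cross-process lock, polls it while another process generates the docs, and treats a placeholder older than `TIMEOUT_MS` as stale. A condensed, self-contained sketch of that protocol (the polling interval is an assumed value; the real `RETRY_DURATION` constant is defined elsewhere in the bundle):

```js
import { readFileSync, writeFileSync, rmSync } from "node:fs";

const TIMEOUT_MS = 1e3 * 60;
const RETRY_DURATION = 500; // assumed; the real constant lives earlier in the bundle

// Poll the placeholder until another process finishes writing the docs
const waitForDocs = (path) =>
  new Promise((resolve, reject) => {
    const start = Date.now();
    const i = setInterval(() => {
      if (Date.now() - start > TIMEOUT_MS) {
        clearInterval(i);
        return reject(new Error("Timed out waiting for docs to load"));
      }
      const updated = JSON.parse(readFileSync(path, "utf8"));
      if (!updated.loading) {
        clearInterval(i);
        resolve(path);
      }
    }, RETRY_DURATION);
  });

const docgenWithLock = async (path, generate) => {
  try {
    const json = JSON.parse(readFileSync(path, "utf8"));
    if (json.loading && Date.now() - json.timestamp < TIMEOUT_MS) {
      return waitForDocs(path); // another process holds the lock
    }
    if (!json.loading) return path; // docs already cached
    rmSync(path); // stale placeholder: fall through and regenerate
  } catch (e) {
    // no cache file yet
  }
  writeFileSync(path, `{ "loading": true, "timestamp": ${Date.now()}}`);
  return generate(path);
};
```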
@@ -1803,8 +1809,8 @@ ${data.functions.map(
 (fn) => ` ${c.yellow(fn.name)} (${fn.parameters.map((p) => p.name).join(", ")})`
 ).sort().join("\n")}
 `;
-var docsHandler = async (options, logger) => {
-const { adaptor, operation, repoDir } = options;
+var docsHandler = async (options6, logger) => {
+const { adaptor, operation, repoDir } = options6;
 const adaptors = expand_adaptors_default([adaptor]);
 const [adaptorName] = adaptors;
 let { name, version } = getNameAndVersion5(adaptorName);
@@ -1815,7 +1821,7 @@ var docsHandler = async (options, logger) => {
 logger.success(`Showing docs for ${adaptorName} v${version}`);
 }
 logger.info("Generating/loading documentation...");
-const path16 = await handler_default7(
+const path15 = await handler_default7(
 {
 specifier: `${name}@${version}`,
 repoDir
@@ -1824,8 +1830,8 @@ var docsHandler = async (options, logger) => {
 createNullLogger()
 );
 let didError = false;
-if (path16) {
-const source = await readFile4(path16, "utf8");
+if (path15) {
+const source = await readFile4(path15, "utf8");
 const data = JSON.parse(source);
 let desc;
 if (operation) {
@@ -2009,8 +2015,8 @@ var getAdaptorPath = async (adaptor, logger, repoDir) => {
 return adaptorPath;
 };
 var shouldAutoinstall = (adaptor) => adaptor?.length > 0 && !adaptor.startsWith("/") && !adaptor.includes("=");
-var metadataHandler = async (options, logger) => {
-const { repoDir, adaptors, keepUnsupported } = options;
+var metadataHandler = async (options6, logger) => {
+const { repoDir, adaptors, keepUnsupported } = options6;
 let adaptor = adaptors[0];
 if (await isAdaptorUnsupported(adaptor, repoDir)) {
 logger.info(
@@ -2019,7 +2025,7 @@ var metadataHandler = async (options, logger) => {
 logger.error("No metadata helper found");
 process.exit(1);
 }
-const state = await load_state_default({}, options, logger);
+const state = await load_state_default({}, options6, logger);
 logger.success(`Generating metadata`);
 logger.info("config:", state);
 const config2 = state.configuration;
@@ -2032,7 +2038,7 @@ var metadataHandler = async (options, logger) => {
 logger.print(getCachePath2(repoDir, id));
 };
 const id = generateKey(config2, adaptor);
-if (!options.force) {
+if (!options6.force) {
 logger.debug("config hash: ", id);
 const cached = await get2(repoDir, id);
 if (cached) {
@@ -2050,7 +2056,7 @@ var metadataHandler = async (options, logger) => {
 wasAutoInstalled = true;
 adaptor = autoinstallResult[0];
 }
-const adaptorPath = await getAdaptorPath(adaptor, logger, options.repoDir);
+const adaptorPath = await getAdaptorPath(adaptor, logger, options6.repoDir);
 if (!adaptorPath) {
 throw new Error(`Could not resolve adaptor path for ${adaptor}`);
 }
@@ -2092,7 +2098,7 @@ var metadataHandler = async (options, logger) => {
 var handler_default9 = metadataHandler;
 
 // src/pull/handler.ts
-import
+import path12 from "path";
 import fs5 from "node:fs/promises";
 import {
 getConfig as getConfig2,
@@ -2102,94 +2108,552 @@ import {
 syncRemoteSpec
 } from "@openfn/deploy";
 
-// src/
-import
-import fs4 from "node:fs/promises";
-import { rimraf } from "rimraf";
-import { confirm } from "@inquirer/prompts";
-import { getProject } from "@openfn/deploy";
+// src/projects/fetch.ts
+import path10 from "node:path";
 import Project3, { Workspace } from "@openfn/project";
-
+
+// src/util/command-builders.ts
+import c2 from "chalk";
+var expandYargs = (y) => {
+if (typeof y === "function") {
+return y();
+}
+return y;
+};
+function build(opts, yargs) {
+return opts.reduce(
+(_y, o) => yargs.option(o.name, expandYargs(o.yargs)),
+yargs
+);
+}
+var ensure = (command6, opts) => (yargs) => {
+yargs.command = command6;
+opts.filter((opt) => opt.ensure).forEach((opt) => {
+try {
+opt.ensure(yargs);
+} catch (e) {
+console.log(e);
+console.error(
+c2.red(`
+Error parsing command arguments: ${command6}.${opt.name}
+`)
+);
+console.error(c2.red("Aborting"));
+console.error();
+process.exit(9);
+}
+});
+};
+var override = (command6, yargs) => {
+return {
+...command6,
+yargs: {
+...command6.yargs || {},
+...yargs
+}
+};
+};
+
+// src/options.ts
+import nodePath from "node:path";
+
+// src/util/ensure-log-opts.ts
+var defaultLoggerOptions = {
+default: "default",
+// TODO fix to lower case
+job: "debug"
+};
+var ERROR_MESSAGE_LOG_LEVEL = "Unknown log level. Valid levels are none, debug, info and default.";
+var ERROR_MESSAGE_LOG_COMPONENT = "Unknown log component. Valid components are cli, compiler, runtime and job.";
+var componentShorthands = {
+cmp: "compiler",
+rt: "runtime",
+"r/t": "runtime"
+};
+var ensureLogOpts = (opts) => {
+const components = {};
+const outgoingOpts = opts;
+if (!opts.log && /^(version|test)$/.test(opts.command)) {
+outgoingOpts.log = { default: "info" };
+return outgoingOpts;
+}
+if (opts.log) {
+const parts = opts.log.split(",");
+parts.forEach((l) => {
+let component = "";
+let level = "";
+if (l.match(/=/)) {
+const parts2 = l.split("=");
+component = parts2[0].toLowerCase();
+if (componentShorthands[component]) {
+component = componentShorthands[component];
+}
+level = parts2[1].toLowerCase();
+} else {
+component = "default";
+level = l.toLowerCase();
+if (level === "none" && !parts.find((p) => p.startsWith("job"))) {
+components["job"] = "none";
+}
+}
+if (!/^(cli|runtime|compiler|job|default)$/i.test(component)) {
+throw new Error(ERROR_MESSAGE_LOG_COMPONENT);
+}
+level = level.toLowerCase();
+if (!isValidLogLevel(level)) {
+throw new Error(ERROR_MESSAGE_LOG_LEVEL);
+}
+components[component] = level;
+});
+}
+outgoingOpts.log = {
+...defaultLoggerOptions,
+...components
+};
+return outgoingOpts;
+};
+var ensure_log_opts_default = ensureLogOpts;
+
+// src/util/get-cli-option-object.ts
+function getCLIOptionObject(arg) {
+if (isObject(arg)) {
+return arg;
+} else if (typeof arg === "string") {
+try {
+const p = JSON.parse(arg);
+if (isObject(p))
+return p;
+} catch (e) {
+}
+return Object.fromEntries(
+arg.split(",").map((pair) => {
+const [k, v] = pair.split("=");
+return [k.trim(), v.trim()];
+})
+);
+}
+}
+function isObject(arg) {
+return typeof arg === "object" && arg !== null && !Array.isArray(arg);
+}
+
+// src/options.ts
+var setDefaultValue = (opts, key, value) => {
+const v = opts[key];
+if (isNaN(v) && !v) {
+opts[key] = value;
+}
+};
+var apikey = {
+name: "apikey",
+yargs: {
+alias: ["key", "pat", "token"],
+description: "[beta only] API Key, Personal Access Token (Pat), or other access token"
+}
+};
+var configPath = {
+name: "config",
+yargs: {
+alias: ["c", "config-path"],
+description: "The location of your config file",
+default: "./.config.json"
+}
+};
+var endpoint = {
+name: "endpoint",
+yargs: {
+alias: ["lightning"],
+description: "[beta only] URL to Lightning endpoint"
+}
+};
+var env = {
+name: "env",
+yargs: {
+description: "[beta only] Environment name (eg staging, prod, branch)"
+}
+};
+var force = {
+name: "force",
+yargs: {
+alias: ["f"],
+boolean: true,
+description: "Force metadata to be regenerated",
+default: false
+}
+};
+var getBaseDir = (opts) => {
+const basePath = opts.path ?? ".";
+if (/\.(jso?n?|ya?ml)$/.test(basePath)) {
+return nodePath.dirname(basePath);
+}
+return basePath;
+};
+var projectId = {
+name: "project-id",
+yargs: {
+description: "The id or UUID of an openfn project",
+string: true
+},
+ensure: (opts) => {
+return opts.projectName;
+}
+};
+var log = {
+name: "log",
+yargs: {
+alias: ["l"],
+description: "Set the log level",
+string: true
+},
+ensure: (opts) => {
+ensure_log_opts_default(opts);
+}
+};
+var logJson = {
+name: "log-json",
+yargs: {
+description: "Output all logs as JSON objects",
+boolean: true
+}
+};
+var outputPath = {
+name: "output-path",
+yargs: {
+alias: "o",
+description: "Path to the output file"
+},
+ensure: (opts) => {
+if (/^(compile|apollo)$/.test(opts.command)) {
+if (opts.outputPath) {
+delete opts.outputStdout;
+}
+} else {
+if (opts.outputStdout) {
+delete opts.outputPath;
+} else {
+const base = getBaseDir(opts);
+setDefaultValue(opts, "outputPath", nodePath.join(base, "output.json"));
+}
+}
+delete opts.o;
+}
+};
+var snapshots = {
+name: "snapshots",
+yargs: {
+description: "List of snapshot ids to pull",
+array: true
+}
+};
+var statePath = {
+name: "state-path",
+yargs: {
+alias: ["s"],
+description: "Path to the state file"
+},
+ensure: (opts) => {
+delete opts.s;
+}
+};
+var timeout = {
+name: "timeout",
+yargs: {
+alias: ["t"],
+number: true,
+description: "Set the timeout duration (ms). Defaults to 5 minutes.",
+default: 5 * 60 * 1e3
+}
+};
+var workflow = {
+name: "workflow",
+yargs: {
+string: true,
+description: "Name of the workflow to execute"
+}
+};
+var removeUnmapped = {
+name: "remove-unmapped",
+yargs: {
+boolean: true,
+description: "Removes all workflows that didn't get mapped from the final project after merge"
+}
+};
+var workflowMappings = {
+name: "workflow-mappings",
+yargs: {
+type: "string",
+coerce: getCLIOptionObject,
+description: "A manual object mapping of which workflows in source and target should be matched for a merge."
+}
+};
+var workspace = {
+name: "workspace",
+yargs: {
+alias: ["w"],
+description: "Path to the project workspace (ie, path to openfn.yaml)"
+},
+ensure: (opts) => {
+const ws = opts.workspace ?? process.env.OPENFN_WORKSPACE;
+if (!ws) {
+opts.workspace = process.cwd();
+} else {
+opts.workspace = nodePath.resolve(ws);
+}
+}
+};
+
+// src/projects/util.ts
+import path9 from "node:path";
+import { mkdir as mkdir4, writeFile as writeFile7 } from "node:fs/promises";
+
+// src/errors.ts
+var CLIError = class extends Error {
+constructor(message) {
+super(message);
+}
+};
+
+// src/projects/util.ts
+var loadAppAuthConfig = (options6, logger) => {
 const { OPENFN_API_KEY, OPENFN_ENDPOINT } = process.env;
-const
-apiKey: options.apiKey,
-endpoint: options.endpoint
+const config2 = {
+apiKey: options6.apiKey,
+endpoint: options6.endpoint
 };
-if (!options.apiKey && OPENFN_API_KEY) {
+if (!options6.apiKey && OPENFN_API_KEY) {
 logger.info("Using OPENFN_API_KEY environment variable");
-
+config2.apiKey = OPENFN_API_KEY;
 }
-if (!options.endpoint && OPENFN_ENDPOINT) {
+if (!options6.endpoint && OPENFN_ENDPOINT) {
 logger.info("Using OPENFN_ENDPOINT environment variable");
-
+config2.endpoint = OPENFN_ENDPOINT;
+}
+return config2;
+};
+var ensureExt = (filePath, ext) => {
+if (!filePath.endsWith(ext)) {
+return `${filePath}.${ext}`;
+}
+return filePath;
+};
+var serialize = async (project, outputPath2, formatOverride, dryRun = false) => {
+const root = path9.dirname(outputPath2);
+await mkdir4(root, { recursive: true });
+const format = formatOverride ?? project.config?.formats.project;
+const output = project?.serialize("project", { format });
+const maybeWriteFile = (filePath, output2) => {
+if (!dryRun) {
+return writeFile7(filePath, output2);
+}
+};
+let finalPath;
+if (format === "yaml") {
+finalPath = ensureExt(outputPath2, "yaml");
+await maybeWriteFile(finalPath, output);
+} else {
+finalPath = ensureExt(outputPath2, "json");
+await maybeWriteFile(finalPath, JSON.stringify(output, null, 2));
+}
+return finalPath;
+};
+var getLightningUrl = (config2, path15 = "", snapshots2) => {
+const params = new URLSearchParams();
+snapshots2?.forEach((snapshot) => params.append("snapshots[]", snapshot));
+return new URL(
+`/api/provision/${path15}?${params.toString()}`,
+config2.endpoint
+);
+};
+async function getProject(logger, config2, projectId2, snapshots2) {
+const url2 = getLightningUrl(config2, projectId2, snapshots2);
+logger.info(`Checking ${url2} for existing project`);
+try {
+const response = await fetch(url2, {
+headers: {
+Authorization: `Bearer ${config2.apiKey}`,
+Accept: "application/json"
+}
+});
+if (!response.ok) {
+if (response.status === 401 || response.status === 403) {
+throw new CLIError(
+`Failed to authorize request with endpoint ${config2.endpoint}, got ${response.status} ${response.statusText}`
+);
+}
+if (response.status === 404) {
+throw new CLIError(`Project not found: ${projectId2}`);
+}
+throw new CLIError(
+`Failed to fetch project ${projectId2}: ${response.statusText}`
+);
+}
+logger.info("Project found");
+return response.json();
+} catch (error) {
+handleCommonErrors(config2, error);
+throw error;
+}
+}
+function handleCommonErrors(config2, error) {
+if (error.cause?.code === "ECONNREFUSED") {
+throw new DeployError2(
+`Failed to connect to endpoint ${config2.endpoint}, got ECONNREFUSED.`
+);
+}
+}
+var DeployError2 = class extends Error {
+constructor(message) {
+super(message);
 }
-
-
-
-
-
+};
+
+// src/projects/fetch.ts
+var options = [
+apikey,
+configPath,
+endpoint,
+env,
+log,
+override(outputPath, {
+description: "Path to output the fetched project to"
+}),
+logJson,
+workspace,
+snapshots,
+statePath,
+override(force, {
+description: "Overwrite local file contents with the fetched contents"
+})
+];
+var command = {
+command: "fetch [projectId]",
+describe: `Fetch a project's state and spec from a Lightning Instance to the local state file without expanding to the filesystem.`,
+builder: (yargs) => build(options, yargs).positional("projectId", {
+describe: "The id of the project that should be fetched, should be a UUID",
+demandOption: true
+}).example(
+"fetch 57862287-23e6-4650-8d79-e1dd88b24b1c",
+"Fetch an updated copy of a the above spec and state from a Lightning Instance"
+),
+handler: ensure("project-fetch", options)
+};
+var handler2 = async (options6, logger) => {
+const workspacePath = path10.resolve(options6.workspace ?? process.cwd());
+const workspace2 = new Workspace(workspacePath);
+const { projectId: projectId2, outputPath: outputPath2 } = options6;
+const config2 = loadAppAuthConfig(options6, logger);
+const { data } = await getProject(logger, config2, projectId2);
 const project = await Project3.from(
 "state",
 data,
 {
-endpoint:
-env:
-// TODO this is NOT an openfn metadata key
-// (it should not be sent back to lighting)
-// should add it to the local or meta objects instead
-fetched_at: (/* @__PURE__ */ new Date()).toISOString()
+endpoint: config2.endpoint,
+env: options6.env || "project"
 },
-
+workspace2.getConfig()
 );
+const outputRoot = path10.resolve(outputPath2 || workspacePath);
 const projectFileName = project.getIdentifier();
-
-const
-
-
-
+const projectsDir = project.config.dirs.projects ?? ".projects";
+const finalOutputPath = outputPath2 ?? `${outputRoot}/${projectsDir}/${projectFileName}`;
+let format = void 0;
+if (outputPath2) {
+const ext = path10.extname(outputPath2).substring(1);
+if (ext.length) {
+format = ext;
+}
+}
+const finalOutput = await serialize(
+project,
+finalOutputPath,
+format,
+true
+// dry run - this won't trigger an actual write!
 );
-
-
-
-
-`,
-default: true
-})) {
-logger.always("Cancelled");
-return false;
+let current = null;
+try {
+current = await Project3.from("path", finalOutput);
+} catch (e) {
 }
-
-
-
-
+const hasAnyHistory = project.workflows.find(
+(w) => w.workflow.history?.length
+);
+const skipVersionCheck = options6.force || // The user forced the checkout
+!current || // there is no project on disk
+!hasAnyHistory;
+if (!skipVersionCheck && !project.canMergeInto(current)) {
+throw new Error("Error! An incompatible project exists at this location");
+}
+await serialize(project, finalOutputPath, format);
+logger.success(`Fetched project file to ${finalOutput}`);
+return project;
+};
+
+// src/projects/checkout.ts
+import Project4, { Workspace as Workspace2 } from "@openfn/project";
+import path11 from "path";
+import fs4 from "fs";
+import { rimraf } from "rimraf";
+var options2 = [projectId, workspace, log];
+var command2 = {
+command: "checkout <project-id>",
+describe: "Switch to a different OpenFn project in the same workspace",
+handler: ensure("project-checkout", options2),
+builder: (yargs) => build(options2, yargs)
+};
+var handler3 = async (options6, logger) => {
+const projectId2 = options6.projectId;
+const workspacePath = options6.workspace ?? process.cwd();
+const workspace2 = new Workspace2(workspacePath, logger);
+const { project: _, ...config2 } = workspace2.getConfig();
+let switchProject;
+if (/\.(yaml|json)$/.test(projectId2)) {
+const filePath = projectId2.startsWith("/") ? projectId2 : path11.join(workspacePath, projectId2);
+logger.debug("Loading project from path ", filePath);
+switchProject = await Project4.from("path", filePath, config2);
 } else {
-
-
-
-);
+switchProject = workspace2.get(projectId2);
+}
+if (!switchProject) {
+throw new Error(`Project with id ${projectId2} not found in the workspace`);
 }
-
-const files =
+await rimraf(path11.join(workspacePath, config2.workflowRoot ?? "workflows"));
+const files = switchProject.serialize("fs");
 for (const f in files) {
 if (files[f]) {
-
+fs4.mkdirSync(path11.join(workspacePath, path11.dirname(f)), {
 recursive: true
 });
-
+fs4.writeFileSync(path11.join(workspacePath, f), files[f]);
 } else {
-
+logger.warn("WARNING! No content for file", f);
 }
 }
-logger.success(`Expanded project to ${
+logger.success(`Expanded project to ${workspacePath}`);
+};
+
+// src/projects/pull.ts
+async function handler4(options6, logger) {
+const project = await handler2(options6, logger);
+logger.success(`Downloaded latest project version`);
+await handler3(
+{
+...options6,
+projectId: project.id
+},
+logger
+);
+logger.success(`Checked out project locally`);
 }
+var pull_default = handler4;
 
 // src/pull/handler.ts
-async function pullHandler(options, logger) {
-if (options.beta) {
-return
+async function pullHandler(options6, logger) {
+if (options6.beta) {
+return pull_default(options6, logger);
 }
 try {
-const config2 = mergeOverrides2(await getConfig2(options.configPath), options);
+const config2 = mergeOverrides2(await getConfig2(options6.configPath), options6);
 if (process.env["OPENFN_API_KEY"]) {
 logger.info("Using OPENFN_API_KEY environment variable");
 config2.apiKey = process.env["OPENFN_API_KEY"];
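Among the additions above, `getLightningUrl` shows how the beta fetch command addresses the Lightning provisioning API: snapshot ids are appended as repeated `snapshots[]` query parameters. The function body below is taken verbatim from the diff; the endpoint value is just an example:

```js
const getLightningUrl = (config, path = "", snapshots) => {
  const params = new URLSearchParams();
  snapshots?.forEach((snapshot) => params.append("snapshots[]", snapshot));
  return new URL(`/api/provision/${path}?${params.toString()}`, config.endpoint);
};

const url = getLightningUrl(
  { endpoint: "https://app.openfn.org" }, // example endpoint
  "57862287-23e6-4650-8d79-e1dd88b24b1c",
  ["snap-1", "snap-2"]
);
console.log(url.toString());
// https://app.openfn.org/api/provision/57862287-23e6-4650-8d79-e1dd88b24b1c?snapshots%5B%5D=snap-1&snapshots%5B%5D=snap-2
```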
@@ -2203,8 +2667,8 @@ async function pullHandler(options, logger) {
 );
 const { data: project } = await getProject2(
 config2,
-options.projectId,
-options.snapshots
+options6.projectId,
+options6.snapshots
 );
 if (!project) {
 logger.error("ERROR: Project not found.");
@@ -2217,8 +2681,8 @@ async function pullHandler(options, logger) {
 const state = getStateFromProjectPayload(project);
 logger.always("Downloading the project spec (as YAML) from the server.");
 const queryParams = new URLSearchParams();
-queryParams.append("id", options.projectId);
-options.snapshots?.forEach(
+queryParams.append("id", options6.projectId);
+options6.snapshots?.forEach(
 (snapshot) => queryParams.append("snapshots[]", snapshot)
 );
 const url2 = new URL(
@@ -2240,7 +2704,7 @@ async function pullHandler(options, logger) {
 process.exitCode = 1;
 process.exit(1);
 }
-const resolvedPath =
+const resolvedPath = path12.resolve(config2.specPath);
 logger.debug("reading spec from", resolvedPath);
 const updatedSpec = await syncRemoteSpec(
 await res.text(),
@@ -2249,7 +2713,7 @@ async function pullHandler(options, logger) {
 logger
 );
 await fs5.writeFile(
-
+path12.resolve(config2.statePath),
 JSON.stringify(state, null, 2)
 );
 await fs5.writeFile(resolvedPath, updatedSpec);
@@ -2267,13 +2731,13 @@ async function pullHandler(options, logger) {
 throw error;
 }
 }
-function mergeOverrides2(config2, options) {
+function mergeOverrides2(config2, options6) {
 return {
 ...config2,
 apiKey: pickFirst2(process.env["OPENFN_API_KEY"], config2.apiKey),
 endpoint: pickFirst2(process.env["OPENFN_ENDPOINT"], config2.endpoint),
-configPath: options.configPath,
-requireConfirmation: pickFirst2(options.confirm, config2.requireConfirmation)
+configPath: options6.configPath,
+requireConfirmation: pickFirst2(options6.confirm, config2.requireConfirmation)
 };
 }
 function pickFirst2(...args) {
@@ -2282,18 +2746,36 @@ function pickFirst2(...args) {
 var handler_default10 = pullHandler;
 
 // src/projects/handler.ts
-
-
-
-
-
-
-
-
+var handler_exports = {};
+__export(handler_exports, {
+checkout: () => handler3,
+fetch: () => handler2,
+list: () => handler5,
+merge: () => handler7,
+version: () => handler6
+});
+
+// src/projects/list.ts
+import { Workspace as Workspace3 } from "@openfn/project";
+var options3 = [log, workspace];
+var command3 = {
+command: "list [project-path]",
+describe: "List all the openfn projects available in the current directory",
+aliases: ["project", "$0"],
+handler: ensure("project-list", options3),
+builder: (yargs) => build(options3, yargs)
+};
+var handler5 = async (options6, logger) => {
+logger.info("Searching for projects in workspace at:");
+logger.info(" ", options6.workspace);
+logger.break();
+const workspace2 = new Workspace3(options6.workspace);
+if (!workspace2.valid) {
+throw new Error("No OpenFn projects found");
 }
-logger.
+logger.always(`Available openfn projects
 
-${
+${workspace2.list().map((p) => describeProject(p, p.id === workspace2.activeProjectId)).join("\n\n")}
 `);
 };
 function describeProject(project, active = false) {
@@ -2303,27 +2785,31 @@ function describeProject(project, active = false) {
 workflows:
 ${project.workflows.map((w) => " - " + w.id).join("\n")}`;
 }
-var handler_default11 = projectsHandler;
 
-// src/version
-import { Workspace as
-
-var
-
-
-
+// src/projects/version.ts
+import { Workspace as Workspace4 } from "@openfn/project";
+var options4 = [workflow, workspace, workflowMappings];
+var command4 = {
+command: "version [workflow]",
+describe: "Returns the version hash of a given workflow in a workspace",
+handler: ensure("project-version", options4),
+builder: (yargs) => build(options4, yargs)
+};
+var handler6 = async (options6, logger) => {
+const workspace2 = new Workspace4(options6.workspace);
+if (!workspace2.valid) {
 logger.error("Command was run in an invalid openfn workspace");
 return;
 }
 const output = /* @__PURE__ */ new Map();
-const activeProject =
-if (options.workflow) {
-const
-if (!
-logger.error(`No workflow found with id ${options.workflow}`);
+const activeProject = workspace2.getActiveProject();
+if (options6.workflow) {
+const workflow2 = activeProject?.getWorkflow(options6.workflow);
+if (!workflow2) {
+logger.error(`No workflow found with id ${options6.workflow}`);
 return;
 }
-output.set(
+output.set(workflow2.name || workflow2.id, workflow2.getVersionHash());
 } else {
 for (const wf of activeProject?.workflows || []) {
 output.set(wf.name || wf.id, wf.getVersionHash());
@@ -2334,7 +2820,7 @@ var workflowVersionHandler = async (options, logger) => {
 return;
 }
 let final;
-if (options.json) {
+if (options6.json) {
 final = JSON.stringify(Object.fromEntries(output), void 0, 2);
 } else {
 final = Array.from(output.entries()).map(([key, value]) => key + "\n" + value).join("\n\n");
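The version handler accumulates `name → hash` pairs in a Map and then formats them one of two ways, as the branch above shows. A standalone trace with made-up hashes:

```js
const output = new Map([
  ["sync-patients", "1a2b3c"],
  ["load-facilities", "4d5e6f"]
]);

// With --json:
console.log(JSON.stringify(Object.fromEntries(output), void 0, 2));
// {
//   "sync-patients": "1a2b3c",
//   "load-facilities": "4d5e6f"
// }

// Default, human-readable form:
console.log(
  Array.from(output.entries()).map(([key, value]) => key + "\n" + value).join("\n\n")
);
// sync-patients
// 1a2b3c
//
// load-facilities
// 4d5e6f
```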
@@ -2343,68 +2829,56 @@ var workflowVersionHandler = async (options, logger) => {
 
 ${final}`);
 };
-var handler_default12 = workflowVersionHandler;
-
-// src/checkout/handler.ts
-import Project5, { Workspace as Workspace4 } from "@openfn/project";
-import path13 from "path";
-import fs6 from "fs";
-import { rimraf as rimraf2 } from "rimraf";
-var checkoutHandler = async (options, logger) => {
-const commandPath = path13.resolve(options.projectPath ?? ".");
-const workspace = new Workspace4(commandPath);
-if (!workspace.valid) {
-logger.error("Command was run in an invalid openfn workspace");
-return;
-}
-const { project: _, ...config2 } = workspace.getConfig();
-let switchProject;
-if (/\.(yaml|json)$/.test(options.projectId)) {
-const filePath = options.projectId.startsWith("/") ? options.projectId : path13.join(commandPath, options.projectId);
-logger.debug("Loading project from path ", filePath);
-switchProject = await Project5.from("path", filePath, config2);
-} else {
-switchProject = workspace.get(options.projectId);
-}
-if (!switchProject) {
-throw new Error(
-`Project with id ${options.projectId} not found in the workspace`
-);
-}
-await rimraf2(path13.join(commandPath, config2.workflowRoot ?? "workflows"));
-const files = switchProject.serialize("fs");
-for (const f in files) {
-if (files[f]) {
-fs6.mkdirSync(path13.join(commandPath, path13.dirname(f)), {
-recursive: true
-});
-fs6.writeFileSync(path13.join(commandPath, f), files[f]);
-} else {
-logger.warn("WARNING! No content for file", f);
-}
-}
-logger.success(`Expanded project to ${commandPath}`);
-};
-var handler_default13 = checkoutHandler;
 
-// src/merge
+// src/projects/merge.ts
 import Project6, { Workspace as Workspace5 } from "@openfn/project";
-import
-import
-var
-
-
-
+import path13 from "node:path";
+import fs6 from "node:fs/promises";
+var options5 = [
+projectId,
+removeUnmapped,
+workflowMappings,
+log,
+workspace,
+// custom output because we don't want defaults or anything
+{
+name: "output-path",
+yargs: {
+alias: "o",
+description: "Optionally write the merged project file to a custom location"
+}
+},
+{
+name: "base",
+yargs: {
+alias: "target",
+description: "Path to the base (target) state file to merge into (ie, what main should be)"
+}
+},
+override(force, {
+description: "Force a merge even when workflows are incompatible"
+})
+];
+var command5 = {
+command: "merge <project-id>",
+describe: "Merges the specified project into the currently checked out project",
+handler: ensure("project-merge", options5),
+builder: (yargs) => build(options5, yargs)
+};
+var handler7 = async (options6, logger) => {
+const commandPath = options6.workspace;
+const workspace2 = new Workspace5(commandPath);
+if (!workspace2.valid) {
 logger.error("Command was run in an invalid openfn workspace");
 return;
 }
 let targetProject;
-if (options.base) {
-const basePath =
+if (options6.base) {
+const basePath = path13.resolve(options6.base);
 logger.debug("Loading target project from path", basePath);
 targetProject = await Project6.from("path", basePath);
 } else {
-targetProject =
+targetProject = workspace2.getActiveProject();
 if (!targetProject) {
 logger.error(`No project currently checked out`);
 return;
@@ -2412,16 +2886,16 @@ var mergeHandler = async (options, logger) => {
 logger.debug(`Loading target project from workspace (${targetProject.id})`);
 }
 let sourceProject;
-if (/\.(yaml|json)$/.test(options.projectId)) {
-const filePath =
+if (/\.(yaml|json)$/.test(options6.projectId)) {
+const filePath = path13.join(commandPath, options6.projectId);
 logger.debug("Loading source project from path ", filePath);
 sourceProject = await Project6.from("path", filePath);
 } else {
-logger.debug(`Loading source project from workspace ${options.projectId}`);
-sourceProject =
+logger.debug(`Loading source project from workspace ${options6.projectId}`);
+sourceProject = workspace2.get(options6.projectId);
 }
 if (!sourceProject) {
-logger.error(`Project "${options.projectId}" not found in the workspace`);
+logger.error(`Project "${options6.projectId}" not found in the workspace`);
 return;
 }
 if (targetProject.id === sourceProject.id) {
@@ -2432,20 +2906,20 @@ var mergeHandler = async (options, logger) => {
 logger.error("The checked out project has no id");
 return;
 }
-const finalPath =
+const finalPath = options6.outputPath ?? workspace2.getProjectPath(targetProject.id);
 if (!finalPath) {
 logger.error("Path to checked out project not found.");
 return;
 }
 const final = Project6.merge(sourceProject, targetProject, {
-removeUnmapped: options.removeUnmapped,
-workflowMappings: options.workflowMappings,
-force: options.force
+removeUnmapped: options6.removeUnmapped,
+workflowMappings: options6.workflowMappings,
+force: options6.force
 });
-let outputFormat =
-if (options.outputPath?.endsWith(".json")) {
+let outputFormat = workspace2.config.formats.project;
+if (options6.outputPath?.endsWith(".json")) {
 outputFormat = "json";
-} else if (options.outputPath?.endsWith(".yaml")) {
+} else if (options6.outputPath?.endsWith(".yaml")) {
 outputFormat = "yaml";
 }
 let finalState = final.serialize("state", {
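The merge handler picks the serialization format from the `--output-path` extension and only falls back to the workspace's configured default when no extension forces a choice. Condensed from the branch above (the workspace shape is assumed from how it is read here):

```js
// Assumed shape: the project format default comes from the workspace config
const ws = { config: { formats: { project: "yaml" } } };

const pickOutputFormat = (outputPath, workspace) => {
  if (outputPath?.endsWith(".json")) return "json";
  if (outputPath?.endsWith(".yaml")) return "yaml";
  return workspace.config.formats.project;
};

console.log(pickOutputFormat("merged.json", ws)); // "json"
console.log(pickOutputFormat(undefined, ws)); // "yaml" (workspace default)
```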
@@ -2454,27 +2928,26 @@ var mergeHandler = async (options, logger) => {
 if (outputFormat === "json") {
 finalState = JSON.stringify(finalState, null, 2);
 }
-await
+await fs6.writeFile(finalPath, finalState);
 logger.info(`Updated statefile at `, finalPath);
 logger.info("Checking out merged project to filesystem");
-await
+await handler3(
 {
-command: "checkout",
-
-projectId: options.outputPath ? finalPath : final.id,
-log: options.log
+command: "project-checkout",
+workspace: commandPath,
+projectId: options6.outputPath ? finalPath : final.id,
+log: options6.log
 },
 logger
 );
 logger.success(
-`Project ${sourceProject.id} has been merged into Project ${targetProject.id}
+`Project ${sourceProject.id} has been merged into Project ${targetProject.id}`
 );
 };
-var handler_default14 = mergeHandler;
 
 // src/util/print-versions.ts
 import { readFileSync as readFileSync2 } from "node:fs";
-import
+import path14 from "node:path";
 import url from "node:url";
 import { getNameAndVersion as getNameAndVersion7 } from "@openfn/runtime";
 import { mainSymbols } from "figures";
@@ -2486,15 +2959,15 @@ var { triangleRightSmall: t } = mainSymbols;
 var loadVersionFromPath = (adaptorPath) => {
 try {
 const pkg = JSON.parse(
-readFileSync2(
+readFileSync2(path14.resolve(adaptorPath, "package.json"), "utf8")
 );
 return pkg.version;
 } catch (e) {
 return "unknown";
 }
 };
-var printVersions = async (logger, options = {}, includeComponents = false) => {
-const { adaptors, logJson } = options;
+var printVersions = async (logger, options6 = {}, includeComponents = false) => {
+const { adaptors, logJson: logJson2 } = options6;
 let longestAdaptorName = "";
 const adaptorList = [];
 adaptors?.forEach((adaptor) => {
@@ -2504,7 +2977,7 @@ var printVersions = async (logger, options = {}, includeComponents = false) => {
 const [namePart, pathPart] = adaptor.split("=");
 adaptorVersion = loadVersionFromPath(pathPart);
 adaptorName = getNameAndVersion7(namePart).name;
-} else if (options.monorepoPath) {
+} else if (options6.monorepoPath) {
 adaptorName = getNameAndVersion7(adaptor).name;
 adaptorVersion = "monorepo";
 } else {
@@ -2521,13 +2994,13 @@ var printVersions = async (logger, options = {}, includeComponents = false) => {
 ...[NODE, CLI2, RUNTIME2, COMPILER2, longestAdaptorName].map((s) => s.length)
 );
 const prefix = (str) => ` ${t} ${str.padEnd(longest + 4, " ")}`;
-const dirname3 =
+const dirname3 = path14.dirname(url.fileURLToPath(import.meta.url));
 const pkg = JSON.parse(readFileSync2(`${dirname3}/../../package.json`, "utf8"));
 const { version, dependencies } = pkg;
 const compilerVersion = dependencies["@openfn/compiler"];
 const runtimeVersion = dependencies["@openfn/runtime"];
 let output;
-if (logJson) {
+if (logJson2) {
 output = {
 versions: {
 "node.js": process.version.substring(1),
@@ -2566,18 +3039,18 @@ var print_versions_default = printVersions;
 // src/env.ts
 import { config } from "dotenv";
 import { expand as expand2 } from "dotenv-expand";
-var env;
+var env2;
 var report = (logger) => {
 let envs = [];
 if (process.env.$DOT_ENV_OVERRIDES) {
 envs = process.env.$DOT_ENV_OVERRIDES.split(",").map((s) => s.trim());
 } else {
-envs = Object.keys(env?.parsed ?? {});
+envs = Object.keys(env2?.parsed ?? {});
 }
 if (envs.length) {
 logger?.always(`Imported ${envs.length} env vars from .env file`);
 logger?.debug("Envs set from .env: ", envs.join(", "));
-} else if (env && env.error) {
+} else if (env2 && env2.error) {
 logger?.debug(".env not found");
 }
 };
@@ -2593,10 +3066,8 @@ var handlers = {
 docs: handler_default8,
 metadata: handler_default9,
 pull: handler_default10,
-projects:
-
-merge: handler_default14,
-project: handler_default12,
+projects: handler_exports,
+project: handler_exports,
 ["collections-get"]: handler_default4.get,
 ["collections-set"]: handler_default4.set,
 ["collections-remove"]: handler_default4.remove,
@@ -2604,15 +3075,20 @@ var handlers = {
 ["repo-install"]: install,
 ["repo-pwd"]: pwd,
 ["repo-list"]: list,
+["project-list"]: handler5,
+["project-version"]: handler6,
+["project-merge"]: handler7,
+["project-checkout"]: handler3,
+["project-fetch"]: handler2,
 version: async (opts, logger) => print_versions_default(logger, opts, true)
 };
-var parse = async (options, log) => {
-const logger = log || logger_default(CLI, options);
-if (options.command === "execute" || options.command === "test") {
-await print_versions_default(logger, options);
+var parse = async (options6, log2) => {
+const logger = log2 || logger_default(CLI, options6);
+if (options6.command === "execute" || options6.command === "test") {
+await print_versions_default(logger, options6);
 }
 report(logger);
-const { monorepoPath } = options;
+const { monorepoPath } = options6;
 if (monorepoPath) {
 if (monorepoPath === "ERR") {
 logger.error(
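The new subcommands plug into the existing dispatch model: `ensure("project-…", options)` stamps a namespaced command name onto the parsed argv, and `parse` resolves it through the `handlers` map. Roughly (handler bodies stubbed for illustration):

```js
// Stub handlers standing in for handler2/handler3 in the bundle
const fetchHandler = async (opts, logger) => logger.info("fetch", opts.projectId);
const checkoutHandler = async (opts, logger) => logger.info("checkout", opts.projectId);

const handlers = {
  "project-fetch": fetchHandler,
  "project-checkout": checkoutHandler
  // ...plus list, version and merge
};

const parse = async (options, logger) => {
  const handler = handlers[options.command];
  if (!handler) {
    logger.error(`Unrecognised command: ${options.command}`);
    process.exit(1);
  }
  return handler(options, logger);
};

// ensure("project-fetch", ...) has set options.command before we get here
parse({ command: "project-fetch", projectId: "57862287" }, console);
```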
@@ -2623,19 +3099,19 @@ var parse = async (options, log) => {
 }
 await validateMonoRepo(monorepoPath, logger);
 logger.success(`Loading adaptors from monorepo at ${monorepoPath}`);
-options.adaptors = map_adaptors_to_monorepo_default(
+options6.adaptors = map_adaptors_to_monorepo_default(
 monorepoPath,
-options.adaptors,
+options6.adaptors,
 logger
 );
 }
-const handler = handlers[options.command];
-if (!handler) {
-logger.error(`Unrecognised command: ${options.command}`);
+const handler8 = handlers[options6.command];
+if (!handler8) {
+logger.error(`Unrecognised command: ${options6.command}`);
 process.exit(1);
 }
 try {
-return await handler(options, logger);
+return await handler8(options6, logger);
 } catch (e) {
 if (!process.exitCode) {
 process.exitCode = e.exitCode || 1;
@@ -2649,7 +3125,11 @@ var parse = async (options, log) => {
 } else {
 logger.break();
 logger.error("Command failed!");
-logger.error(e);
+if (e instanceof CLIError) {
+logger.error(e.message);
+} else {
+logger.error(e);
+}
 }
 }
 };