@openfn/cli 1.20.2 → 1.21.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/index.js +188 -117
- package/dist/process/runner.js +787 -572
- package/package.json +8 -8
package/dist/process/runner.js
CHANGED
@@ -21,13 +21,13 @@ var urlMap = {
   ["local"]: LOCAL_URL
 };
 var DEFAULT_ENV = "staging";
-var getURL = (options6) => {
-  if (options6.apolloUrl) {
-    if (options6.apolloUrl in urlMap) {
-      return urlMap[options6.apolloUrl];
+var getURL = (options7) => {
+  if (options7.apolloUrl) {
+    if (options7.apolloUrl in urlMap) {
+      return urlMap[options7.apolloUrl];
     }
-    if (options6.apolloUrl.startsWith("http")) {
-      return options6.apolloUrl;
+    if (options7.apolloUrl.startsWith("http")) {
+      return options7.apolloUrl;
     }
     throw new Error(`Unrecognised apollo URL`);
   }
@@ -52,14 +52,14 @@ var outputFiles = (files, logger) => {
 };
 
 // src/apollo/handler.ts
-var apolloHandler = async (options6, logger) => {
-  logger.always(`Calling Apollo service: ${options6.service}`);
-  const json = await loadPayload(logger, options6.payload);
-  const url2 = getURL(options6);
+var apolloHandler = async (options7, logger) => {
+  logger.always(`Calling Apollo service: ${options7.service}`);
+  const json = await loadPayload(logger, options7.payload);
+  const url2 = getURL(options7);
   logger.success(`Using apollo server at`, url2);
-  const result = await callApollo(url2, options6.service, json, logger);
+  const result = await callApollo(url2, options7.service, json, logger);
   if (result) {
-    await serializeOutput(options6, result, logger);
+    await serializeOutput(options7, result, logger);
   } else {
     logger.warn("No output returned from Apollo");
   }
@@ -79,15 +79,15 @@ var write = async (basePath, filePath, content, logger) => {
   await writeFile(dest, content);
   logger.success(`Wrote content to ${dest}`);
 };
-var serializeOutput = async (options6, result, logger) => {
-  if (options6.outputPath) {
-    if (result.files && !options6.outputPath.endsWith(".json")) {
+var serializeOutput = async (options7, result, logger) => {
+  if (options7.outputPath) {
+    if (result.files && !options7.outputPath.endsWith(".json")) {
       for (const p in result.files) {
-        await write(options6.outputPath, p, result.files[p], logger);
+        await write(options7.outputPath, p, result.files[p], logger);
       }
     } else {
       await write(
-        options6.outputPath,
+        options7.outputPath,
         "",
         JSON.stringify(result, null, 2),
         logger
@@ -133,14 +133,14 @@ var callApollo = async (apolloBaseUrl, serviceName, payload, logger) => {
     });
   });
 };
-var loadPayload = async (logger, path15) => {
-  if (!path15) {
+var loadPayload = async (logger, path17) => {
+  if (!path17) {
     logger.warn("No JSON payload provided");
     logger.warn("Most apollo services require JSON to be uploaded");
     return {};
   }
-  if (path15.endsWith(".json")) {
-    const str = await readFile(path15, "utf8");
+  if (path17.endsWith(".json")) {
+    const str = await readFile(path17, "utf8");
     const json = JSON.parse(str);
     logger.debug("Loaded JSON payload");
     return json;
@@ -148,6 +148,11 @@ var loadPayload = async (logger, path15) => {
 };
 var handler_default = apolloHandler;
 
+// src/execute/handler.ts
+import { yamlToJson as yamlToJson2 } from "@openfn/project";
+import { readFile as readFile3 } from "node:fs/promises";
+import path5 from "node:path";
+
 // src/execute/execute.ts
 import run, { NOTIFY_JOB_COMPLETE, getNameAndVersion } from "@openfn/runtime";
 
@@ -164,17 +169,17 @@ var namespaces = {
   [COMPILER]: "CMP",
   [JOB]: "JOB"
 };
-var createLogger2 = (name = "", options6) => {
-  const logOptions = options6.log || {};
+var createLogger2 = (name = "", options7) => {
+  const logOptions = options7.log || {};
   let json = false;
   let level = logOptions[name] || logOptions.default || "default";
-  if (options6.logJson) {
+  if (options7.logJson) {
     json = true;
   }
   return actualCreateLogger(namespaces[name] || name, {
     level,
     json,
-    sanitize: options6.sanitize || "none",
+    sanitize: options7.sanitize || "none",
     ...logOptions
   });
 };
@@ -185,8 +190,8 @@ var createNullLogger = () => createLogger2(void 0, { log: { default: "none" } })
 import fs from "node:fs";
 import path2 from "node:path";
 import { rmdir } from "node:fs/promises";
-var getCachePath = async (plan, options6, stepId) => {
-  const { baseDir } = options6;
+var getCachePath = async (plan, options7, stepId) => {
+  const { baseDir } = options7;
   const { name } = plan.workflow;
   const basePath = `${baseDir}/.cli-cache/${name}`;
   if (stepId) {
@@ -194,10 +199,10 @@ var getCachePath = async (plan, options6, stepId) => {
   }
   return path2.resolve(basePath);
 };
-var ensureGitIgnore = (options6) => {
-  if (!options6._hasGitIgnore) {
+var ensureGitIgnore = (options7) => {
+  if (!options7._hasGitIgnore) {
     const ignorePath = path2.resolve(
-      options6.baseDir,
+      options7.baseDir,
       ".cli-cache",
       ".gitignore"
     );
@@ -207,19 +212,19 @@ var ensureGitIgnore = (options6) => {
       fs.writeFileSync(ignorePath, "*");
     }
   }
-  options6._hasGitIgnore = true;
+  options7._hasGitIgnore = true;
 };
-var saveToCache = async (plan, stepId, output, options6, logger) => {
-  if (options6.cacheSteps) {
-    const cachePath = await getCachePath(plan, options6, stepId);
+var saveToCache = async (plan, stepId, output, options7, logger) => {
+  if (options7.cacheSteps) {
+    const cachePath = await getCachePath(plan, options7, stepId);
     fs.mkdirSync(path2.dirname(cachePath), { recursive: true });
-    ensureGitIgnore(options6);
+    ensureGitIgnore(options7);
     logger.info(`Writing ${stepId} output to ${cachePath}`);
    fs.writeFileSync(cachePath, JSON.stringify(output));
   }
 };
-var clearCache = async (plan, options6, logger) => {
-  const cacheDir = await getCachePath(plan, options6);
+var clearCache = async (plan, options7, logger) => {
+  const cacheDir = await getCachePath(plan, options7);
   try {
     await rmdir(cacheDir, { recursive: true });
     logger.info(`Cleared cache at ${cacheDir}`);
@@ -262,13 +267,13 @@ var execute_default = async (plan, input, opts, logger) => {
 };
 function parseAdaptors(plan) {
   const extractInfo = (specifier) => {
-    const [module, path15] = specifier.split("=");
+    const [module, path17] = specifier.split("=");
     const { name, version } = getNameAndVersion(module);
     const info = {
       name
     };
-    if (path15) {
-      info.path = path15;
+    if (path17) {
+      info.path = path17;
     }
     if (version) {
       info.version = version;
@@ -289,7 +294,7 @@ function parseAdaptors(plan) {
 // src/execute/serialize-output.ts
 import { mkdir as mkdir2, writeFile as writeFile2 } from "node:fs/promises";
 import { dirname } from "node:path";
-var serializeOutput2 = async (options6, result, logger) => {
+var serializeOutput2 = async (options7, result, logger) => {
   let output = result;
   if (output && (output.configuration || output.data)) {
     const { configuration, ...rest } = result;
@@ -300,14 +305,14 @@ var serializeOutput2 = async (options6, result, logger) => {
   } else {
     output = JSON.stringify(output, void 0, 2);
   }
-  if (options6.outputStdout) {
+  if (options7.outputStdout) {
     logger.success(`Result: `);
     logger.always(output);
-  } else if (options6.outputPath) {
-    await mkdir2(dirname(options6.outputPath), { recursive: true });
-    logger.debug(`Writing output to ${options6.outputPath}`);
-    await writeFile2(options6.outputPath, output);
-    logger.success(`State written to ${options6.outputPath}`);
+  } else if (options7.outputPath) {
+    await mkdir2(dirname(options7.outputPath), { recursive: true });
+    logger.debug(`Writing output to ${options7.outputPath}`);
+    await writeFile2(options7.outputPath, output);
+    logger.success(`State written to ${options7.outputPath}`);
   }
   return output;
 };
@@ -326,6 +331,39 @@ var getAutoinstallTargets = (plan) => {
 };
 var get_autoinstall_targets_default = getAutoinstallTargets;
 
+// src/execute/apply-credential-map.ts
+var applyCredentialMap = (plan, map = {}, logger) => {
+  const stepsWithCredentialIds = plan.workflow.steps.filter(
+    (step) => typeof step.configuration === "string" && !step.configuration.endsWith(".json")
+  );
+  const unmapped = {};
+  for (const step of stepsWithCredentialIds) {
+    if (map[step.configuration]) {
+      logger?.debug(
+        `Applying credential ${step.configuration} to "${step.name ?? step.id}"`
+      );
+      step.configuration = map[step.configuration];
+    } else {
+      unmapped[step.configuration] = true;
+      delete step.configuration;
+    }
+  }
+  if (Object.keys(unmapped).length) {
+    logger?.warn(
+      `WARNING: credential IDs were found in the workflow, but values have not been provided:`
+    );
+    logger?.warn(" ", Object.keys(unmapped).join(","));
+    if (map) {
+      logger?.warn(
+        "If the workflow fails, add these credentials to the credential map"
+      );
+    } else {
+      logger?.warn("Pass a credential map with --credentials");
+    }
+  }
+};
+var apply_credential_map_default = applyCredentialMap;
+
 // src/repo/handler.ts
 import { exec } from "node:child_process";
 import treeify from "treeify";
@@ -371,16 +409,16 @@ var removePackage = async (packageSpecifier, repoDir, logger) => {
     logger.warn(`Failed to remove ${aliasedName}: ${error.message}`);
   }
 };
-var clean = async (options6, logger) => {
-  if (options6.repoDir) {
+var clean = async (options7, logger) => {
+  if (options7.repoDir) {
     const doIt = await logger.confirm(
-      `This will remove everything at ${options6.repoDir}. Do you wish to proceed?`,
-      options6.force
+      `This will remove everything at ${options7.repoDir}. Do you wish to proceed?`,
+      options7.force
     );
     if (doIt) {
       return new Promise((resolve) => {
-        logger.info(`Cleaning repo at ${options6.repoDir} `);
-        exec(`npm exec rimraf ${options6.repoDir}`, () => {
+        logger.info(`Cleaning repo at ${options7.repoDir} `);
+        exec(`npm exec rimraf ${options7.repoDir}`, () => {
          logger.success("Repo cleaned");
          resolve();
        });
@@ -391,12 +429,12 @@ var clean = async (options6, logger) => {
     logger.error("No repoDir path detected");
   }
 };
-var pwd = async (options6, logger) => {
+var pwd = async (options7, logger) => {
   logger.info(`OPENFN_REPO_DIR is set to ${process.env.OPENFN_REPO_DIR}`);
-  logger.success(`Repo working directory is: ${options6.repoDir}`);
+  logger.success(`Repo working directory is: ${options7.repoDir}`);
 };
-var getDependencyList = async (options6, _logger) => {
-  const pkg = await loadRepoPkg(options6.repoDir);
+var getDependencyList = async (options7, _logger) => {
+  const pkg = await loadRepoPkg(options7.repoDir);
   const result = {};
   if (pkg) {
     Object.keys(pkg.dependencies).forEach((key) => {
@@ -409,9 +447,9 @@ var getDependencyList = async (options6, _logger) => {
   }
   return result;
 };
-var list = async (options6, logger) => {
-  const tree = await getDependencyList(options6, logger);
-  await pwd(options6, logger);
+var list = async (options7, logger) => {
+  const tree = await getDependencyList(options7, logger);
+  await pwd(options7, logger);
   const output = {};
   Object.keys(tree).forEach((key) => {
     const versions = tree[key];
@@ -486,6 +524,9 @@ async function compile_default(planOrPath, opts, log2) {
 var compileJob = async (job, opts, log2, jobName) => {
   try {
     const compilerOptions = await loadTransformOptions(opts, log2);
+    if (jobName) {
+      compilerOptions.name = jobName;
+    }
     return compile(job, compilerOptions);
   } catch (e) {
     abort_default(
@@ -512,7 +553,7 @@ var compileWorkflow = async (plan, opts, log2) => {
       job.expression,
       jobOpts,
       log2,
-      job.id
+      job.name ?? job.id
     );
     job.expression = code;
     job.sourceMap = map;
@@ -528,10 +569,10 @@ var stripVersionSpecifier = (specifier) => {
   return specifier;
 };
 var resolveSpecifierPath = async (pattern, repoDir, log2) => {
-  const [specifier, path15] = pattern.split("=");
-  if (path15) {
-    log2.debug(`Resolved ${specifier} to path: ${path15}`);
-    return path15;
+  const [specifier, path17] = pattern.split("=");
+  if (path17) {
+    log2.debug(`Resolved ${specifier} to path: ${path17}`);
+    return path17;
   }
   const repoPath = await getModulePath(specifier, repoDir, log2);
   if (repoPath) {
@@ -540,7 +581,7 @@ var resolveSpecifierPath = async (pattern, repoDir, log2) => {
   return null;
 };
 var loadTransformOptions = async (opts, log2) => {
-  const options6 = {
+  const options7 = {
     logger: log2 || logger_default(COMPILER, opts),
     trace: opts.trace
   };
@@ -550,12 +591,12 @@ var loadTransformOptions = async (opts, log2) => {
     let exports;
     const [specifier] = adaptorInput.split("=");
     log2.debug(`Trying to preload types for ${specifier}`);
-    const path15 = await resolveSpecifierPath(adaptorInput, opts.repoDir, log2);
-    if (path15) {
+    const path17 = await resolveSpecifierPath(adaptorInput, opts.repoDir, log2);
+    if (path17) {
       try {
-        exports = await preloadAdaptorExports(path15, log2);
+        exports = await preloadAdaptorExports(path17, log2);
       } catch (e) {
-        log2.error(`Failed to load adaptor typedefs from path ${path15}`);
+        log2.error(`Failed to load adaptor typedefs from path ${path17}`);
         log2.error(e);
       }
     }
@@ -568,12 +609,12 @@ var loadTransformOptions = async (opts, log2) => {
       exportAll: true
     });
   }
-    options6["add-imports"] = {
+    options7["add-imports"] = {
       ignore: opts.ignoreImports,
       adaptors: adaptorsConfig
     };
   }
-  return options6;
+  return options7;
 };
 
 // src/util/load-state.ts
@@ -592,7 +633,7 @@ var getUpstreamStepId = (plan, stepId) => {
   }
 };
 var load_state_default = async (plan, opts, log2, start) => {
-  const { stateStdin, statePath: statePath2 } = opts;
+  const { stateStdin, statePath } = opts;
   log2.debug("Loading state...");
   if (stateStdin) {
     try {
@@ -607,15 +648,15 @@ var load_state_default = async (plan, opts, log2, start) => {
       process.exit(1);
     }
   }
-  if (statePath2) {
+  if (statePath) {
     try {
-      const str = await fs2.readFile(statePath2, "utf8");
+      const str = await fs2.readFile(statePath, "utf8");
       const json = JSON.parse(str);
-      log2.success(`Loaded state from ${statePath2}`);
+      log2.success(`Loaded state from ${statePath}`);
       log2.debug("state:", json);
       return json;
     } catch (e) {
-      log2.warn(`Error loading state from ${statePath2}`);
+      log2.warn(`Error loading state from ${statePath}`);
      log2.warn(e);
    }
  }
@@ -666,12 +707,12 @@ var load_state_default = async (plan, opts, log2, start) => {
 };
 
 // src/util/validate-adaptors.ts
-var validateAdaptors = async (options6, logger) => {
-  if (options6.skipAdaptorValidation) {
+var validateAdaptors = async (options7, logger) => {
+  if (options7.skipAdaptorValidation) {
     return;
   }
-  const isPlan = options6.planPath || options6.workflowPath || options6.workflow;
-  const hasDeclaredAdaptors = options6.adaptors && options6.adaptors.length > 0;
+  const isPlan = options7.planPath || options7.workflowPath || options7.workflow;
+  const hasDeclaredAdaptors = options7.adaptors && options7.adaptors.length > 0;
   if (isPlan && hasDeclaredAdaptors) {
     logger.error("ERROR: adaptor and workflow provided");
     logger.error(
@@ -773,40 +814,47 @@ var mapAdaptorsToMonorepo = (monorepoPath = "", input = [], log2) => {
 };
 var map_adaptors_to_monorepo_default = mapAdaptorsToMonorepo;
 
+// src/util/resolve-path.ts
+import nodepath from "node:path";
+import os from "node:os";
+var resolve_path_default = (path17, root) => {
+  return path17.startsWith("~") ? path17.replace(`~`, os.homedir) : nodepath.resolve(root ?? "", path17);
+};
+
 // src/util/load-plan.ts
-var loadPlan = async (options6, logger) => {
-  const { workflowPath, planPath, expressionPath } = options6;
-  if (options6.path && /ya?ml$/.test(options6.path)) {
-    const content = await fs3.readFile(path4.resolve(options6.path), "utf-8");
+var loadPlan = async (options7, logger) => {
+  const { workflowPath, planPath, expressionPath } = options7;
+  if (options7.path && /ya?ml$/.test(options7.path)) {
+    const content = await fs3.readFile(path4.resolve(options7.path), "utf-8");
     const workflow2 = yamlToJson(content);
-    options6.baseDir = dirname2(options6.path);
-    return loadXPlan({ workflow: workflow2 }, options6, logger);
+    options7.baseDir = dirname2(options7.path);
+    return loadXPlan({ workflow: workflow2 }, options7, logger);
   }
-  if (options6.path && options6.workflow) {
-    options6.baseDir = options6.path;
-    return fromProject(options6.path, options6.workflow, options6, logger);
+  if (options7.path && options7.workflow) {
+    options7.baseDir = options7.path;
+    return fromProject(options7.path, options7.workflow, options7, logger);
   }
-  if (!expressionPath && !workflowPath && !/\.(js|json|yaml)+$/.test(options6.path || "") && !options6.workflow) {
-    const workflow2 = options6.path;
-    return fromProject(path4.resolve("."), workflow2, options6, logger);
+  if (!expressionPath && !workflowPath && !/\.(js|json|yaml)+$/.test(options7.path || "") && !options7.workflow) {
+    const workflow2 = options7.path;
+    return fromProject(path4.resolve("."), workflow2, options7, logger);
   }
   if (expressionPath) {
-    return loadExpression(options6, logger);
+    return loadExpression(options7, logger);
   }
   const jsonPath = planPath || workflowPath;
-  if (!options6.baseDir) {
-    options6.baseDir = path4.dirname(jsonPath);
+  if (!options7.baseDir) {
+    options7.baseDir = path4.dirname(jsonPath);
   }
   const json = await loadJson(jsonPath, logger);
   const defaultName = path4.parse(jsonPath).name;
   if (json.workflow) {
-    return loadXPlan(json, options6, logger, defaultName);
+    return loadXPlan(json, options7, logger, defaultName);
   } else {
-    return loadOldWorkflow(json, options6, logger, defaultName);
+    return loadOldWorkflow(json, options7, logger, defaultName);
   }
 };
 var load_plan_default = loadPlan;
-var fromProject = async (rootDir, workflowName, options6, logger) => {
+var fromProject = async (rootDir, workflowName, options7, logger) => {
   logger.debug("Loading Repo from ", path4.resolve(rootDir));
   const project = await Project.from("fs", { root: rootDir });
   logger.debug("Loading workflow ", workflowName);
@@ -814,7 +862,7 @@ var fromProject = async (rootDir, workflowName, options6, logger) => {
   if (!workflow2) {
     throw new Error(`Workflow "${workflowName}" not found`);
   }
-  return loadXPlan({ workflow: workflow2 }, options6, logger);
+  return loadXPlan({ workflow: workflow2 }, options7, logger);
 };
 var loadJson = async (workflowPath, logger) => {
   let text;
@@ -849,8 +897,8 @@ var maybeAssign = (a, b, keys) => {
     }
   });
 };
-var loadExpression = async (options6, logger) => {
-  const expressionPath = options6.expressionPath;
+var loadExpression = async (options7, logger) => {
+  const expressionPath = options7.expressionPath;
   logger.debug(`Loading expression from ${expressionPath}`);
   try {
     const expression = await fs3.readFile(expressionPath, "utf8");
@@ -858,19 +906,19 @@ var loadExpression = async (options6, logger) => {
     const step = {
       expression,
       // The adaptor should have been expanded nicely already, so we don't need intervene here
-      adaptors: options6.adaptors ?? []
+      adaptors: options7.adaptors ?? []
     };
     const wfOptions = {};
-    maybeAssign(options6, wfOptions, ["timeout"]);
+    maybeAssign(options7, wfOptions, ["timeout"]);
     const plan = {
       workflow: {
         name,
         steps: [step],
-        globals: options6.globals
+        globals: options7.globals
       },
       options: wfOptions
     };
-    return loadXPlan(plan, options6, logger);
+    return loadXPlan(plan, options7, logger);
   } catch (e) {
     abort_default(
       logger,
@@ -881,7 +929,7 @@ var loadExpression = async (options6, logger) => {
     return {};
   }
 };
-var loadOldWorkflow = async (workflow2, options6, logger, defaultName = "") => {
+var loadOldWorkflow = async (workflow2, options7, logger, defaultName = "") => {
   const plan = {
     workflow: {
       steps: workflow2.jobs
@@ -893,7 +941,7 @@ var loadOldWorkflow = async (workflow2, options6, logger, defaultName = "") => {
   if (workflow2.id) {
     plan.id = workflow2.id;
   }
-  const final = await loadXPlan(plan, options6, logger, defaultName);
+  const final = await loadXPlan(plan, options7, logger, defaultName);
   logger.warn("Converted workflow into new format:");
   logger.warn(final);
   return final;
@@ -901,7 +949,7 @@ var loadOldWorkflow = async (workflow2, options6, logger, defaultName = "") => {
 var fetchFile = async (fileInfo, log2) => {
   const { rootDir = "", filePath, name } = fileInfo;
   try {
-    const fullPath =
+    const fullPath = resolve_path_default(filePath, rootDir);
     const result = await fs3.readFile(fullPath, "utf8");
     log2.debug("Loaded file", fullPath);
     return result;
@@ -985,7 +1033,42 @@ var ensureAdaptors = (plan) => {
     job.adaptors ??= [];
   });
 };
-var loadXPlan = async (plan, options6, logger, defaultName = "") => {
+var ensureCollections = (plan, {
+  endpoint: endpoint2 = "https://app.openfn.org",
+  version = "latest",
+  apiKey: apiKey2 = "null"
+} = {}, logger) => {
+  let collectionsFound = false;
+  Object.values(plan.workflow.steps).filter((step) => step.expression?.match(/(collections\.)/)).forEach((step) => {
+    const job = step;
+    if (!job.adaptors?.find(
+      (v) => v.startsWith("@openfn/language-collections")
+    )) {
+      collectionsFound = true;
+      job.adaptors ??= [];
+      job.adaptors.push(
+        `@openfn/language-collections@${version || "latest"}`
+      );
+      job.configuration = Object.assign({}, job.configuration, {
+        collections_endpoint: `${endpoint2}/collections`,
+        collections_token: apiKey2
+      });
+    }
+  });
+  if (collectionsFound) {
+    if (!apiKey2 || apiKey2 === "null") {
+      logger?.warn(
+        "WARNING: collections API was not set. Pass --api-key or OPENFN_API_KEY"
+      );
+    }
+    logger?.info(
+      `Configured collections to use endpoint ${endpoint2} and API Key ending with ${apiKey2?.substring(
+        apiKey2.length - 10
+      )}`
+    );
+  }
+};
+var loadXPlan = async (plan, options7, logger, defaultName = "") => {
   if (!plan.options) {
     plan.options = {};
   }
@@ -993,22 +1076,31 @@ var loadXPlan = async (plan, options6, logger, defaultName = "") => {
     plan.workflow.name = defaultName;
   }
   ensureAdaptors(plan);
-  if (options6.globals)
-    plan.workflow.globals = options6.globals;
-  await importGlobals(plan, options6.baseDir, logger);
-  await importExpressions(plan, options6.baseDir, logger);
-  if (options6.expandAdaptors) {
+  ensureCollections(
+    plan,
+    {
+      version: options7.collectionsVersion,
+      apiKey: options7.apiKey,
+      endpoint: options7.collectionsEndpoint
+    },
+    logger
+  );
+  if (options7.globals)
+    plan.workflow.globals = options7.globals;
+  await importGlobals(plan, options7.baseDir, logger);
+  await importExpressions(plan, options7.baseDir, logger);
+  if (options7.expandAdaptors) {
     expand_adaptors_default(plan);
   }
-  await map_adaptors_to_monorepo_default(options6.monorepoPath, plan, logger);
-  maybeAssign(options6, plan.options, ["timeout", "start"]);
+  await map_adaptors_to_monorepo_default(options7.monorepoPath, plan, logger);
+  maybeAssign(options7, plan.options, ["timeout", "start"]);
   logger.info(`Loaded workflow ${plan.workflow.name ?? ""}`);
   return plan;
 };
 
 // src/util/assert-path.ts
-var assert_path_default = (
+var assert_path_default = (path17) => {
-  if (!
+  if (!path17) {
     console.error("ERROR: no path provided!");
     console.error("\nUsage:");
     console.error("  open path/to/job");
@@ -1044,7 +1136,7 @@ var fuzzy_match_step_default = (plan, stepPattern) => {
 
 // src/util/validate-plan.ts
 var assertWorkflowStructure = (plan, logger) => {
-  const { workflow: workflow2, options: options6 } = plan;
+  const { workflow: workflow2, options: options7 } = plan;
   if (!workflow2 || typeof workflow2 !== "object") {
     throw new Error(`Missing or invalid "workflow" key in execution plan`);
   }
@@ -1057,7 +1149,7 @@ var assertWorkflowStructure = (plan, logger) => {
   workflow2.steps.forEach((step, index) => {
     assertStepStructure(step, index);
   });
-  assertOptionsStructure(options6, logger);
+  assertOptionsStructure(options7, logger);
 };
 var assertStepStructure = (step, index) => {
   const allowedKeys = [
@@ -1084,9 +1176,9 @@ var assertStepStructure = (step, index) => {
     );
   }
 };
-var assertOptionsStructure = (options6 = {}, logger) => {
+var assertOptionsStructure = (options7 = {}, logger) => {
   const allowedKeys = ["timeout", "stepTimeout", "start", "end", "sanitize"];
-  for (const key in options6) {
+  for (const key in options7) {
     if (!allowedKeys.includes(key)) {
       logger.warn(`Unrecognized option "${key}" in options object`);
     }
@@ -1142,17 +1234,41 @@ var matchStep = (plan, stepPattern, stepName, logger) => {
   }
   return "";
 };
-var executeHandler = async (options6, logger) => {
+var loadAndApplyCredentialMap = async (plan, options7, logger) => {
+  let creds = {};
+  if (options7.credentials) {
+    try {
+      const credsRaw = await readFile3(
+        path5.resolve(options7.credentials),
+        "utf8"
+      );
+      if (options7.credentials.endsWith(".json")) {
+        creds = JSON.parse(credsRaw);
+      } else {
+        creds = yamlToJson2(credsRaw);
+      }
+    } catch (e) {
+      logger.error("Error processing credential map:");
+      logger.error(e);
+      process.exitCode = 1;
+      return;
+    }
+    logger.info("Credential map loaded ");
+  }
+  return apply_credential_map_default(plan, creds, logger);
+};
+var executeHandler = async (options7, logger) => {
   const start = (/* @__PURE__ */ new Date()).getTime();
-  assert_path_default(options6.path);
-  await validate_adaptors_default(options6, logger);
-  let plan = await load_plan_default(options6, logger);
+  assert_path_default(options7.path);
+  await validate_adaptors_default(options7, logger);
+  let plan = await load_plan_default(options7, logger);
   validate_plan_default(plan, logger);
-  if (options6.cacheSteps) {
-    await clearCache(plan, options6, logger);
+  await loadAndApplyCredentialMap(plan, options7, logger);
+  if (options7.cacheSteps) {
+    await clearCache(plan, options7, logger);
   }
   const moduleResolutions = {};
-  const { repoDir, monorepoPath, autoinstall } = options6;
+  const { repoDir, monorepoPath, autoinstall } = options7;
   if (autoinstall) {
     if (monorepoPath) {
       logger.warn("Skipping auto-install as monorepo is being used");
@@ -1160,13 +1276,13 @@ var executeHandler = async (options6, logger) => {
       const autoInstallTargets = get_autoinstall_targets_default(plan);
       if (autoInstallTargets.length) {
        logger.info("Auto-installing language adaptors");
-        options6.adaptors = await install(
+        options7.adaptors = await install(
          { packages: autoInstallTargets, repoDir },
          logger
        );
-        if (autoInstallTargets.length === options6.adaptors.length) {
+        if (autoInstallTargets.length === options7.adaptors.length) {
          for (let i = 0; i < autoInstallTargets.length; i++) {
-            moduleResolutions[autoInstallTargets[i]] = options6.adaptors[i];
+            moduleResolutions[autoInstallTargets[i]] = options7.adaptors[i];
          }
        }
      }
@@ -1174,35 +1290,35 @@ var executeHandler = async (options6, logger) => {
   }
   let customStart;
   let customEnd;
-  if (options6.only) {
-    const step = matchStep(plan, options6.only, "only", logger);
+  if (options7.only) {
+    const step = matchStep(plan, options7.only, "only", logger);
     customStart = step;
     customEnd = step;
-    logger.always(`Only running workflow step "${options6.start}"`);
+    logger.always(`Only running workflow step "${options7.start}"`);
   } else {
-    if (options6.start) {
+    if (options7.start) {
       customStart = matchStep(
         plan,
-        options6.start ?? plan.options.start,
+        options7.start ?? plan.options.start,
         "start",
         logger
       );
-      logger.info(`Starting workflow from step "${options6.start}"`);
+      logger.info(`Starting workflow from step "${options7.start}"`);
     }
-    if (options6.end) {
+    if (options7.end) {
       customEnd = matchStep(
         plan,
-        options6.end ?? plan.options.end,
+        options7.end ?? plan.options.end,
         "end",
         logger
      );
-      logger.always(`Ending workflow at step "${options6.end}"`);
+      logger.always(`Ending workflow at step "${options7.end}"`);
    }
  }
-  const state = await load_state_default(plan, options6, logger, customStart);
+  const state = await load_state_default(plan, options7, logger, customStart);
  plan = override_plan_adaptors_default(plan, moduleResolutions);
-  if (options6.compile) {
-    plan = await compile_default(plan, options6, logger);
+  if (options7.compile) {
+    plan = await compile_default(plan, options7, logger);
  } else {
    logger.info("Skipping compilation as noCompile is set");
  }
@@ -1216,13 +1332,13 @@ var executeHandler = async (options6, logger) => {
     workflow: plan.workflow
   };
   try {
-    const result = await execute_default(finalPlan, state, options6, logger);
-    if (options6.cacheSteps) {
+    const result = await execute_default(finalPlan, state, options7, logger);
+    if (options7.cacheSteps) {
       logger.success(
         "Cached output written to ./cli-cache (see info logs for details)"
       );
     }
-    await serialize_output_default(options6, result, logger);
+    await serialize_output_default(options7, result, logger);
     const duration = printDuration((/* @__PURE__ */ new Date()).getTime() - start);
     if (result?.errors) {
       logger.warn(
@@ -1245,55 +1361,55 @@ var handler_default2 = executeHandler;
 
 // src/compile/handler.ts
 import { writeFile as writeFile3 } from "node:fs/promises";
-var compileHandler = async (options6, logger) => {
-  assert_path_default(options6.path);
+var compileHandler = async (options7, logger) => {
+  assert_path_default(options7.path);
   let result;
-  if (options6.expressionPath) {
-    const { code } = await compile_default(options6.expressionPath, options6, logger);
+  if (options7.expressionPath) {
+    const { code } = await compile_default(options7.expressionPath, options7, logger);
     result = code;
   } else {
-    const plan = await load_plan_default(options6, logger);
-    const compiledPlan = await compile_default(plan, options6, logger);
+    const plan = await load_plan_default(options7, logger);
+    const compiledPlan = await compile_default(plan, options7, logger);
     result = JSON.stringify(compiledPlan, null, 2);
   }
-  if (options6.outputStdout) {
+  if (options7.outputStdout) {
     logger.success("Result:\n\n" + result);
   } else {
-    await writeFile3(options6.outputPath, result);
-    logger.success(`Compiled to ${options6.outputPath}`);
+    await writeFile3(options7.outputPath, result);
+    logger.success(`Compiled to ${options7.outputPath}`);
   }
 };
 var handler_default3 = compileHandler;
 
 // src/collections/handler.ts
-import path6 from "node:path";
-import { readFile as readFile3, writeFile as writeFile4 } from "node:fs/promises";
+import path7 from "node:path";
+import { readFile as readFile4, writeFile as writeFile4 } from "node:fs/promises";
 
 // src/collections/request.ts
-import path5 from "node:path";
+import path6 from "node:path";
 import { request } from "undici";
 var DEFAULT_PAGE_SIZE = 1e3;
-var request_default = async (method, options6, logger) => {
-  const base = options6.lightning || process.env.OPENFN_ENDPOINT || "https://app.openfn.org";
-  const url2 = path5.join(base, "/collections", options6.collectionName);
+var request_default = async (method, options7, logger) => {
+  const base = options7.lightning || process.env.OPENFN_ENDPOINT || "https://app.openfn.org";
+  const url2 = path6.join(base, "/collections", options7.collectionName);
   logger.debug("Calling Collections server at ", url2);
   const headers = {
-    Authorization: `Bearer ${options6.token}`
+    Authorization: `Bearer ${options7.token}`
   };
   const query = Object.assign(
     {
-      key: options6.key,
-      limit: options6.pageSize || DEFAULT_PAGE_SIZE
+      key: options7.key,
+      limit: options7.pageSize || DEFAULT_PAGE_SIZE
    },
-    options6.query
+    options7.query
  );
  const args = {
    headers,
    method,
    query
  };
-  if (options6.data) {
-    args.body = JSON.stringify(options6.data);
+  if (options7.data) {
+    args.body = JSON.stringify(options7.data);
    headers["content-type"] = "application/json";
  }
  let result = {};
@@ -1304,11 +1420,11 @@ var request_default = async (method, options6, logger) => {
     if (cursor) {
       query.cursor = cursor;
     }
-    if (options6.limit) {
-      limit = options6.limit;
+    if (options7.limit) {
+      limit = options7.limit;
       query.limit = Math.min(
-        options6.pageSize || DEFAULT_PAGE_SIZE,
-        options6.limit - count
+        options7.pageSize || DEFAULT_PAGE_SIZE,
+        options7.limit - count
       );
     }
     try {
@@ -1402,7 +1518,7 @@ var ensureToken = (opts, logger) => {
     }
   }
 };
-var buildQuery = (options6) => {
+var buildQuery = (options7) => {
   const map = {
     createdBefore: "created_before",
     createdAfter: "created_after",
@@ -1411,34 +1527,34 @@ var buildQuery = (options6) => {
   };
   const query = {};
   Object.keys(map).forEach((key) => {
-    if (options6[key]) {
-      query[map[key]] = options6[key];
+    if (options7[key]) {
+      query[map[key]] = options7[key];
     }
   });
   return query;
 };
-var get = async (options6, logger) => {
-  ensureToken(options6, logger);
-  const multiMode = options6.key.includes("*");
+var get = async (options7, logger) => {
+  ensureToken(options7, logger);
+  const multiMode = options7.key.includes("*");
   if (multiMode) {
     logger.info(
-      `Fetching multiple items from collection "${options6.collectionName}" with pattern ${options6.key}`
+      `Fetching multiple items from collection "${options7.collectionName}" with pattern ${options7.key}`
    );
  } else {
    logger.info(
-      `Fetching "${options6.key}" from collection "${options6.collectionName}"`
+      `Fetching "${options7.key}" from collection "${options7.collectionName}"`
    );
  }
  let result = await request_default(
    "GET",
    {
-      lightning: options6.endpoint,
-      token: options6.token,
-      pageSize: options6.pageSize,
-      limit: options6.limit,
-      key: options6.key,
-      collectionName: options6.collectionName,
-      query: buildQuery(options6)
+      lightning: options7.endpoint,
+      token: options7.token,
+      pageSize: options7.pageSize,
+      limit: options7.limit,
+      key: options7.key,
+      collectionName: options7.collectionName,
+      query: buildQuery(options7)
    },
    logger
  );
@@ -1446,76 +1562,76 @@ var get = async (options6, logger) => {
     logger.success(`Fetched ${Object.keys(result).length} items!`);
   } else {
     result = Object.values(result)[0];
-    logger.success(`Fetched ${options6.key}`);
+    logger.success(`Fetched ${options7.key}`);
   }
-  if (options6.outputPath) {
+  if (options7.outputPath) {
     const content = JSON.stringify(
       result,
       null,
-      options6.pretty ? 2 : void 0
+      options7.pretty ? 2 : void 0
     );
-    await writeFile4(options6.outputPath, content);
-    logger.always(`Wrote items to ${options6.outputPath}`);
+    await writeFile4(options7.outputPath, content);
+    logger.always(`Wrote items to ${options7.outputPath}`);
   } else {
     logger.print(result);
   }
 };
-var set = async (options6, logger) => {
-  if (options6.key && options6.items) {
+var set = async (options7, logger) => {
+  if (options7.key && options7.items) {
     throwAbortableError(
       "ARGUMENT_ERROR: arguments for key and items were provided",
       "If upserting multiple items with --items, do not pass a key"
     );
   }
-  ensureToken(options6, logger);
-  logger.info(`Upserting items to collection "${options6.collectionName}"`);
+  ensureToken(options7, logger);
+  logger.info(`Upserting items to collection "${options7.collectionName}"`);
   const items = [];
-  if (options6.items) {
-    const resolvedPath = path6.resolve(options6.items);
+  if (options7.items) {
+    const resolvedPath = path7.resolve(options7.items);
     logger.debug("Loading items from ", resolvedPath);
-    const data = await readFile3(resolvedPath, "utf8");
+    const data = await readFile4(resolvedPath, "utf8");
     const obj = JSON.parse(data);
     Object.entries(obj).forEach(([key, value]) => {
       items.push({ key, value: JSON.stringify(value) });
     });
     logger.info(`Upserting ${items.length} items`);
-  } else if (options6.key && options6.value) {
-    const resolvedPath = path6.resolve(options6.value);
+  } else if (options7.key && options7.value) {
+    const resolvedPath = path7.resolve(options7.value);
     logger.debug("Loading value from ", resolvedPath);
-    const data = await readFile3(path6.resolve(options6.value), "utf8");
+    const data = await readFile4(path7.resolve(options7.value), "utf8");
     const value = JSON.stringify(JSON.parse(data));
-    items.push({ key: options6.key, value });
-    logger.info(`Upserting data to "${options6.key}"`);
+    items.push({ key: options7.key, value });
+    logger.info(`Upserting data to "${options7.key}"`);
   } else {
     throw new Error("INVALID_ARGUMENTS");
   }
   const result = await request_default(
     "POST",
     {
-      lightning: options6.endpoint,
-      token: options6.token,
-      key: options6.key,
-      collectionName: options6.collectionName,
+      lightning: options7.endpoint,
+      token: options7.token,
+      key: options7.key,
+      collectionName: options7.collectionName,
       data: { items }
    },
    logger
  );
  logger.success(`Upserted ${result.upserted} items!`);
};
-var remove = async (options6, logger) => {
-  ensureToken(options6, logger);
+var remove = async (options7, logger) => {
+  ensureToken(options7, logger);
  logger.info(
-    `Removing "${options6.key}" from collection "${options6.collectionName}"`
+    `Removing "${options7.key}" from collection "${options7.collectionName}"`
  );
-  if (options6.dryRun) {
+  if (options7.dryRun) {
    logger.info("--dry-run passed: fetching affected items");
    let result = await request_default(
      "GET",
      {
-        lightning: options6.endpoint,
-        token: options6.token,
-        key: options6.key,
-        collectionName: options6.collectionName
+        lightning: options7.endpoint,
+        token: options7.token,
+        key: options7.key,
+        collectionName: options7.collectionName
      },
      logger
    );
@@ -1527,11 +1643,11 @@ var remove = async (options6, logger) => {
   let result = await request_default(
     "DELETE",
     {
-      lightning: options6.endpoint,
-      token: options6.token,
-      key: options6.key,
-      collectionName: options6.collectionName,
-      query: buildQuery(options6)
+      lightning: options7.endpoint,
+      token: options7.token,
+      key: options7.key,
+      collectionName: options7.collectionName,
+      query: buildQuery(options7)
     },
     logger
   );
@@ -1545,9 +1661,9 @@ var handler_default4 = {
 };
 
 // src/test/handler.ts
-var testHandler = async (options6, logger) => {
+var testHandler = async (options7, logger) => {
   logger.log("Running test workflow...");
-  const opts = { ...options6 };
+  const opts = { ...options7 };
   opts.compile = true;
   opts.adaptors = [];
   const plan = {
@@ -1614,7 +1730,7 @@ import Project2 from "@openfn/project";
 import { deployProject } from "@openfn/deploy";
 
 // src/projects/util.ts
-import path7 from "node:path";
+import path8 from "node:path";
 import { mkdir as mkdir3, writeFile as writeFile5 } from "node:fs/promises";
 
 // src/errors.ts
@@ -1625,17 +1741,17 @@ var CLIError = class extends Error {
 };
 
 // src/projects/util.ts
-var loadAppAuthConfig = (options6, logger) => {
+var loadAppAuthConfig = (options7, logger) => {
   const { OPENFN_API_KEY, OPENFN_ENDPOINT } = process.env;
   const config2 = {
-    apiKey: options6.apiKey,
-    endpoint: options6.endpoint
+    apiKey: options7.apiKey,
+    endpoint: options7.endpoint
   };
-  if (!options6.apiKey && OPENFN_API_KEY) {
+  if (!options7.apiKey && OPENFN_API_KEY) {
     logger.info("Using OPENFN_API_KEY environment variable");
     config2.apiKey = OPENFN_API_KEY;
   }
-  if (!options6.endpoint && OPENFN_ENDPOINT) {
+  if (!options7.endpoint && OPENFN_ENDPOINT) {
     logger.info("Using OPENFN_ENDPOINT environment variable");
     config2.endpoint = OPENFN_ENDPOINT;
   }
@@ -1648,7 +1764,7 @@ var ensureExt = (filePath, ext) => {
   return filePath;
 };
 var serialize = async (project, outputPath2, formatOverride, dryRun = false) => {
-  const root = path7.dirname(outputPath2);
+  const root = path8.dirname(outputPath2);
   await mkdir3(root, { recursive: true });
   const format = formatOverride ?? project.config?.formats.project;
   const output = project?.serialize("project", { format });
@@ -1667,41 +1783,38 @@ var serialize = async (project, outputPath2, formatOverride, dryRun = false) => {
   }
   return finalPath;
 };
-var getLightningUrl = (config2, path15 = "", snapshots2) => {
+var getLightningUrl = (endpoint2, path17 = "", snapshots2) => {
   const params = new URLSearchParams();
   snapshots2?.forEach((snapshot) => params.append("snapshots[]", snapshot));
-  return new URL(
-    `/api/provision/${path15}?${params.toString()}`,
-    config2.endpoint
-  );
+  return new URL(`/api/provision/${path17}?${params.toString()}`, endpoint2);
 };
-async function
-  const url2 = getLightningUrl(
-  logger
+async function fetchProject(endpoint2, apiKey2, projectId, logger, snapshots2) {
+  const url2 = getLightningUrl(endpoint2, projectId, snapshots2);
+  logger?.info(`Checking ${url2} for existing project`);
   try {
     const response = await fetch(url2, {
       headers: {
-        Authorization: `Bearer ${config2.apiKey}`,
+        Authorization: `Bearer ${apiKey2}`,
         Accept: "application/json"
       }
     });
     if (!response.ok) {
       if (response.status === 401 || response.status === 403) {
         throw new CLIError(
-          `Failed to authorize request with endpoint ${config2.endpoint}, got ${response.status} ${response.statusText}`
+          `Failed to authorize request with endpoint ${endpoint2}, got ${response.status} ${response.statusText}`
        );
      }
      if (response.status === 404) {
-        throw new CLIError(`Project not found: ${
+        throw new CLIError(`Project not found: ${projectId}`);
      }
      throw new CLIError(
-        `Failed to fetch project ${
+        `Failed to fetch project ${projectId}: ${response.statusText}`
      );
    }
-    logger
+    logger?.info(`Project retrieved from ${endpoint2}`);
    return response.json();
  } catch (error) {
-    handleCommonErrors(
+    handleCommonErrors({ endpoint: endpoint2, apiKey: apiKey2 }, error);
    throw error;
  }
}
@@ -1719,9 +1832,11 @@ var DeployError = class extends Error {
|
|
|
1719
1832
|
};
|
|
1720
1833
|
|
|
1721
1834
|
// src/deploy/beta.ts
|
|
1722
|
-
async function handler(
|
|
1723
|
-
const config2 = loadAppAuthConfig(
|
|
1724
|
-
const project = await Project2.from("fs", {
|
|
1835
|
+
async function handler(options7, logger) {
|
|
1836
|
+
const config2 = loadAppAuthConfig(options7, logger);
|
|
1837
|
+
const project = await Project2.from("fs", {
|
|
1838
|
+
root: options7.workspace || "."
|
|
1839
|
+
});
|
|
1725
1840
|
const state = project.serialize("state", { format: "json" });
|
|
1726
1841
|
logger.debug("Converted local project to app state:");
|
|
1727
1842
|
logger.debug(JSON.stringify(state, null, 2));
|
|
@@ -1733,15 +1848,15 @@ async function handler(options6, logger) {
|
|
|
1733
1848
|
|
|
1734
1849
|
// src/deploy/handler.ts
|
|
1735
1850
|
var actualDeploy = deploy;
|
|
1736
|
-
async function deployHandler(
|
|
1737
|
-
if (
|
|
1738
|
-
return handler(
|
|
1851
|
+
async function deployHandler(options7, logger, deployFn = actualDeploy) {
|
|
1852
|
+
if (options7.beta) {
|
|
1853
|
+
return handler(options7, logger);
|
|
1739
1854
|
}
|
|
1740
1855
|
try {
|
|
1741
|
-
const config2 = mergeOverrides(await getConfig(
|
|
1856
|
+
const config2 = mergeOverrides(await getConfig(options7.configPath), options7);
|
|
1742
1857
|
logger.debug("Deploying with config", JSON.stringify(config2, null, 2));
|
|
1743
|
-
if (
|
|
1744
|
-
config2.requireConfirmation =
|
|
1858
|
+
if (options7.confirm === false) {
|
|
1859
|
+
config2.requireConfirmation = options7.confirm;
|
|
1745
1860
|
}
|
|
1746
1861
|
if (process.env["OPENFN_API_KEY"]) {
|
|
1747
1862
|
logger.info("Using OPENFN_API_KEY environment variable");
|
|
@@ -1766,15 +1881,15 @@ async function deployHandler(options6, logger, deployFn = actualDeploy) {
|
|
|
1766
1881
|
throw error;
|
|
1767
1882
|
}
|
|
1768
1883
|
}
|
|
1769
|
-
function mergeOverrides(config2,
|
|
1884
|
+
function mergeOverrides(config2, options7) {
|
|
1770
1885
|
return {
|
|
1771
1886
|
...config2,
|
|
1772
1887
|
apiKey: pickFirst(process.env["OPENFN_API_KEY"], config2.apiKey),
|
|
1773
1888
|
endpoint: pickFirst(process.env["OPENFN_ENDPOINT"], config2.endpoint),
|
|
1774
|
-
statePath: pickFirst(
|
|
1775
|
-
specPath: pickFirst(
|
|
1776
|
-
configPath:
|
|
1777
|
-
requireConfirmation: pickFirst(
|
|
1889
|
+
statePath: pickFirst(options7.statePath, config2.statePath),
|
|
1890
|
+
specPath: pickFirst(options7.projectPath, config2.specPath),
|
|
1891
|
+
configPath: options7.configPath,
|
|
1892
|
+
requireConfirmation: pickFirst(options7.confirm, config2.requireConfirmation)
|
|
1778
1893
|
};
|
|
1779
1894
|
}
|
|
1780
1895
|
function pickFirst(...args) {
|
|
@@ -1785,28 +1900,28 @@ var handler_default6 = deployHandler;
|
|
|
1785
1900
|
// src/docgen/handler.ts
|
|
1786
1901
|
import { writeFile as writeFile6 } from "node:fs/promises";
|
|
1787
1902
|
import { readFileSync, writeFileSync, mkdirSync, rmSync } from "node:fs";
|
|
1788
|
-
import
|
|
1903
|
+
import path9 from "node:path";
|
|
1789
1904
|
import { describePackage } from "@openfn/describe-package";
|
|
1790
1905
|
import { getNameAndVersion as getNameAndVersion4 } from "@openfn/runtime";
|
|
1791
1906
|
var RETRY_DURATION = 500;
|
|
1792
1907
|
var RETRY_COUNT = 20;
|
|
1793
1908
|
var TIMEOUT_MS = 1e3 * 60;
|
|
1794
1909
|
var actualDocGen = (specifier) => describePackage(specifier, {});
|
|
1795
|
-
var ensurePath = (filePath) => mkdirSync(
|
|
1796
|
-
var generatePlaceholder = (
|
|
1797
|
-
writeFileSync(
|
|
1910
|
+
 var ensurePath = (filePath) => mkdirSync(path9.dirname(filePath), { recursive: true });
+var generatePlaceholder = (path17) => {
+  writeFileSync(path17, `{ "loading": true, "timestamp": ${Date.now()}}`);
 };
 var finish = (logger, resultPath) => {
   logger.success("Done! Docs can be found at:\n");
-  logger.print(` ${
+  logger.print(` ${path9.resolve(resultPath)}`);
 };
-var generateDocs = async (specifier,
+var generateDocs = async (specifier, path17, docgen, logger) => {
   const result = await docgen(specifier);
-  await writeFile6(
-  finish(logger,
-  return
+  await writeFile6(path17, JSON.stringify(result, null, 2));
+  finish(logger, path17);
+  return path17;
 };
-var waitForDocs = async (docs,
+var waitForDocs = async (docs, path17, logger, retryDuration = RETRY_DURATION) => {
   try {
     if (docs.hasOwnProperty("loading")) {
       logger.info("Docs are being loaded by another process. Waiting.");
@@ -1818,27 +1933,27 @@ var waitForDocs = async (docs, path15, logger, retryDuration = RETRY_DURATION) =
         clearInterval(i);
         reject(new Error("Timed out waiting for docs to load"));
       }
-      const updated = JSON.parse(readFileSync(
+      const updated = JSON.parse(readFileSync(path17, "utf8"));
       if (!updated.hasOwnProperty("loading")) {
         logger.info("Docs found!");
         clearInterval(i);
-        resolve(
+        resolve(path17);
       }
       count++;
     }, retryDuration);
   });
 } else {
-  logger.info(`Docs already written to cache at ${
-  finish(logger,
-  return
+  logger.info(`Docs already written to cache at ${path17}`);
+  finish(logger, path17);
+  return path17;
 }
 } catch (e) {
   logger.error("Existing doc JSON corrupt. Aborting");
   throw e;
 }
 };
-var docgenHandler = (
-  const { specifier, repoDir } =
+var docgenHandler = (options7, logger, docgen = actualDocGen, retryDuration = RETRY_DURATION) => {
+  const { specifier, repoDir } = options7;
   const { version } = getNameAndVersion4(specifier);
   if (!version) {
     logger.error("Error: No version number detected");
@@ -1847,28 +1962,28 @@ var docgenHandler = (options6, logger, docgen = actualDocGen, retryDuration = RE
   process.exit(9);
 }
 logger.success(`Generating docs for ${specifier}`);
-const
-ensurePath(
+const path17 = `${repoDir}/docs/${specifier}.json`;
+ensurePath(path17);
 const handleError2 = () => {
   logger.info("Removing placeholder");
-  rmSync(
+  rmSync(path17);
 };
 try {
-  const existing = readFileSync(
+  const existing = readFileSync(path17, "utf8");
   const json = JSON.parse(existing);
   if (json && json.timeout && Date.now() - json.timeout >= TIMEOUT_MS) {
     logger.info(`Expired placeholder found. Removing.`);
-    rmSync(
+    rmSync(path17);
     throw new Error("TIMEOUT");
   }
-  return waitForDocs(json,
+  return waitForDocs(json, path17, logger, retryDuration);
 } catch (e) {
   if (e.message !== "TIMEOUT") {
-    logger.info(`Docs JSON not found at ${
+    logger.info(`Docs JSON not found at ${path17}`);
   }
   logger.debug("Generating placeholder");
-  generatePlaceholder(
-  return generateDocs(specifier,
+  generatePlaceholder(path17);
+  return generateDocs(specifier, path17, docgen, logger).catch((e2) => {
     logger.error("Error generating documentation");
     logger.error(e2);
     handleError2();
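The hunks above rework docgen around a file-based lock: the first process writes a `{ "loading": true, "timestamp": ... }` placeholder at the docs path, competing processes poll that file until real docs replace it, and a placeholder older than TIMEOUT_MS is treated as stale and deleted. Note that the placeholder writes a `timestamp` field while `docgenHandler` checks `json.timeout`, which reads like a naming mismatch carried over in the published code. A minimal standalone sketch of the pattern, with hypothetical names and an assumed TIMEOUT_MS value:

// Sketch only: tryAcquire and TIMEOUT_MS are illustrative, not the CLI's API.
import { existsSync, readFileSync, writeFileSync } from "node:fs";

const TIMEOUT_MS = 60 * 1000; // assumed value for the sketch

function tryAcquire(path) {
  if (!existsSync(path)) {
    // No docs and no placeholder: claim the work by writing the lock file
    writeFileSync(path, JSON.stringify({ loading: true, timestamp: Date.now() }));
    return true;
  }
  const json = JSON.parse(readFileSync(path, "utf8"));
  if (json.loading && Date.now() - json.timestamp >= TIMEOUT_MS) {
    return true; // stale placeholder: take over generation
  }
  // Either real docs already exist, or another process is generating them
  return false;
}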
@@ -1878,7 +1993,7 @@ var docgenHandler = (options6, logger, docgen = actualDocGen, retryDuration = RE
 var handler_default7 = docgenHandler;

 // src/docs/handler.ts
-import { readFile as
+import { readFile as readFile5 } from "node:fs/promises";
 import c from "chalk";
 import { getNameAndVersion as getNameAndVersion5, getLatestVersion } from "@openfn/runtime";
 var describeFn = (adaptorName, fn) => [
@@ -1907,8 +2022,8 @@ ${data.functions.map(
   (fn) => ` ${c.yellow(fn.name)} (${fn.parameters.map((p) => p.name).join(", ")})`
 ).sort().join("\n")}
 `;
-var docsHandler = async (
-  const { adaptor, operation, repoDir } =
+var docsHandler = async (options7, logger) => {
+  const { adaptor, operation, repoDir } = options7;
   const adaptors = expand_adaptors_default([adaptor]);
   const [adaptorName] = adaptors;
   let { name, version } = getNameAndVersion5(adaptorName);
@@ -1919,7 +2034,7 @@ var docsHandler = async (options6, logger) => {
   logger.success(`Showing docs for ${adaptorName} v${version}`);
 }
 logger.info("Generating/loading documentation...");
-const
+const path17 = await handler_default7(
   {
     specifier: `${name}@${version}`,
     repoDir
@@ -1928,8 +2043,8 @@ var docsHandler = async (options6, logger) => {
   createNullLogger()
 );
 let didError = false;
-if (
-  const source = await
+if (path17) {
+  const source = await readFile5(path17, "utf8");
   const data = JSON.parse(source);
   let desc;
   if (operation) {
@@ -1967,20 +2082,20 @@ var handler_default8 = docsHandler;
 // src/metadata/cache.ts
 import { getNameAndVersion as getNameAndVersion6 } from "@openfn/runtime";
 import { createHash } from "node:crypto";
-import { mkdir as mkdir4, readFile as
-import
+import { mkdir as mkdir4, readFile as readFile6, writeFile as writeFile7, readdir, rm } from "node:fs/promises";
+import path10 from "node:path";
 var UNSUPPORTED_FILE_NAME = "unsupported.json";
 var getCachePath2 = (repoDir, key) => {
-  const base =
+  const base = path10.join(repoDir, "meta");
   if (key) {
-    return
+    return path10.join(base, key.endsWith(".json") ? key : `${key}.json`);
   }
   return base;
 };
 var getCache = async (repoDir, key) => {
   try {
     const cachePath = getCachePath2(repoDir, key);
-    const content = await
+    const content = await readFile6(cachePath, "utf8");
     return JSON.parse(content);
   } catch (e) {
     return null;
@@ -2007,7 +2122,7 @@ var generateKey = (config2, adaptor) => {
 var get2 = async (repoPath, key) => {
   const p = getCachePath2(repoPath, key);
   try {
-    const result = await
+    const result = await readFile6(p, "utf8");
     return JSON.parse(result);
   } catch (e) {
     return null;
@@ -2015,7 +2130,7 @@ var get2 = async (repoPath, key) => {
 };
 var set2 = async (repoPath, key, result) => {
   const p = getCachePath2(repoPath, key);
-  await mkdir4(
+  await mkdir4(path10.dirname(p), { recursive: true });
   await writeFile7(p, JSON.stringify(result));
 };
 var getUnsupportedCachePath = (repoDir) => {
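The metadata cache above stores results as JSON files under <repoDir>/meta/, keyed by a hash of the adaptor plus its credential config (generateKey's body is not fully shown in this diff, but createHash is imported for it). A self-contained sketch of the same get-or-compute pattern, with hypothetical helper names:

// Sketch of the hash-keyed file cache pattern; keyFor's hash inputs are assumed.
import { createHash } from "node:crypto";
import { mkdir, readFile, writeFile } from "node:fs/promises";
import path from "node:path";

const cachePath = (repoDir, key) => path.join(repoDir, "meta", `${key}.json`);

const keyFor = (config, adaptor) =>
  createHash("sha256").update(adaptor + JSON.stringify(config)).digest("hex");

export async function cached(repoDir, key, compute) {
  const p = cachePath(repoDir, key);
  try {
    return JSON.parse(await readFile(p, "utf8")); // cache hit
  } catch {
    const result = await compute(); // cache miss: compute and persist
    await mkdir(path.dirname(p), { recursive: true });
    await writeFile(p, JSON.stringify(result));
    return result;
  }
}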
@@ -2062,7 +2177,7 @@ var markAdaptorAsUnsupported = async (adaptorSpecifier, repoDir) => {
 const cachePath = getUnsupportedCachePath(repoDir);
 let cache = {};
 try {
-  const cacheContent = await
+  const cacheContent = await readFile6(cachePath, "utf8");
   cache = JSON.parse(cacheContent);
 } catch (error) {
 }
@@ -2074,7 +2189,7 @@ var markAdaptorAsUnsupported = async (adaptorSpecifier, repoDir) => {
   majorMinor: parsed.majorMinor,
   timestamp: Date.now()
 };
-await mkdir4(
+await mkdir4(path10.dirname(cachePath), { recursive: true });
 await writeFile7(cachePath, JSON.stringify(cache, null, 2));
 }
 };
@@ -2113,8 +2228,8 @@ var getAdaptorPath = async (adaptor, logger, repoDir) => {
   return adaptorPath;
 };
 var shouldAutoinstall = (adaptor) => adaptor?.length > 0 && !adaptor.startsWith("/") && !adaptor.includes("=");
-var metadataHandler = async (
-  const { repoDir, adaptors, keepUnsupported } =
+var metadataHandler = async (options7, logger) => {
+  const { repoDir, adaptors, keepUnsupported } = options7;
   let adaptor = adaptors[0];
   if (await isAdaptorUnsupported(adaptor, repoDir)) {
     logger.info(
@@ -2123,7 +2238,7 @@ var metadataHandler = async (options6, logger) => {
   logger.error("No metadata helper found");
   process.exit(1);
 }
-const state = await load_state_default({},
+const state = await load_state_default({}, options7, logger);
 logger.success(`Generating metadata`);
 logger.info("config:", state);
 const config2 = state.configuration;
@@ -2136,7 +2251,7 @@ var metadataHandler = async (options6, logger) => {
   logger.print(getCachePath2(repoDir, id));
 };
 const id = generateKey(config2, adaptor);
-if (!
+if (!options7.force) {
   logger.debug("config hash: ", id);
   const cached = await get2(repoDir, id);
   if (cached) {
@@ -2154,7 +2269,7 @@ var metadataHandler = async (options6, logger) => {
   wasAutoInstalled = true;
   adaptor = autoinstallResult[0];
 }
-const adaptorPath = await getAdaptorPath(adaptor, logger,
+const adaptorPath = await getAdaptorPath(adaptor, logger, options7.repoDir);
 if (!adaptorPath) {
   throw new Error(`Could not resolve adaptor path for ${adaptor}`);
 }
@@ -2196,20 +2311,16 @@ var metadataHandler = async (options6, logger) => {
 var handler_default9 = metadataHandler;

 // src/pull/handler.ts
-import
+import path14 from "path";
 import fs5 from "node:fs/promises";
 import {
   getConfig as getConfig2,
-  getProject
+  getProject,
   getSpec,
   getStateFromProjectPayload,
   syncRemoteSpec
 } from "@openfn/deploy";

-// src/projects/fetch.ts
-import path10 from "node:path";
-import Project3, { Workspace } from "@openfn/project";
-
 // src/util/command-builders.ts
 import c2 from "chalk";
 var expandYargs = (y) => {
@@ -2219,13 +2330,17 @@ var expandYargs = (y) => {
   return y;
 };
 function build(opts, yargs) {
-  return opts.reduce(
-
-
-
+  return opts.reduce((_y, o) => {
+    if (!o?.name) {
+      console.error(`ERROR: INVALID COMMAND OPTION PASSED`, o);
+      console.error("Check the options passed to the command builder");
+      throw new Error("Invalid command");
+    }
+    return yargs.option(o.name, expandYargs(o.yargs));
+  }, yargs);
 }
-var ensure = (
-  yargs.command =
+var ensure = (command7, opts) => (yargs) => {
+  yargs.command = command7;
   opts.filter((opt) => opt.ensure).forEach((opt) => {
     try {
       opt.ensure(yargs);
@@ -2233,7 +2348,7 @@ var ensure = (command6, opts) => (yargs) => {
       console.log(e);
       console.error(
         c2.red(`
-Error parsing command arguments: ${
+Error parsing command arguments: ${command7}.${opt.name}
 `)
       );
       console.error(c2.red("Aborting"));
@@ -2242,18 +2357,19 @@ Error parsing command arguments: ${command6}.${opt.name}
     }
   });
 };
-var override = (
+var override = (command7, yargs) => {
   return {
-    ...
+    ...command7,
     yargs: {
-      ...
+      ...command7.yargs || {},
       ...yargs
     }
   };
 };

-// src/
-import
+// src/projects/fetch.ts
+import path12 from "node:path";
+import Project3, { Workspace } from "@openfn/project";

 // src/util/ensure-log-opts.ts
 var defaultLoggerOptions = {
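The command-builder hunks above harden build() against malformed option objects and keep the same overall shape: options are plain data objects, build() folds them into a yargs instance, ensure() runs each option's normalisation hook after parsing, and override() shallow-merges yargs settings without mutating the original. A self-contained sketch of the pattern (illustrative options, not the CLI's own):

// Illustrative option objects and minimal builders mirroring the pattern above.
const options = [
  { name: "quiet", yargs: { boolean: true, describe: "Suppress output" } },
  {
    name: "workspace",
    yargs: { string: true, describe: "Workspace directory" },
    // ensure() runs after parsing to fill in defaults
    ensure: (opts) => { opts.workspace = opts.workspace ?? process.cwd(); },
  },
];

const build = (opts, yargs) =>
  opts.reduce((y, o) => y.option(o.name, o.yargs), yargs);

const ensure = (command, opts) => (argv) => {
  argv.command = command; // stamp the command name onto the parsed argv
  opts.filter((o) => o.ensure).forEach((o) => o.ensure(argv));
  return argv;
};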
@@ -2312,49 +2428,17 @@ var ensureLogOpts = (opts) => {
 };
 var ensure_log_opts_default = ensureLogOpts;

-// src/util/get-cli-option-object.ts
-function getCLIOptionObject(arg) {
-  if (isObject(arg)) {
-    return arg;
-  } else if (typeof arg === "string") {
-    try {
-      const p = JSON.parse(arg);
-      if (isObject(p))
-        return p;
-    } catch (e) {
-    }
-    return Object.fromEntries(
-      arg.split(",").map((pair) => {
-        const [k, v] = pair.split("=");
-        return [k.trim(), v.trim()];
-      })
-    );
-  }
-}
-function isObject(arg) {
-  return typeof arg === "object" && arg !== null && !Array.isArray(arg);
-}
-
 // src/options.ts
-var
-  const v = opts[key];
-  if (isNaN(v) && !v) {
-    opts[key] = value;
-  }
-};
-var apikey = {
+var apiKey = {
   name: "apikey",
   yargs: {
-    alias: ["
-    description: "
-  }
-
-
-
-
-    alias: ["c", "config-path"],
-    description: "The location of your config file",
-    default: "./.config.json"
+    alias: ["pat", "token", "api-key"],
+    description: "API Key, Personal Access Token (PAT), or other access token from Lightning"
+  },
+  ensure: (opts) => {
+    if (!opts.apikey) {
+      opts.apiKey = process.env.OPENFN_API_KEY;
+    }
   }
 };
 var endpoint = {
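The reworked apiKey option gains pat/token aliases and an ensure() hook that falls back to the OPENFN_API_KEY environment variable when no flag is passed. Note the hook reads `opts.apikey` (lowercase) but writes `opts.apiKey` (camelCase); that asymmetry is in the published code. A quick standalone illustration of the fallback:

// Standalone illustration of the ensure() fallback above, not CLI code.
const apiKeyOption = {
  name: "apikey",
  ensure: (opts) => {
    if (!opts.apikey) {
      opts.apiKey = process.env.OPENFN_API_KEY; // env var fallback
    }
  }
};

process.env.OPENFN_API_KEY = "pat-123"; // illustrative token
const opts = {};
apiKeyOption.ensure(opts);
console.log(opts.apiKey); // "pat-123"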
@@ -2364,12 +2448,6 @@ var endpoint = {
     description: "[beta only] URL to Lightning endpoint"
   }
 };
-var env = {
-  name: "env",
-  yargs: {
-    description: "[beta only] Environment name (eg staging, prod, branch)"
-  }
-};
 var force = {
   name: "force",
   yargs: {
@@ -2379,23 +2457,6 @@ var force = {
     default: false
   }
 };
-var getBaseDir = (opts) => {
-  const basePath = opts.path ?? ".";
-  if (/\.(jso?n?|ya?ml)$/.test(basePath)) {
-    return nodePath.dirname(basePath);
-  }
-  return basePath;
-};
-var projectId = {
-  name: "project-id",
-  yargs: {
-    description: "The id or UUID of an openfn project",
-    string: true
-  },
-  ensure: (opts) => {
-    return opts.projectName;
-  }
-};
 var log = {
   name: "log",
   yargs: {
@@ -2414,26 +2475,18 @@ var logJson = {
     boolean: true
   }
 };
-var
-  name: "
+var projectPath = {
+  name: "project-path",
   yargs: {
-
-
-
-
-
-
-
-
-
-    if (opts.outputStdout) {
-      delete opts.outputPath;
-    } else {
-      const base = getBaseDir(opts);
-      setDefaultValue(opts, "outputPath", nodePath.join(base, "output.json"));
-    }
-  }
-  delete opts.o;
+    string: true,
+    alias: ["p"],
+    description: "The location of your project.yaml file"
+  }
+};
+var path11 = {
+  name: "path",
+  yargs: {
+    description: "Path"
   }
 };
 var snapshots = {
@@ -2443,16 +2496,6 @@ var snapshots = {
     array: true
   }
 };
-var statePath = {
-  name: "state-path",
-  yargs: {
-    alias: ["s"],
-    description: "Path to the state file"
-  },
-  ensure: (opts) => {
-    delete opts.s;
-  }
-};
 var timeout = {
   name: "timeout",
   yargs: {
@@ -2469,6 +2512,44 @@ var workflow = {
     description: "Name of the workflow to execute"
   }
 };
+
+// src/util/get-cli-option-object.ts
+function getCLIOptionObject(arg) {
+  if (isObject(arg)) {
+    return arg;
+  } else if (typeof arg === "string") {
+    try {
+      const p = JSON.parse(arg);
+      if (isObject(p))
+        return p;
+    } catch (e) {
+    }
+    return Object.fromEntries(
+      arg.split(",").map((pair) => {
+        const [k, v] = pair.split("=");
+        return [k.trim(), v.trim()];
+      })
+    );
+  }
+}
+function isObject(arg) {
+  return typeof arg === "object" && arg !== null && !Array.isArray(arg);
+}
+
+// src/projects/options.ts
+var env = {
+  name: "env",
+  yargs: {
+    description: "Environment name (eg staging, prod, branch)",
+    hidden: true
+  }
+};
+var alias = {
+  name: "alias",
+  yargs: {
+    description: "Environment name (eg staging, prod, branch)"
+  }
+};
 var removeUnmapped = {
   name: "remove-unmapped",
   yargs: {
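getCLIOptionObject is not new logic; this release relocates it below the core options, and env moves into src/projects/options.ts as a hidden flag. Note the new alias option reuses env's "Environment name" description string, which reads like a copy-paste slip in the published code. For reference, the three input shapes the helper accepts:

// Behaviour of getCLIOptionObject for its three accepted input shapes:
getCLIOptionObject({ a: 1 });      // object: returned as-is
getCLIOptionObject('{"a": 1}');    // JSON string: parsed
getCLIOptionObject("a=1, b=two");  // pairs: { a: "1", b: "two" } (values stay strings)
// A plain string that is neither JSON nor key=value pairs throws, because
// pair.split("=") yields an undefined value and v.trim() then fails.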
@@ -2484,6 +2565,14 @@ var workflowMappings = {
     description: "A manual object mapping of which workflows in source and target should be matched for a merge."
   }
 };
+var outputPath = {
+  name: "output-path",
+  yargs: {
+    alias: ["output"],
+    type: "string",
+    description: "Path to output the fetched project to"
+  }
+};
 var workspace = {
   name: "workspace",
   yargs: {
@@ -2495,129 +2584,228 @@ var workspace = {
     if (!ws) {
       opts.workspace = process.cwd();
     } else {
-      opts.workspace =
+      opts.workspace = resolve_path_default(ws);
     }
   }
 };

 // src/projects/fetch.ts
 var options = [
-
-
+  alias,
+  apiKey,
   endpoint,
-  env,
   log,
-  override(outputPath, {
-    description: "Path to output the fetched project to"
-  }),
   logJson,
-  workspace,
   snapshots,
-
+  // TODO need to add support for this
   override(force, {
     description: "Overwrite local file contents with the fetched contents"
-  })
+  }),
+  outputPath,
+  env,
+  workspace
 ];
 var command = {
-  command: "fetch [
-  describe: `
-  builder: (yargs) => build(options, yargs).positional("
-    describe: "The id of the project
-    demandOption: true
+  command: "fetch [project]",
+  describe: `Download the latest version of a project from a lightning server (does not expand the project, use checkout)`,
+  builder: (yargs) => build(options, yargs).positional("project", {
+    describe: "The id, alias or UUID of the project to fetch. If not set, will default to the active project"
   }).example(
     "fetch 57862287-23e6-4650-8d79-e1dd88b24b1c",
     "Fetch an updated copy of a the above spec and state from a Lightning Instance"
   ),
   handler: ensure("project-fetch", options)
 };
-var
-
-  const
-
-  const
-  const {
-  const
-
-
-
-      endpoint: config2.endpoint,
-      env: options6.env || "project"
-    },
-    workspace2.getConfig()
+var printProjectName = (project) => `${project.qname} (${project.id})`;
+var handler2 = async (options7, logger) => {
+  const workspacePath = options7.workspace ?? process.cwd();
+  logger.debug("Using workspace at", workspacePath);
+  const workspace2 = new Workspace(workspacePath, logger, false);
+  const { outputPath: outputPath2 } = options7;
+  const localTargetProject = await resolveOutputProject(
+    workspace2,
+    options7,
+    logger
   );
-  const
-
-  const
-  const
+  const remoteProject = await fetchRemoteProject(workspace2, options7, logger);
+  ensureTargetCompatible(options7, remoteProject, localTargetProject);
+  const outputRoot = resolve_path_default(outputPath2 || workspacePath);
+  const projectsDir = remoteProject?.config.dirs.projects ?? ".projects";
+  const finalOutputPath = outputPath2 ?? `${outputRoot}/${projectsDir}/${remoteProject.qname}`;
   let format = void 0;
   if (outputPath2) {
-    const ext =
+    const ext = path12.extname(outputPath2).substring(1);
     if (ext.length) {
       format = ext;
     }
+    if (options7.alias) {
+      logger.warn(
+        `WARNING: alias "${options7.alias}" was set, but will be ignored as output path was provided`
+      );
+    }
   }
-
-
-    finalOutputPath
-    format,
-    true
-    // dry run - this won't trigger an actual write!
+  await serialize(remoteProject, finalOutputPath, format);
+  logger.success(
+    `Fetched project file to ${finalOutputPath}.${format ?? "yaml"}`
   );
-
-
-
-
+  return remoteProject;
+};
+async function resolveOutputProject(workspace2, options7, logger) {
+  logger.debug("Checking for local copy of project...");
+  if (options7.outputPath) {
+    try {
+      const customProject = await Project3.from("path", options7.outputPath);
+      logger.debug(
+        `Found existing local project ${printProjectName(customProject)} at`,
+        options7.outputPath
+      );
+      return customProject;
+    } catch (e) {
+      logger.debug("No project found at", options7.outputPath);
+    }
+  }
+  if (options7.alias) {
+    const aliasProject = workspace2.get(options7.alias);
+    if (aliasProject) {
+      logger.debug(
+        `Found local project from alias:`,
+        printProjectName(aliasProject)
+      );
+      return aliasProject;
+    } else {
+      logger.debug(`No local project found with alias ${options7.alias}`);
+    }
   }
-  const
-
+  const project = workspace2.get(options7.project);
+  if (project) {
+    logger.debug(
+      `Found local project from identifier:`,
+      printProjectName(project)
+    );
+    return project;
+  } else {
+    logger.debug(
+      `No local project found matching identifier: `,
+      options7.project
+    );
+  }
+}
+async function fetchRemoteProject(workspace2, options7, logger) {
+  logger.debug(`Fetching latest project data from app`);
+  const config2 = loadAppAuthConfig(options7, logger);
+  let projectUUID = options7.project;
+  const localProject = workspace2.get(options7.project);
+  if (localProject?.openfn?.uuid && localProject.openfn.uuid !== options7.project) {
+    projectUUID = localProject.openfn.uuid;
+    logger.debug(
+      `Resolved ${options7.project} to UUID ${projectUUID} from local project ${printProjectName(
+        localProject
+      )}}`
+    );
+  }
+  const projectEndpoint = localProject?.openfn?.endpoint ?? config2.endpoint;
+  const { data } = await fetchProject(
+    projectEndpoint,
+    config2.apiKey,
+    projectUUID,
+    logger
+  );
+  const project = await Project3.from(
+    "state",
+    data,
+    {
+      endpoint: projectEndpoint
+    },
+    {
+      ...workspace2.getConfig(),
+      alias: options7.alias ?? localProject?.alias ?? "main"
+    }
+  );
+  logger.debug(
+    `Loaded remote project ${project.openfn.uuid} with id ${project.id} and alias ${project.alias}`
   );
-  const skipVersionCheck = options6.force || // The user forced the checkout
-  !current || // there is no project on disk
-  !hasAnyHistory;
-  if (!skipVersionCheck && !project.canMergeInto(current)) {
-    throw new Error("Error! An incompatible project exists at this location");
-  }
-  await serialize(project, finalOutputPath, format);
-  logger.success(`Fetched project file to ${finalOutput}`);
   return project;
-}
+}
+function ensureTargetCompatible(options7, remoteProject, localProject) {
+  if (localProject) {
+    if (!options7.force && localProject.uuid != remoteProject.uuid) {
+      const error = new Error("PROJECT_EXISTS");
+      error.message = "A project with a different UUID exists at this location";
+      error.fix = `You have tried to fetch a remote project into a local project with a different UUID

Try adding an alias to rename the new project:

    openfn fetch ${options7.project} --alias ${remoteProject.id}

To ignore this error and override the local file, pass --force (-f)

    openfn fetch ${options7.project} --force
`;
+      error.fetched_project = {
+        uuid: remoteProject.uuid,
+        id: remoteProject.id,
+        alias: remoteProject.alias
+      };
+      error.local_project = {
+        uuid: localProject.uuid,
+        id: localProject.id,
+        alias: localProject.alias
+      };
+      delete error.stack;
+      throw error;
+    }
+    const hasAnyHistory = remoteProject.workflows.find(
+      (w) => w.workflow.history?.length
+    );
+    const skipVersionCheck = options7.force || // The user forced the checkout
+    !hasAnyHistory;
+    if (!skipVersionCheck && !remoteProject.canMergeInto(localProject)) {
+      throw new Error("Error! An incompatible project exists at this location");
+    }
+  }
+}

 // src/projects/checkout.ts
 import Project4, { Workspace as Workspace2 } from "@openfn/project";
-import
+import path13 from "path";
 import fs4 from "fs";
 import { rimraf } from "rimraf";
-var options2 = [
+var options2 = [log, workspace];
 var command2 = {
-  command: "checkout <project
+  command: "checkout <project>",
   describe: "Switch to a different OpenFn project in the same workspace",
   handler: ensure("project-checkout", options2),
-  builder: (yargs) => build(options2, yargs)
+  builder: (yargs) => build(options2, yargs).positional("project", {
+    describe: "The id, alias or UUID of the project to chcekout",
+    demandOption: true
+  })
 };
-var handler3 = async (
-  const
-  const workspacePath =
+var handler3 = async (options7, logger) => {
+  const projectIdentifier = options7.project;
+  const workspacePath = options7.workspace ?? process.cwd();
   const workspace2 = new Workspace2(workspacePath, logger);
   const { project: _, ...config2 } = workspace2.getConfig();
   let switchProject;
-  if (/\.(yaml|json)$/.test(
-    const filePath =
+  if (/\.(yaml|json)$/.test(projectIdentifier)) {
+    const filePath = projectIdentifier.startsWith("/") ? projectIdentifier : path13.join(workspacePath, projectIdentifier);
     logger.debug("Loading project from path ", filePath);
     switchProject = await Project4.from("path", filePath, config2);
   } else {
-    switchProject = workspace2.get(
+    switchProject = workspace2.get(projectIdentifier);
   }
   if (!switchProject) {
-    throw new Error(
+    throw new Error(
+      `Project with id ${projectIdentifier} not found in the workspace`
+    );
   }
-  await rimraf(
+  await rimraf(path13.join(workspacePath, config2.workflowRoot ?? "workflows"));
   const files = switchProject.serialize("fs");
   for (const f in files) {
     if (files[f]) {
-      fs4.mkdirSync(
+      fs4.mkdirSync(path13.join(workspacePath, path13.dirname(f)), {
        recursive: true
      });
-      fs4.writeFileSync(
+      fs4.writeFileSync(path13.join(workspacePath, f), files[f]);
     } else {
       logger.warn("WARNING! No content for file", f);
     }
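The rewritten fetch handler resolves a local target project (by output path, alias, or identifier), pulls the remote state, and refuses to overwrite a local project whose UUID differs unless --force is passed; the thrown error carries fix text suggesting --alias or --force. A condensed standalone sketch of that guard (the guard() name is hypothetical):

// Condensed sketch of the UUID guard in ensureTargetCompatible above.
function guard(local, remote, { force } = {}) {
  if (!local) return; // nothing on disk: always ok
  if (!force && local.uuid !== remote.uuid) {
    throw new Error("A project with a different UUID exists at this location");
  }
}

guard(undefined, { uuid: "b" });                      // ok: no local project
guard({ uuid: "a" }, { uuid: "b" }, { force: true }); // ok: forced overwrite
// guard({ uuid: "a" }, { uuid: "b" });               // throws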
@@ -2626,27 +2814,48 @@ var handler3 = async (options6, logger) => {
 };

 // src/projects/pull.ts
-
-
+var options3 = [
+  alias,
+  env,
+  workspace,
+  apiKey,
+  endpoint,
+  log,
+  override(path11, {
+    description: "path to output the project to"
+  }),
+  logJson,
+  projectPath,
+  snapshots,
+  path11,
+  force
+];
+var command3 = {
+  command: "pull [project]",
+  describe: `Pull a project from a Lightning Instance and expand to the file system (ie fetch + checkout)`,
+  builder: (yargs) => build(options3, yargs).positional("project", {
+    describe: "The UUID, local id or local alias of the project to pull"
+  }).example(
+    "pull 57862287-23e6-4650-8d79-e1dd88b24b1c",
+    "Pull project with a UUID from a lightning instance"
+  ),
+  handler: ensure("project-pull", options3)
+};
+async function handler4(options7, logger) {
+  await handler2(options7, logger);
   logger.success(`Downloaded latest project version`);
-  await handler3(
-    {
-      ...options6,
-      projectId: project.id
-    },
-    logger
-  );
+  await handler3(options7, logger);
   logger.success(`Checked out project locally`);
 }
 var pull_default = handler4;

 // src/pull/handler.ts
-async function pullHandler(
-  if (
-    return pull_default(
+async function pullHandler(options7, logger) {
+  if (options7.beta) {
+    return pull_default(options7, logger);
   }
   try {
-    const config2 = mergeOverrides2(await getConfig2(
+    const config2 = mergeOverrides2(await getConfig2(options7.configPath), options7);
     if (process.env["OPENFN_API_KEY"]) {
       logger.info("Using OPENFN_API_KEY environment variable");
       config2.apiKey = process.env["OPENFN_API_KEY"];
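The new project-pull command is a thin composition of the two handlers above, sharing one options object: fetch the project file, then check it out. Schematically (fetch and checkout are stand-ins for handler2 and handler3):

// Schematic of handler4 above; fetch/checkout stand in for handler2/handler3.
async function pull(options, logger) {
  await fetch(options, logger);    // download to .projects/<qname>.yaml by default
  await checkout(options, logger); // expand the project onto the filesystem
}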
@@ -2658,10 +2867,10 @@ async function pullHandler(options6, logger) {
     logger.always(
       "Downloading existing project state (as JSON) from the server."
     );
-    const { data: project } = await
+    const { data: project } = await getProject(
       config2,
-
-
+      options7.projectId,
+      options7.snapshots
     );
     if (!project) {
       logger.error("ERROR: Project not found.");
@@ -2674,8 +2883,8 @@ async function pullHandler(options6, logger) {
     const state = getStateFromProjectPayload(project);
     logger.always("Downloading the project spec (as YAML) from the server.");
     const queryParams = new URLSearchParams();
-    queryParams.append("id",
-
+    queryParams.append("id", options7.projectId);
+    options7.snapshots?.forEach(
       (snapshot) => queryParams.append("snapshots[]", snapshot)
     );
     const url2 = new URL(
@@ -2697,7 +2906,7 @@ async function pullHandler(options6, logger) {
       process.exitCode = 1;
       process.exit(1);
     }
-    const resolvedPath =
+    const resolvedPath = path14.resolve(config2.specPath);
     logger.debug("reading spec from", resolvedPath);
     const updatedSpec = await syncRemoteSpec(
       await res.text(),
@@ -2706,7 +2915,7 @@ async function pullHandler(options6, logger) {
       logger
     );
     await fs5.writeFile(
-
+      path14.resolve(config2.statePath),
       JSON.stringify(state, null, 2)
     );
     await fs5.writeFile(resolvedPath, updatedSpec);
@@ -2724,13 +2933,13 @@ async function pullHandler(options6, logger) {
     throw error;
   }
 }
-function mergeOverrides2(config2,
+function mergeOverrides2(config2, options7) {
   return {
     ...config2,
     apiKey: pickFirst2(process.env["OPENFN_API_KEY"], config2.apiKey),
     endpoint: pickFirst2(process.env["OPENFN_ENDPOINT"], config2.endpoint),
-    configPath:
-    requireConfirmation: pickFirst2(
+    configPath: options7.configPath,
+    requireConfirmation: pickFirst2(options7.confirm, config2.requireConfirmation)
   };
 }
 function pickFirst2(...args) {
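mergeOverrides2 keeps the same precedence as before: environment variables beat config-file values via pickFirst2. Its body is cut off in this diff, but from its call sites it plausibly returns the first defined argument:

// Assumed semantics of pickFirst2 (its body is elided in this diff).
const pickFirst = (...args) => args.find((v) => v !== undefined && v !== null);

pickFirst(process.env.OPENFN_API_KEY, "key-from-config"); // env wins when set
pickFirst(undefined, "fallback");                         // => "fallback"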
@@ -2745,35 +2954,36 @@ __export(handler_exports, {
   fetch: () => handler2,
   list: () => handler5,
   merge: () => handler7,
+  pull: () => handler4,
   version: () => handler6
 });

 // src/projects/list.ts
 import { Workspace as Workspace3 } from "@openfn/project";
-var
-var
+var options4 = [log, workspace];
+var command4 = {
   command: "list [project-path]",
   describe: "List all the openfn projects available in the current directory",
   aliases: ["project", "$0"],
-  handler: ensure("project-list",
-  builder: (yargs) => build(
+  handler: ensure("project-list", options4),
+  builder: (yargs) => build(options4, yargs)
 };
-var handler5 = async (
+var handler5 = async (options7, logger) => {
   logger.info("Searching for projects in workspace at:");
-  logger.info("  ",
+  logger.info("  ", options7.workspace);
   logger.break();
-  const workspace2 = new Workspace3(
+  const workspace2 = new Workspace3(options7.workspace);
   if (!workspace2.valid) {
     throw new Error("No OpenFn projects found");
   }
   logger.always(`Available openfn projects

-${workspace2.list().map((p) => describeProject(p, p
+${workspace2.list().map((p) => describeProject(p, p === workspace2.getActiveProject())).join("\n\n")}
 `);
 };
 function describeProject(project, active = false) {
   const uuid = project.openfn?.uuid;
-  return `${project.id} ${active ? "(active)" : ""}
+  return `${project.alias || "(no alias)"} | ${project.id} ${active ? "(active)" : ""}
 ${uuid || "<project-id>"}
 workflows:
 ${project.workflows.map((w) => " - " + w.id).join("\n")}`;
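With this change each entry in the project list now leads with the project alias (or "(no alias)") before the id, and the active project is determined by comparing each entry against getActiveProject() directly. An assumed rendering of one printed entry:

// Assumed shape of one entry printed by the list handler after this change:
//
//   staging | my-project (active)
//   57862287-23e6-4650-8d79-e1dd88b24b1c
//   workflows:
//    - sync-patients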
@@ -2781,25 +2991,25 @@ ${project.workflows.map((w) => " - " + w.id).join("\n")}`;

 // src/projects/version.ts
 import { Workspace as Workspace4 } from "@openfn/project";
-var
-var
+var options5 = [workflow, workspace, workflowMappings];
+var command5 = {
   command: "version [workflow]",
   describe: "Returns the version hash of a given workflow in a workspace",
-  handler: ensure("project-version",
-  builder: (yargs) => build(
+  handler: ensure("project-version", options5),
+  builder: (yargs) => build(options5, yargs)
 };
-var handler6 = async (
-  const workspace2 = new Workspace4(
+var handler6 = async (options7, logger) => {
+  const workspace2 = new Workspace4(options7.workspace);
   if (!workspace2.valid) {
     logger.error("Command was run in an invalid openfn workspace");
     return;
   }
   const output = /* @__PURE__ */ new Map();
   const activeProject = workspace2.getActiveProject();
-  if (
-    const workflow2 = activeProject?.getWorkflow(
+  if (options7.workflow) {
+    const workflow2 = activeProject?.getWorkflow(options7.workflow);
     if (!workflow2) {
-      logger.error(`No workflow found with id ${
+      logger.error(`No workflow found with id ${options7.workflow}`);
       return;
     }
     output.set(workflow2.name || workflow2.id, workflow2.getVersionHash());
@@ -2813,7 +3023,7 @@ var handler6 = async (options6, logger) => {
     return;
   }
   let final;
-  if (
+  if (options7.json) {
     final = JSON.stringify(Object.fromEntries(output), void 0, 2);
   } else {
     final = Array.from(output.entries()).map(([key, value]) => key + "\n" + value).join("\n\n");
@@ -2825,16 +3035,16 @@ ${final}`);

 // src/projects/merge.ts
 import Project6, { Workspace as Workspace5 } from "@openfn/project";
-import
+import path15 from "node:path";
 import fs6 from "node:fs/promises";
-var
-  projectId,
+var options6 = [
   removeUnmapped,
   workflowMappings,
-  log,
   workspace,
+  log,
   // custom output because we don't want defaults or anything
   {
+    // TODO presumably if we do this we don't also checkout?
     name: "output-path",
     yargs: {
       alias: "o",
@@ -2852,22 +3062,22 @@ var options5 = [
     description: "Force a merge even when workflows are incompatible"
   })
 ];
-var
-  command: "merge <project
-  describe: "Merges the specified project into the currently checked out project",
-  handler: ensure("project-merge",
-  builder: (yargs) => build(
-};
-var handler7 = async (
-  const
-  const workspace2 = new Workspace5(
+var command6 = {
+  command: "merge <project>",
+  describe: "Merges the specified project (by UUID, id or alias) into the currently checked out project",
+  handler: ensure("project-merge", options6),
+  builder: (yargs) => build(options6, yargs)
+};
+var handler7 = async (options7, logger) => {
+  const workspacePath = options7.workspace;
+  const workspace2 = new Workspace5(workspacePath);
   if (!workspace2.valid) {
     logger.error("Command was run in an invalid openfn workspace");
     return;
   }
   let targetProject;
-  if (
-    const basePath =
+  if (options7.base) {
+    const basePath = path15.resolve(options7.base);
     logger.debug("Loading target project from path", basePath);
     targetProject = await Project6.from("path", basePath);
   } else {
@@ -2878,17 +3088,22 @@ var handler7 = async (options6, logger) => {
   }
   logger.debug(`Loading target project from workspace (${targetProject.id})`);
 }
+const sourceProjectIdentifier = options7.project;
 let sourceProject;
-if (/\.(
-  const filePath =
+if (/\.(ya?ml|json)$/.test(sourceProjectIdentifier)) {
+  const filePath = path15.join(workspacePath, sourceProjectIdentifier);
   logger.debug("Loading source project from path ", filePath);
   sourceProject = await Project6.from("path", filePath);
 } else {
-  logger.debug(
-
+  logger.debug(
+    `Loading source project from workspace ${sourceProjectIdentifier}`
+  );
+  sourceProject = workspace2.get(sourceProjectIdentifier);
 }
 if (!sourceProject) {
-  logger.error(
+  logger.error(
+    `Project "${sourceProjectIdentifier}" not found in the workspace`
+  );
   return;
 }
 if (targetProject.id === sourceProject.id) {
@@ -2899,20 +3114,20 @@ var handler7 = async (options6, logger) => {
   logger.error("The checked out project has no id");
   return;
 }
-const finalPath =
+const finalPath = options7.outputPath ?? workspace2.getProjectPath(targetProject.id);
 if (!finalPath) {
   logger.error("Path to checked out project not found.");
   return;
 }
 const final = Project6.merge(sourceProject, targetProject, {
-  removeUnmapped:
-  workflowMappings:
-  force:
+  removeUnmapped: options7.removeUnmapped,
+  workflowMappings: options7.workflowMappings,
+  force: options7.force
 });
 let outputFormat = workspace2.config.formats.project;
-if (
+if (options7.outputPath?.endsWith(".json")) {
   outputFormat = "json";
-} else if (
+} else if (options7.outputPath?.endsWith(".yaml")) {
   outputFormat = "yaml";
 }
 let finalState = final.serialize("state", {
@@ -2926,10 +3141,9 @@ var handler7 = async (options6, logger) => {
   logger.info("Checking out merged project to filesystem");
   await handler3(
     {
-
-
-
-      log: options6.log
+      workspace: workspacePath,
+      project: options7.outputPath ? finalPath : final.id,
+      log: options7.log
     },
     logger
   );
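In the merge handler, the output format now derives from the workspace default and is overridden by the --output-path extension; the merged project is then checked out by path (when --output-path was given) or by id. The format-selection logic from the hunk above, isolated for clarity:

// Format selection from the hunk above (workspace/options names illustrative).
let outputFormat = workspace.config.formats.project; // workspace default, e.g. "yaml"
if (options.outputPath?.endsWith(".json")) {
  outputFormat = "json";
} else if (options.outputPath?.endsWith(".yaml")) {
  outputFormat = "yaml";
}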
@@ -2940,7 +3154,7 @@ var handler7 = async (options6, logger) => {

 // src/util/print-versions.ts
 import { readFileSync as readFileSync2 } from "node:fs";
-import
+import path16 from "node:path";
 import url from "node:url";
 import { getNameAndVersion as getNameAndVersion7 } from "@openfn/runtime";
 import { mainSymbols } from "figures";
@@ -2952,15 +3166,15 @@ var { triangleRightSmall: t } = mainSymbols;
 var loadVersionFromPath = (adaptorPath) => {
   try {
     const pkg = JSON.parse(
-      readFileSync2(
+      readFileSync2(path16.resolve(adaptorPath, "package.json"), "utf8")
     );
     return pkg.version;
   } catch (e) {
     return "unknown";
   }
 };
-var printVersions = async (logger,
-  const { adaptors, logJson: logJson2 } =
+var printVersions = async (logger, options7 = {}, includeComponents = false) => {
+  const { adaptors, logJson: logJson2 } = options7;
   let longestAdaptorName = "";
   const adaptorList = [];
   adaptors?.forEach((adaptor) => {
@@ -2970,7 +3184,7 @@ var printVersions = async (logger, options6 = {}, includeComponents = false) =>
       const [namePart, pathPart] = adaptor.split("=");
       adaptorVersion = loadVersionFromPath(pathPart);
       adaptorName = getNameAndVersion7(namePart).name;
-    } else if (
+    } else if (options7.monorepoPath) {
       adaptorName = getNameAndVersion7(adaptor).name;
       adaptorVersion = "monorepo";
     } else {
@@ -2987,7 +3201,7 @@ var printVersions = async (logger, options6 = {}, includeComponents = false) =>
     ...[NODE, CLI2, RUNTIME2, COMPILER2, longestAdaptorName].map((s) => s.length)
   );
   const prefix = (str) => ` ${t} ${str.padEnd(longest + 4, " ")}`;
-  const dirname3 =
+  const dirname3 = path16.dirname(url.fileURLToPath(import.meta.url));
   const pkg = JSON.parse(readFileSync2(`${dirname3}/../../package.json`, "utf8"));
   const { version, dependencies } = pkg;
   const compilerVersion = dependencies["@openfn/compiler"];
@@ -3068,6 +3282,7 @@ var handlers = {
   ["repo-install"]: install,
   ["repo-pwd"]: pwd,
   ["repo-list"]: list,
+  ["project-pull"]: handler4,
   ["project-list"]: handler5,
   ["project-version"]: handler6,
   ["project-merge"]: handler7,
@@ -3075,13 +3290,13 @@ var handlers = {
   ["project-fetch"]: handler2,
   version: async (opts, logger) => print_versions_default(logger, opts, true)
 };
-var parse = async (
-  const logger = log2 || logger_default(CLI,
-  if (
-    await print_versions_default(logger,
+var parse = async (options7, log2) => {
+  const logger = log2 || logger_default(CLI, options7);
+  if (options7.command === "execute" || options7.command === "test") {
+    await print_versions_default(logger, options7);
   }
   report(logger);
-  const { monorepoPath } =
+  const { monorepoPath } = options7;
   if (monorepoPath) {
     if (monorepoPath === "ERR") {
       logger.error(
@@ -3092,19 +3307,19 @@ var parse = async (options6, log2) => {
     }
     await validateMonoRepo(monorepoPath, logger);
     logger.success(`Loading adaptors from monorepo at ${monorepoPath}`);
-
+    options7.adaptors = map_adaptors_to_monorepo_default(
       monorepoPath,
-
+      options7.adaptors,
       logger
     );
   }
-  const handler8 = handlers[
+  const handler8 = handlers[options7.command];
   if (!handler8) {
-    logger.error(`Unrecognised command: ${
+    logger.error(`Unrecognised command: ${options7.command}`);
     process.exit(1);
   }
   try {
-    return await handler8(
+    return await handler8(options7, logger);
   } catch (e) {
     if (!process.exitCode) {
       process.exitCode = e.exitCode || 1;