@openfn/cli 1.20.3 → 1.22.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/index.js +188 -117
- package/dist/process/runner.js +783 -571
- package/package.json +5 -5
package/dist/process/runner.js
CHANGED
@@ -21,13 +21,13 @@ var urlMap = {
   ["local"]: LOCAL_URL
 };
 var DEFAULT_ENV = "staging";
-var getURL = (options6) => {
-  if (options6.apolloUrl) {
-    if (options6.apolloUrl in urlMap) {
-      return urlMap[options6.apolloUrl];
+var getURL = (options7) => {
+  if (options7.apolloUrl) {
+    if (options7.apolloUrl in urlMap) {
+      return urlMap[options7.apolloUrl];
     }
-    if (options6.apolloUrl.startsWith("http")) {
-      return options6.apolloUrl;
+    if (options7.apolloUrl.startsWith("http")) {
+      return options7.apolloUrl;
     }
     throw new Error(`Unrecognised apollo URL`);
   }
@@ -52,14 +52,14 @@ var outputFiles = (files, logger) => {
 };
 
 // src/apollo/handler.ts
-var apolloHandler = async (options6, logger) => {
-  logger.always(`Calling Apollo service: ${options6.service}`);
-  const json = await loadPayload(logger, options6.payload);
-  const url2 = getURL(options6);
+var apolloHandler = async (options7, logger) => {
+  logger.always(`Calling Apollo service: ${options7.service}`);
+  const json = await loadPayload(logger, options7.payload);
+  const url2 = getURL(options7);
   logger.success(`Using apollo server at`, url2);
-  const result = await callApollo(url2, options6.service, json, logger);
+  const result = await callApollo(url2, options7.service, json, logger);
   if (result) {
-    await serializeOutput(options6, result, logger);
+    await serializeOutput(options7, result, logger);
   } else {
     logger.warn("No output returned from Apollo");
   }
@@ -79,15 +79,15 @@ var write = async (basePath, filePath, content, logger) => {
   await writeFile(dest, content);
   logger.success(`Wrote content to ${dest}`);
 };
-var serializeOutput = async (options6, result, logger) => {
-  if (options6.outputPath) {
-    if (result.files && !options6.outputPath.endsWith(".json")) {
+var serializeOutput = async (options7, result, logger) => {
+  if (options7.outputPath) {
+    if (result.files && !options7.outputPath.endsWith(".json")) {
       for (const p in result.files) {
-        await write(options6.outputPath, p, result.files[p], logger);
+        await write(options7.outputPath, p, result.files[p], logger);
       }
     } else {
       await write(
-        options6.outputPath,
+        options7.outputPath,
         "",
         JSON.stringify(result, null, 2),
         logger
@@ -133,14 +133,14 @@ var callApollo = async (apolloBaseUrl, serviceName, payload, logger) => {
     });
   });
 };
-var loadPayload = async (logger, path15) => {
-  if (!path15) {
+var loadPayload = async (logger, path17) => {
+  if (!path17) {
     logger.warn("No JSON payload provided");
     logger.warn("Most apollo services require JSON to be uploaded");
     return {};
   }
-  if (path15.endsWith(".json")) {
-    const str = await readFile(path15, "utf8");
+  if (path17.endsWith(".json")) {
+    const str = await readFile(path17, "utf8");
     const json = JSON.parse(str);
     logger.debug("Loaded JSON payload");
     return json;
@@ -148,6 +148,11 @@ var loadPayload = async (logger, path15) => {
 };
 var handler_default = apolloHandler;
 
+// src/execute/handler.ts
+import { yamlToJson as yamlToJson2 } from "@openfn/project";
+import { readFile as readFile3 } from "node:fs/promises";
+import path5 from "node:path";
+
 // src/execute/execute.ts
 import run, { NOTIFY_JOB_COMPLETE, getNameAndVersion } from "@openfn/runtime";
 
@@ -164,17 +169,17 @@ var namespaces = {
   [COMPILER]: "CMP",
   [JOB]: "JOB"
 };
-var createLogger2 = (name = "", options6) => {
-  const logOptions = options6.log || {};
+var createLogger2 = (name = "", options7) => {
+  const logOptions = options7.log || {};
   let json = false;
   let level = logOptions[name] || logOptions.default || "default";
-  if (options6.logJson) {
+  if (options7.logJson) {
    json = true;
  }
  return actualCreateLogger(namespaces[name] || name, {
    level,
    json,
-    sanitize: options6.sanitize || "none",
+    sanitize: options7.sanitize || "none",
    ...logOptions
  });
 };
@@ -185,8 +190,8 @@ var createNullLogger = () => createLogger2(void 0, { log: { default: "none" } })
 import fs from "node:fs";
 import path2 from "node:path";
 import { rmdir } from "node:fs/promises";
-var getCachePath = async (plan, options6, stepId) => {
-  const { baseDir } = options6;
+var getCachePath = async (plan, options7, stepId) => {
+  const { baseDir } = options7;
   const { name } = plan.workflow;
   const basePath = `${baseDir}/.cli-cache/${name}`;
   if (stepId) {
@@ -194,10 +199,10 @@ var getCachePath = async (plan, options6, stepId) => {
   }
   return path2.resolve(basePath);
 };
-var ensureGitIgnore = (options6) => {
-  if (!options6._hasGitIgnore) {
+var ensureGitIgnore = (options7) => {
+  if (!options7._hasGitIgnore) {
     const ignorePath = path2.resolve(
-      options6.baseDir,
+      options7.baseDir,
       ".cli-cache",
       ".gitignore"
     );
@@ -207,19 +212,19 @@ var ensureGitIgnore = (options6) => {
       fs.writeFileSync(ignorePath, "*");
     }
   }
-  options6._hasGitIgnore = true;
+  options7._hasGitIgnore = true;
 };
-var saveToCache = async (plan, stepId, output, options6, logger) => {
-  if (options6.cacheSteps) {
-    const cachePath = await getCachePath(plan, options6, stepId);
+var saveToCache = async (plan, stepId, output, options7, logger) => {
+  if (options7.cacheSteps) {
+    const cachePath = await getCachePath(plan, options7, stepId);
     fs.mkdirSync(path2.dirname(cachePath), { recursive: true });
-    ensureGitIgnore(options6);
+    ensureGitIgnore(options7);
     logger.info(`Writing ${stepId} output to ${cachePath}`);
     fs.writeFileSync(cachePath, JSON.stringify(output));
   }
 };
-var clearCache = async (plan, options6, logger) => {
-  const cacheDir = await getCachePath(plan, options6);
+var clearCache = async (plan, options7, logger) => {
+  const cacheDir = await getCachePath(plan, options7);
   try {
     await rmdir(cacheDir, { recursive: true });
     logger.info(`Cleared cache at ${cacheDir}`);
@@ -262,13 +267,13 @@ var execute_default = async (plan, input, opts, logger) => {
 };
 function parseAdaptors(plan) {
   const extractInfo = (specifier) => {
-    const [module, path15] = specifier.split("=");
+    const [module, path17] = specifier.split("=");
     const { name, version } = getNameAndVersion(module);
     const info = {
       name
     };
-    if (path15) {
-      info.path = path15;
+    if (path17) {
+      info.path = path17;
     }
     if (version) {
       info.version = version;
@@ -289,7 +294,7 @@ function parseAdaptors(plan) {
 // src/execute/serialize-output.ts
 import { mkdir as mkdir2, writeFile as writeFile2 } from "node:fs/promises";
 import { dirname } from "node:path";
-var serializeOutput2 = async (options6, result, logger) => {
+var serializeOutput2 = async (options7, result, logger) => {
   let output = result;
   if (output && (output.configuration || output.data)) {
     const { configuration, ...rest } = result;
@@ -300,14 +305,14 @@ var serializeOutput2 = async (options6, result, logger) => {
   } else {
     output = JSON.stringify(output, void 0, 2);
   }
-  if (options6.outputStdout) {
+  if (options7.outputStdout) {
     logger.success(`Result: `);
     logger.always(output);
-  } else if (options6.outputPath) {
-    await mkdir2(dirname(options6.outputPath), { recursive: true });
-    logger.debug(`Writing output to ${options6.outputPath}`);
-    await writeFile2(options6.outputPath, output);
-    logger.success(`State written to ${options6.outputPath}`);
+  } else if (options7.outputPath) {
+    await mkdir2(dirname(options7.outputPath), { recursive: true });
+    logger.debug(`Writing output to ${options7.outputPath}`);
+    await writeFile2(options7.outputPath, output);
+    logger.success(`State written to ${options7.outputPath}`);
   }
   return output;
 };
@@ -326,6 +331,39 @@ var getAutoinstallTargets = (plan) => {
 };
 var get_autoinstall_targets_default = getAutoinstallTargets;
 
+// src/execute/apply-credential-map.ts
+var applyCredentialMap = (plan, map = {}, logger) => {
+  const stepsWithCredentialIds = plan.workflow.steps.filter(
+    (step) => typeof step.configuration === "string" && !step.configuration.endsWith(".json")
+  );
+  const unmapped = {};
+  for (const step of stepsWithCredentialIds) {
+    if (map[step.configuration]) {
+      logger?.debug(
+        `Applying credential ${step.configuration} to "${step.name ?? step.id}"`
+      );
+      step.configuration = map[step.configuration];
+    } else {
+      unmapped[step.configuration] = true;
+      delete step.configuration;
+    }
+  }
+  if (Object.keys(unmapped).length) {
+    logger?.warn(
+      `WARNING: credential IDs were found in the workflow, but values have not been provided:`
+    );
+    logger?.warn("  ", Object.keys(unmapped).join(","));
+    if (map) {
+      logger?.warn(
+        "If the workflow fails, add these credentials to the credential map"
+      );
+    } else {
+      logger?.warn("Pass a credential map with --credentials");
+    }
+  }
+};
+var apply_credential_map_default = applyCredentialMap;
+
 // src/repo/handler.ts
 import { exec } from "node:child_process";
 import treeify from "treeify";
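
The new `src/execute/apply-credential-map.ts` module above swaps credential IDs in a workflow plan for real credential bodies before execution. A minimal sketch of the behaviour it implements, using a hypothetical plan and map (the shapes are read off the function body above, not from CLI documentation):

    // Steps whose `configuration` is a string not ending in ".json" are
    // treated as credential IDs to be resolved against the map.
    const plan = {
      workflow: {
        steps: [
          { id: "fetch", configuration: "cred-uuid-1" },       // resolved from the map
          { id: "load", configuration: "./local-creds.json" }, // file paths are skipped
        ],
      },
    };
    const map = { "cred-uuid-1": { username: "demo", password: "secret" } };
    applyCredentialMap(plan, map, console);
    // steps[0].configuration now holds the credential object; any unmapped
    // ID is removed from its step and reported with a warning.

Unmapped IDs are collected rather than failing hard, so the run can proceed and the user is told to extend the map passed via `--credentials`.
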
@@ -371,16 +409,16 @@ var removePackage = async (packageSpecifier, repoDir, logger) => {
     logger.warn(`Failed to remove ${aliasedName}: ${error.message}`);
   }
 };
-var clean = async (options6, logger) => {
-  if (options6.repoDir) {
+var clean = async (options7, logger) => {
+  if (options7.repoDir) {
     const doIt = await logger.confirm(
-      `This will remove everything at ${options6.repoDir}. Do you wish to proceed?`,
-      options6.force
+      `This will remove everything at ${options7.repoDir}. Do you wish to proceed?`,
+      options7.force
     );
     if (doIt) {
       return new Promise((resolve) => {
-        logger.info(`Cleaning repo at ${options6.repoDir} `);
-        exec(`npm exec rimraf ${options6.repoDir}`, () => {
+        logger.info(`Cleaning repo at ${options7.repoDir} `);
+        exec(`npm exec rimraf ${options7.repoDir}`, () => {
           logger.success("Repo cleaned");
           resolve();
         });
@@ -391,12 +429,12 @@ var clean = async (options6, logger) => {
     logger.error("No repoDir path detected");
   }
 };
-var pwd = async (options6, logger) => {
+var pwd = async (options7, logger) => {
   logger.info(`OPENFN_REPO_DIR is set to ${process.env.OPENFN_REPO_DIR}`);
-  logger.success(`Repo working directory is: ${options6.repoDir}`);
+  logger.success(`Repo working directory is: ${options7.repoDir}`);
 };
-var getDependencyList = async (options6, _logger) => {
-  const pkg = await loadRepoPkg(options6.repoDir);
+var getDependencyList = async (options7, _logger) => {
+  const pkg = await loadRepoPkg(options7.repoDir);
   const result = {};
   if (pkg) {
     Object.keys(pkg.dependencies).forEach((key) => {
@@ -409,9 +447,9 @@ var getDependencyList = async (options6, _logger) => {
   }
   return result;
 };
-var list = async (options6, logger) => {
-  const tree = await getDependencyList(options6, logger);
-  await pwd(options6, logger);
+var list = async (options7, logger) => {
+  const tree = await getDependencyList(options7, logger);
+  await pwd(options7, logger);
   const output = {};
   Object.keys(tree).forEach((key) => {
     const versions = tree[key];
@@ -531,10 +569,10 @@ var stripVersionSpecifier = (specifier) => {
   return specifier;
 };
 var resolveSpecifierPath = async (pattern, repoDir, log2) => {
-  const [specifier, path15] = pattern.split("=");
-  if (path15) {
-    log2.debug(`Resolved ${specifier} to path: ${path15}`);
-    return path15;
+  const [specifier, path17] = pattern.split("=");
+  if (path17) {
+    log2.debug(`Resolved ${specifier} to path: ${path17}`);
+    return path17;
   }
   const repoPath = await getModulePath(specifier, repoDir, log2);
   if (repoPath) {
@@ -543,7 +581,7 @@ var resolveSpecifierPath = async (pattern, repoDir, log2) => {
   return null;
 };
 var loadTransformOptions = async (opts, log2) => {
-  const options6 = {
+  const options7 = {
     logger: log2 || logger_default(COMPILER, opts),
     trace: opts.trace
   };
@@ -553,12 +591,12 @@ var loadTransformOptions = async (opts, log2) => {
     let exports;
     const [specifier] = adaptorInput.split("=");
     log2.debug(`Trying to preload types for ${specifier}`);
-    const path15 = await resolveSpecifierPath(adaptorInput, opts.repoDir, log2);
-    if (path15) {
+    const path17 = await resolveSpecifierPath(adaptorInput, opts.repoDir, log2);
+    if (path17) {
       try {
-        exports = await preloadAdaptorExports(path15, log2);
+        exports = await preloadAdaptorExports(path17, log2);
       } catch (e) {
-        log2.error(`Failed to load adaptor typedefs from path ${path15}`);
+        log2.error(`Failed to load adaptor typedefs from path ${path17}`);
         log2.error(e);
       }
     }
@@ -571,12 +609,12 @@ var loadTransformOptions = async (opts, log2) => {
         exportAll: true
       });
     }
-    options6["add-imports"] = {
+    options7["add-imports"] = {
       ignore: opts.ignoreImports,
       adaptors: adaptorsConfig
    };
  }
-  return options6;
+  return options7;
 };
 
 // src/util/load-state.ts
@@ -595,7 +633,7 @@ var getUpstreamStepId = (plan, stepId) => {
   }
 };
 var load_state_default = async (plan, opts, log2, start) => {
-  const { stateStdin, statePath
+  const { stateStdin, statePath } = opts;
   log2.debug("Loading state...");
   if (stateStdin) {
     try {
@@ -610,15 +648,15 @@ var load_state_default = async (plan, opts, log2, start) => {
       process.exit(1);
     }
   }
-  if (
+  if (statePath) {
     try {
-      const str = await fs2.readFile(
+      const str = await fs2.readFile(statePath, "utf8");
       const json = JSON.parse(str);
-      log2.success(`Loaded state from ${
+      log2.success(`Loaded state from ${statePath}`);
       log2.debug("state:", json);
       return json;
     } catch (e) {
-      log2.warn(`Error loading state from ${
+      log2.warn(`Error loading state from ${statePath}`);
       log2.warn(e);
     }
   }
@@ -669,12 +707,12 @@ var load_state_default = async (plan, opts, log2, start) => {
 };
 
 // src/util/validate-adaptors.ts
-var validateAdaptors = async (options6, logger) => {
-  if (options6.skipAdaptorValidation) {
+var validateAdaptors = async (options7, logger) => {
+  if (options7.skipAdaptorValidation) {
     return;
   }
-  const isPlan = options6.planPath || options6.workflowPath || options6.workflow;
-  const hasDeclaredAdaptors = options6.adaptors && options6.adaptors.length > 0;
+  const isPlan = options7.planPath || options7.workflowPath || options7.workflow;
+  const hasDeclaredAdaptors = options7.adaptors && options7.adaptors.length > 0;
   if (isPlan && hasDeclaredAdaptors) {
     logger.error("ERROR: adaptor and workflow provided");
     logger.error(
@@ -776,40 +814,47 @@ var mapAdaptorsToMonorepo = (monorepoPath = "", input = [], log2) => {
 };
 var map_adaptors_to_monorepo_default = mapAdaptorsToMonorepo;
 
+// src/util/resolve-path.ts
+import nodepath from "node:path";
+import os from "node:os";
+var resolve_path_default = (path17, root) => {
+  return path17.startsWith("~") ? path17.replace(`~`, os.homedir) : nodepath.resolve(root ?? "", path17);
+};
+
 // src/util/load-plan.ts
-var loadPlan = async (options6, logger) => {
-  const { workflowPath, planPath, expressionPath } = options6;
-  if (options6.path && /ya?ml$/.test(options6.path)) {
-    const content = await fs3.readFile(path4.resolve(options6.path), "utf-8");
+var loadPlan = async (options7, logger) => {
+  const { workflowPath, planPath, expressionPath } = options7;
+  if (options7.path && /ya?ml$/.test(options7.path)) {
+    const content = await fs3.readFile(path4.resolve(options7.path), "utf-8");
     const workflow2 = yamlToJson(content);
-    options6.baseDir = dirname2(options6.path);
-    return loadXPlan({ workflow: workflow2 }, options6, logger);
+    options7.baseDir = dirname2(options7.path);
+    return loadXPlan({ workflow: workflow2 }, options7, logger);
   }
-  if (options6.path && options6.workflow) {
-    options6.baseDir = options6.path;
-    return fromProject(options6.path, options6.workflow, options6, logger);
+  if (options7.path && options7.workflow) {
+    options7.baseDir = options7.path;
+    return fromProject(options7.path, options7.workflow, options7, logger);
   }
-  if (!expressionPath && !workflowPath && !/\.(js|json|yaml)+$/.test(options6.path || "") && !options6.workflow) {
-    const workflow2 = options6.path;
-    return fromProject(path4.resolve("."), workflow2, options6, logger);
+  if (!expressionPath && !workflowPath && !/\.(js|json|yaml)+$/.test(options7.path || "") && !options7.workflow) {
+    const workflow2 = options7.path;
+    return fromProject(path4.resolve("."), workflow2, options7, logger);
   }
   if (expressionPath) {
-    return loadExpression(options6, logger);
+    return loadExpression(options7, logger);
   }
   const jsonPath = planPath || workflowPath;
-  if (!options6.baseDir) {
-    options6.baseDir = path4.dirname(jsonPath);
+  if (!options7.baseDir) {
+    options7.baseDir = path4.dirname(jsonPath);
   }
   const json = await loadJson(jsonPath, logger);
   const defaultName = path4.parse(jsonPath).name;
   if (json.workflow) {
-    return loadXPlan(json, options6, logger, defaultName);
+    return loadXPlan(json, options7, logger, defaultName);
   } else {
-    return loadOldWorkflow(json, options6, logger, defaultName);
+    return loadOldWorkflow(json, options7, logger, defaultName);
   }
 };
 var load_plan_default = loadPlan;
-var fromProject = async (rootDir, workflowName, options6, logger) => {
+var fromProject = async (rootDir, workflowName, options7, logger) => {
   logger.debug("Loading Repo from ", path4.resolve(rootDir));
   const project = await Project.from("fs", { root: rootDir });
   logger.debug("Loading workflow ", workflowName);
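
The new `src/util/resolve-path.ts` helper above expands a leading `~` to the user's home directory and otherwise resolves the path against an optional root. One subtlety worth noting: `os.homedir` is passed as a function, which works because `String.prototype.replace` accepts a function as the replacement. A standalone sketch of the same logic (hypothetical names):

    import os from "node:os";
    import nodepath from "node:path";

    // replace() invokes os.homedir for the matched "~" and splices in its return value
    const resolvePath = (p, root) =>
      p.startsWith("~") ? p.replace("~", os.homedir) : nodepath.resolve(root ?? "", p);

    resolvePath("~/wf/workflow.yaml");      // -> <home>/wf/workflow.yaml
    resolvePath("state.json", "/tmp/base"); // -> /tmp/base/state.json

`fetchFile` later in the diff uses this to resolve step file paths against the workflow's root directory.
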
@@ -817,7 +862,7 @@ var fromProject = async (rootDir, workflowName, options6, logger) => {
   if (!workflow2) {
     throw new Error(`Workflow "${workflowName}" not found`);
   }
-  return loadXPlan({ workflow: workflow2 }, options6, logger);
+  return loadXPlan({ workflow: workflow2 }, options7, logger);
 };
 var loadJson = async (workflowPath, logger) => {
   let text;
@@ -852,8 +897,8 @@ var maybeAssign = (a, b, keys) => {
     }
   });
 };
-var loadExpression = async (options6, logger) => {
-  const expressionPath = options6.expressionPath;
+var loadExpression = async (options7, logger) => {
+  const expressionPath = options7.expressionPath;
   logger.debug(`Loading expression from ${expressionPath}`);
   try {
     const expression = await fs3.readFile(expressionPath, "utf8");
@@ -861,19 +906,19 @@ var loadExpression = async (options6, logger) => {
     const step = {
       expression,
       // The adaptor should have been expanded nicely already, so we don't need intervene here
-      adaptors: options6.adaptors ?? []
+      adaptors: options7.adaptors ?? []
     };
     const wfOptions = {};
-    maybeAssign(options6, wfOptions, ["timeout"]);
+    maybeAssign(options7, wfOptions, ["timeout"]);
     const plan = {
       workflow: {
         name,
         steps: [step],
-        globals: options6.globals
+        globals: options7.globals
       },
       options: wfOptions
     };
-    return loadXPlan(plan, options6, logger);
+    return loadXPlan(plan, options7, logger);
   } catch (e) {
     abort_default(
       logger,
@@ -884,7 +929,7 @@ var loadExpression = async (options6, logger) => {
     return {};
   }
 };
-var loadOldWorkflow = async (workflow2, options6, logger, defaultName = "") => {
+var loadOldWorkflow = async (workflow2, options7, logger, defaultName = "") => {
   const plan = {
     workflow: {
       steps: workflow2.jobs
@@ -896,7 +941,7 @@ var loadOldWorkflow = async (workflow2, options6, logger, defaultName = "") => {
   if (workflow2.id) {
     plan.id = workflow2.id;
   }
-  const final = await loadXPlan(plan, options6, logger, defaultName);
+  const final = await loadXPlan(plan, options7, logger, defaultName);
   logger.warn("Converted workflow into new format:");
   logger.warn(final);
   return final;
@@ -904,7 +949,7 @@ var loadOldWorkflow = async (workflow2, options6, logger, defaultName = "") => {
 var fetchFile = async (fileInfo, log2) => {
   const { rootDir = "", filePath, name } = fileInfo;
   try {
-    const fullPath =
+    const fullPath = resolve_path_default(filePath, rootDir);
     const result = await fs3.readFile(fullPath, "utf8");
     log2.debug("Loaded file", fullPath);
     return result;
@@ -988,7 +1033,42 @@ var ensureAdaptors = (plan) => {
     job.adaptors ??= [];
   });
 };
-var loadXPlan = async (plan, options6, logger, defaultName = "") => {
+var ensureCollections = (plan, {
+  endpoint: endpoint2 = "https://app.openfn.org",
+  version = "latest",
+  apiKey: apiKey2 = "null"
+} = {}, logger) => {
+  let collectionsFound = false;
+  Object.values(plan.workflow.steps).filter((step) => step.expression?.match(/(collections\.)/)).forEach((step) => {
+    const job = step;
+    if (!job.adaptors?.find(
+      (v) => v.startsWith("@openfn/language-collections")
+    )) {
+      collectionsFound = true;
+      job.adaptors ??= [];
+      job.adaptors.push(
+        `@openfn/language-collections@${version || "latest"}`
+      );
+      job.configuration = Object.assign({}, job.configuration, {
+        collections_endpoint: `${endpoint2}/collections`,
+        collections_token: apiKey2
+      });
+    }
+  });
+  if (collectionsFound) {
+    if (!apiKey2 || apiKey2 === "null") {
+      logger?.warn(
+        "WARNING: collections API was not set. Pass --api-key or OPENFN_API_KEY"
+      );
+    }
+    logger?.info(
+      `Configured collections to use endpoint ${endpoint2} and API Key ending with ${apiKey2?.substring(
+        apiKey2.length - 10
+      )}`
+    );
+  }
+};
+var loadXPlan = async (plan, options7, logger, defaultName = "") => {
   if (!plan.options) {
     plan.options = {};
   }
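
`ensureCollections` above auto-wires collections support at plan-load time: any step whose expression references `collections.` gets the collections adaptor appended and the endpoint/token injected into its configuration. A rough before/after sketch (step shape inferred from the code above):

    const plan = { workflow: { steps: [
      { id: "sync", expression: "collections.get('my-coll', '*')", adaptors: [] },
    ] } };
    ensureCollections(plan, { endpoint: "https://app.openfn.org", apiKey: "abc123" });
    // The step now carries:
    //   adaptors: ["@openfn/language-collections@latest"]
    //   configuration: {
    //     collections_endpoint: "https://app.openfn.org/collections",
    //     collections_token: "abc123",
    //   }

If no API key is supplied the step is still wired up, but a warning suggests passing `--api-key` or setting `OPENFN_API_KEY`.
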
@@ -996,22 +1076,31 @@ var loadXPlan = async (plan, options6, logger, defaultName = "") => {
     plan.workflow.name = defaultName;
   }
   ensureAdaptors(plan);
-  if (options6.globals)
-    plan.workflow.globals = options6.globals;
-  await importGlobals(plan, options6.baseDir, logger);
-  await importExpressions(plan, options6.baseDir, logger);
-  if (options6.expandAdaptors) {
+  ensureCollections(
+    plan,
+    {
+      version: options7.collectionsVersion,
+      apiKey: options7.apiKey,
+      endpoint: options7.collectionsEndpoint
+    },
+    logger
+  );
+  if (options7.globals)
+    plan.workflow.globals = options7.globals;
+  await importGlobals(plan, options7.baseDir, logger);
+  await importExpressions(plan, options7.baseDir, logger);
+  if (options7.expandAdaptors) {
     expand_adaptors_default(plan);
   }
-  await map_adaptors_to_monorepo_default(options6.monorepoPath, plan, logger);
-  maybeAssign(options6, plan.options, ["timeout", "start"]);
+  await map_adaptors_to_monorepo_default(options7.monorepoPath, plan, logger);
+  maybeAssign(options7, plan.options, ["timeout", "start"]);
   logger.info(`Loaded workflow ${plan.workflow.name ?? ""}`);
   return plan;
 };
 
 // src/util/assert-path.ts
-var assert_path_default = (path15) => {
-  if (!path15) {
+var assert_path_default = (path17) => {
+  if (!path17) {
     console.error("ERROR: no path provided!");
     console.error("\nUsage:");
     console.error("  open path/to/job");
@@ -1047,7 +1136,7 @@ var fuzzy_match_step_default = (plan, stepPattern) => {
 
 // src/util/validate-plan.ts
 var assertWorkflowStructure = (plan, logger) => {
-  const { workflow: workflow2, options: options6 } = plan;
+  const { workflow: workflow2, options: options7 } = plan;
   if (!workflow2 || typeof workflow2 !== "object") {
     throw new Error(`Missing or invalid "workflow" key in execution plan`);
   }
@@ -1060,7 +1149,7 @@
   workflow2.steps.forEach((step, index) => {
     assertStepStructure(step, index);
   });
-  assertOptionsStructure(options6, logger);
+  assertOptionsStructure(options7, logger);
 };
 var assertStepStructure = (step, index) => {
   const allowedKeys = [
@@ -1087,9 +1176,9 @@ var assertStepStructure = (step, index) => {
     );
   }
 };
-var assertOptionsStructure = (options6 = {}, logger) => {
+var assertOptionsStructure = (options7 = {}, logger) => {
   const allowedKeys = ["timeout", "stepTimeout", "start", "end", "sanitize"];
-  for (const key in options6) {
+  for (const key in options7) {
     if (!allowedKeys.includes(key)) {
       logger.warn(`Unrecognized option "${key}" in options object`);
     }
@@ -1145,17 +1234,41 @@ var matchStep = (plan, stepPattern, stepName, logger) => {
   }
   return "";
 };
-var executeHandler = async (options6, logger) => {
+var loadAndApplyCredentialMap = async (plan, options7, logger) => {
+  let creds = {};
+  if (options7.credentials) {
+    try {
+      const credsRaw = await readFile3(
+        path5.resolve(options7.credentials),
+        "utf8"
+      );
+      if (options7.credentials.endsWith(".json")) {
+        creds = JSON.parse(credsRaw);
+      } else {
+        creds = yamlToJson2(credsRaw);
+      }
+    } catch (e) {
+      logger.error("Error processing credential map:");
+      logger.error(e);
+      process.exitCode = 1;
+      return;
+    }
+    logger.info("Credential map loaded ");
+  }
+  return apply_credential_map_default(plan, creds, logger);
+};
+var executeHandler = async (options7, logger) => {
   const start = (/* @__PURE__ */ new Date()).getTime();
-  assert_path_default(options6.path);
-  await validate_adaptors_default(options6, logger);
-  let plan = await load_plan_default(options6, logger);
+  assert_path_default(options7.path);
+  await validate_adaptors_default(options7, logger);
+  let plan = await load_plan_default(options7, logger);
   validate_plan_default(plan, logger);
-  if (options6.cacheSteps) {
-    await clearCache(plan, options6, logger);
+  await loadAndApplyCredentialMap(plan, options7, logger);
+  if (options7.cacheSteps) {
+    await clearCache(plan, options7, logger);
   }
   const moduleResolutions = {};
-  const { repoDir, monorepoPath, autoinstall } = options6;
+  const { repoDir, monorepoPath, autoinstall } = options7;
   if (autoinstall) {
     if (monorepoPath) {
       logger.warn("Skipping auto-install as monorepo is being used");
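
Together with `apply-credential-map.ts` earlier in the diff, `loadAndApplyCredentialMap` wires the map into `openfn execute`: the file named by `options.credentials` is parsed as JSON if it ends in `.json`, otherwise as YAML, then applied to the plan before compilation. A hypothetical map file for the sketch shown earlier (keys are the credential IDs found in step `configuration` fields):

    {
      "cred-uuid-1": { "username": "demo", "password": "secret" }
    }

A parse failure is logged and sets `process.exitCode = 1` rather than throwing, so the CLI exits non-zero without a stack trace.
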
@@ -1163,13 +1276,13 @@ var executeHandler = async (options6, logger) => {
|
|
|
1163
1276
|
const autoInstallTargets = get_autoinstall_targets_default(plan);
|
|
1164
1277
|
if (autoInstallTargets.length) {
|
|
1165
1278
|
logger.info("Auto-installing language adaptors");
|
|
1166
|
-
|
|
1279
|
+
options7.adaptors = await install(
|
|
1167
1280
|
{ packages: autoInstallTargets, repoDir },
|
|
1168
1281
|
logger
|
|
1169
1282
|
);
|
|
1170
|
-
if (autoInstallTargets.length ===
|
|
1283
|
+
if (autoInstallTargets.length === options7.adaptors.length) {
|
|
1171
1284
|
for (let i = 0; i < autoInstallTargets.length; i++) {
|
|
1172
|
-
moduleResolutions[autoInstallTargets[i]] =
|
|
1285
|
+
moduleResolutions[autoInstallTargets[i]] = options7.adaptors[i];
|
|
1173
1286
|
}
|
|
1174
1287
|
}
|
|
1175
1288
|
}
|
|
@@ -1177,35 +1290,35 @@ var executeHandler = async (options6, logger) => {
|
|
|
1177
1290
|
}
|
|
1178
1291
|
let customStart;
|
|
1179
1292
|
let customEnd;
|
|
1180
|
-
if (
|
|
1181
|
-
const step = matchStep(plan,
|
|
1293
|
+
if (options7.only) {
|
|
1294
|
+
const step = matchStep(plan, options7.only, "only", logger);
|
|
1182
1295
|
customStart = step;
|
|
1183
1296
|
customEnd = step;
|
|
1184
|
-
logger.always(`Only running workflow step "${
|
|
1297
|
+
logger.always(`Only running workflow step "${options7.start}"`);
|
|
1185
1298
|
} else {
|
|
1186
|
-
if (
|
|
1299
|
+
if (options7.start) {
|
|
1187
1300
|
customStart = matchStep(
|
|
1188
1301
|
plan,
|
|
1189
|
-
|
|
1302
|
+
options7.start ?? plan.options.start,
|
|
1190
1303
|
"start",
|
|
1191
1304
|
logger
|
|
1192
1305
|
);
|
|
1193
|
-
logger.info(`Starting workflow from step "${
|
|
1306
|
+
logger.info(`Starting workflow from step "${options7.start}"`);
|
|
1194
1307
|
}
|
|
1195
|
-
if (
|
|
1308
|
+
if (options7.end) {
|
|
1196
1309
|
customEnd = matchStep(
|
|
1197
1310
|
plan,
|
|
1198
|
-
|
|
1311
|
+
options7.end ?? plan.options.end,
|
|
1199
1312
|
"end",
|
|
1200
1313
|
logger
|
|
1201
1314
|
);
|
|
1202
|
-
logger.always(`Ending workflow at step "${
|
|
1315
|
+
logger.always(`Ending workflow at step "${options7.end}"`);
|
|
1203
1316
|
}
|
|
1204
1317
|
}
|
|
1205
|
-
const state = await load_state_default(plan,
|
|
1318
|
+
const state = await load_state_default(plan, options7, logger, customStart);
|
|
1206
1319
|
plan = override_plan_adaptors_default(plan, moduleResolutions);
|
|
1207
|
-
if (
|
|
1208
|
-
plan = await compile_default(plan,
|
|
1320
|
+
if (options7.compile) {
|
|
1321
|
+
plan = await compile_default(plan, options7, logger);
|
|
1209
1322
|
} else {
|
|
1210
1323
|
logger.info("Skipping compilation as noCompile is set");
|
|
1211
1324
|
}
|
|
@@ -1219,13 +1332,13 @@ var executeHandler = async (options6, logger) => {
|
|
|
1219
1332
|
workflow: plan.workflow
|
|
1220
1333
|
};
|
|
1221
1334
|
try {
|
|
1222
|
-
const result = await execute_default(finalPlan, state,
|
|
1223
|
-
if (
|
|
1335
|
+
const result = await execute_default(finalPlan, state, options7, logger);
|
|
1336
|
+
if (options7.cacheSteps) {
|
|
1224
1337
|
logger.success(
|
|
1225
1338
|
"Cached output written to ./cli-cache (see info logs for details)"
|
|
1226
1339
|
);
|
|
1227
1340
|
}
|
|
1228
|
-
await serialize_output_default(
|
|
1341
|
+
await serialize_output_default(options7, result, logger);
|
|
1229
1342
|
const duration = printDuration((/* @__PURE__ */ new Date()).getTime() - start);
|
|
1230
1343
|
if (result?.errors) {
|
|
1231
1344
|
logger.warn(
|
|
@@ -1248,55 +1361,55 @@ var handler_default2 = executeHandler;
|
|
|
1248
1361
|
|
|
1249
1362
|
// src/compile/handler.ts
|
|
1250
1363
|
import { writeFile as writeFile3 } from "node:fs/promises";
|
|
1251
|
-
var compileHandler = async (
|
|
1252
|
-
assert_path_default(
|
|
1364
|
+
var compileHandler = async (options7, logger) => {
|
|
1365
|
+
assert_path_default(options7.path);
|
|
1253
1366
|
let result;
|
|
1254
|
-
if (
|
|
1255
|
-
const { code } = await compile_default(
|
|
1367
|
+
if (options7.expressionPath) {
|
|
1368
|
+
const { code } = await compile_default(options7.expressionPath, options7, logger);
|
|
1256
1369
|
result = code;
|
|
1257
1370
|
} else {
|
|
1258
|
-
const plan = await load_plan_default(
|
|
1259
|
-
const compiledPlan = await compile_default(plan,
|
|
1371
|
+
const plan = await load_plan_default(options7, logger);
|
|
1372
|
+
const compiledPlan = await compile_default(plan, options7, logger);
|
|
1260
1373
|
result = JSON.stringify(compiledPlan, null, 2);
|
|
1261
1374
|
}
|
|
1262
|
-
if (
|
|
1375
|
+
if (options7.outputStdout) {
|
|
1263
1376
|
logger.success("Result:\n\n" + result);
|
|
1264
1377
|
} else {
|
|
1265
|
-
await writeFile3(
|
|
1266
|
-
logger.success(`Compiled to ${
|
|
1378
|
+
await writeFile3(options7.outputPath, result);
|
|
1379
|
+
logger.success(`Compiled to ${options7.outputPath}`);
|
|
1267
1380
|
}
|
|
1268
1381
|
};
|
|
1269
1382
|
var handler_default3 = compileHandler;
|
|
1270
1383
|
|
|
1271
1384
|
// src/collections/handler.ts
|
|
1272
|
-
import
|
|
1273
|
-
import { readFile as
|
|
1385
|
+
import path7 from "node:path";
|
|
1386
|
+
import { readFile as readFile4, writeFile as writeFile4 } from "node:fs/promises";
|
|
1274
1387
|
|
|
1275
1388
|
// src/collections/request.ts
|
|
1276
|
-
import
|
|
1389
|
+
import path6 from "node:path";
|
|
1277
1390
|
import { request } from "undici";
|
|
1278
1391
|
var DEFAULT_PAGE_SIZE = 1e3;
|
|
1279
|
-
var request_default = async (method,
|
|
1280
|
-
const base =
|
|
1281
|
-
const url2 =
|
|
1392
|
+
var request_default = async (method, options7, logger) => {
|
|
1393
|
+
const base = options7.lightning || process.env.OPENFN_ENDPOINT || "https://app.openfn.org";
|
|
1394
|
+
const url2 = path6.join(base, "/collections", options7.collectionName);
|
|
1282
1395
|
logger.debug("Calling Collections server at ", url2);
|
|
1283
1396
|
const headers = {
|
|
1284
|
-
Authorization: `Bearer ${
|
|
1397
|
+
Authorization: `Bearer ${options7.token}`
|
|
1285
1398
|
};
|
|
1286
1399
|
const query = Object.assign(
|
|
1287
1400
|
{
|
|
1288
|
-
key:
|
|
1289
|
-
limit:
|
|
1401
|
+
key: options7.key,
|
|
1402
|
+
limit: options7.pageSize || DEFAULT_PAGE_SIZE
|
|
1290
1403
|
},
|
|
1291
|
-
|
|
1404
|
+
options7.query
|
|
1292
1405
|
);
|
|
1293
1406
|
const args = {
|
|
1294
1407
|
headers,
|
|
1295
1408
|
method,
|
|
1296
1409
|
query
|
|
1297
1410
|
};
|
|
1298
|
-
if (
|
|
1299
|
-
args.body = JSON.stringify(
|
|
1411
|
+
if (options7.data) {
|
|
1412
|
+
args.body = JSON.stringify(options7.data);
|
|
1300
1413
|
headers["content-type"] = "application/json";
|
|
1301
1414
|
}
|
|
1302
1415
|
let result = {};
|
|
@@ -1307,11 +1420,11 @@ var request_default = async (method, options6, logger) => {
|
|
|
1307
1420
|
if (cursor) {
|
|
1308
1421
|
query.cursor = cursor;
|
|
1309
1422
|
}
|
|
1310
|
-
if (
|
|
1311
|
-
limit =
|
|
1423
|
+
if (options7.limit) {
|
|
1424
|
+
limit = options7.limit;
|
|
1312
1425
|
query.limit = Math.min(
|
|
1313
|
-
|
|
1314
|
-
|
|
1426
|
+
options7.pageSize || DEFAULT_PAGE_SIZE,
|
|
1427
|
+
options7.limit - count
|
|
1315
1428
|
);
|
|
1316
1429
|
}
|
|
1317
1430
|
try {
|
|
@@ -1405,7 +1518,7 @@ var ensureToken = (opts, logger) => {
|
|
|
1405
1518
|
}
|
|
1406
1519
|
}
|
|
1407
1520
|
};
|
|
1408
|
-
var buildQuery = (
|
|
1521
|
+
var buildQuery = (options7) => {
|
|
1409
1522
|
const map = {
|
|
1410
1523
|
createdBefore: "created_before",
|
|
1411
1524
|
createdAfter: "created_after",
|
|
@@ -1414,34 +1527,34 @@ var buildQuery = (options6) => {
|
|
|
1414
1527
|
};
|
|
1415
1528
|
const query = {};
|
|
1416
1529
|
Object.keys(map).forEach((key) => {
|
|
1417
|
-
if (
|
|
1418
|
-
query[map[key]] =
|
|
1530
|
+
if (options7[key]) {
|
|
1531
|
+
query[map[key]] = options7[key];
|
|
1419
1532
|
}
|
|
1420
1533
|
});
|
|
1421
1534
|
return query;
|
|
1422
1535
|
};
|
|
1423
|
-
var get = async (
|
|
1424
|
-
ensureToken(
|
|
1425
|
-
const multiMode =
|
|
1536
|
+
var get = async (options7, logger) => {
|
|
1537
|
+
ensureToken(options7, logger);
|
|
1538
|
+
const multiMode = options7.key.includes("*");
|
|
1426
1539
|
if (multiMode) {
|
|
1427
1540
|
logger.info(
|
|
1428
|
-
`Fetching multiple items from collection "${
|
|
1541
|
+
`Fetching multiple items from collection "${options7.collectionName}" with pattern ${options7.key}`
|
|
1429
1542
|
);
|
|
1430
1543
|
} else {
|
|
1431
1544
|
logger.info(
|
|
1432
|
-
`Fetching "${
|
|
1545
|
+
`Fetching "${options7.key}" from collection "${options7.collectionName}"`
|
|
1433
1546
|
);
|
|
1434
1547
|
}
|
|
1435
1548
|
let result = await request_default(
|
|
1436
1549
|
"GET",
|
|
1437
1550
|
{
|
|
1438
|
-
lightning:
|
|
1439
|
-
token:
|
|
1440
|
-
pageSize:
|
|
1441
|
-
limit:
|
|
1442
|
-
key:
|
|
1443
|
-
collectionName:
|
|
1444
|
-
query: buildQuery(
|
|
1551
|
+
lightning: options7.endpoint,
|
|
1552
|
+
token: options7.token,
|
|
1553
|
+
pageSize: options7.pageSize,
|
|
1554
|
+
limit: options7.limit,
|
|
1555
|
+
key: options7.key,
|
|
1556
|
+
collectionName: options7.collectionName,
|
|
1557
|
+
query: buildQuery(options7)
|
|
1445
1558
|
},
|
|
1446
1559
|
logger
|
|
1447
1560
|
);
|
|
@@ -1449,76 +1562,76 @@ var get = async (options6, logger) => {
|
|
|
1449
1562
|
logger.success(`Fetched ${Object.keys(result).length} items!`);
|
|
1450
1563
|
} else {
|
|
1451
1564
|
result = Object.values(result)[0];
|
|
1452
|
-
logger.success(`Fetched ${
|
|
1565
|
+
logger.success(`Fetched ${options7.key}`);
|
|
1453
1566
|
}
|
|
1454
|
-
if (
|
|
1567
|
+
if (options7.outputPath) {
|
|
1455
1568
|
const content = JSON.stringify(
|
|
1456
1569
|
result,
|
|
1457
1570
|
null,
|
|
1458
|
-
|
|
1571
|
+
options7.pretty ? 2 : void 0
|
|
1459
1572
|
);
|
|
1460
|
-
await writeFile4(
|
|
1461
|
-
logger.always(`Wrote items to ${
|
|
1573
|
+
await writeFile4(options7.outputPath, content);
|
|
1574
|
+
logger.always(`Wrote items to ${options7.outputPath}`);
|
|
1462
1575
|
} else {
|
|
1463
1576
|
logger.print(result);
|
|
1464
1577
|
}
|
|
1465
1578
|
};
|
|
1466
|
-
var set = async (
|
|
1467
|
-
if (
|
|
1579
|
+
var set = async (options7, logger) => {
|
|
1580
|
+
if (options7.key && options7.items) {
|
|
1468
1581
|
throwAbortableError(
|
|
1469
1582
|
"ARGUMENT_ERROR: arguments for key and items were provided",
|
|
1470
1583
|
"If upserting multiple items with --items, do not pass a key"
|
|
1471
1584
|
);
|
|
1472
1585
|
}
|
|
1473
|
-
ensureToken(
|
|
1474
|
-
logger.info(`Upserting items to collection "${
|
|
1586
|
+
ensureToken(options7, logger);
|
|
1587
|
+
logger.info(`Upserting items to collection "${options7.collectionName}"`);
|
|
1475
1588
|
const items = [];
|
|
1476
|
-
if (
|
|
1477
|
-
const resolvedPath =
|
|
1589
|
+
if (options7.items) {
|
|
1590
|
+
const resolvedPath = path7.resolve(options7.items);
|
|
1478
1591
|
logger.debug("Loading items from ", resolvedPath);
|
|
1479
|
-
const data = await
|
|
1592
|
+
const data = await readFile4(resolvedPath, "utf8");
|
|
1480
1593
|
const obj = JSON.parse(data);
|
|
1481
1594
|
Object.entries(obj).forEach(([key, value]) => {
|
|
1482
1595
|
items.push({ key, value: JSON.stringify(value) });
|
|
1483
1596
|
});
|
|
1484
1597
|
logger.info(`Upserting ${items.length} items`);
|
|
1485
|
-
} else if (
|
|
1486
|
-
const resolvedPath =
|
|
1598
|
+
} else if (options7.key && options7.value) {
|
|
1599
|
+
const resolvedPath = path7.resolve(options7.value);
|
|
1487
1600
|
logger.debug("Loading value from ", resolvedPath);
|
|
1488
|
-
const data = await
|
|
1601
|
+
const data = await readFile4(path7.resolve(options7.value), "utf8");
|
|
1489
1602
|
const value = JSON.stringify(JSON.parse(data));
|
|
1490
|
-
items.push({ key:
|
|
1491
|
-
logger.info(`Upserting data to "${
|
|
1603
|
+
items.push({ key: options7.key, value });
|
|
1604
|
+
logger.info(`Upserting data to "${options7.key}"`);
|
|
1492
1605
|
} else {
|
|
1493
1606
|
throw new Error("INVALID_ARGUMENTS");
|
|
1494
1607
|
}
|
|
1495
1608
|
const result = await request_default(
|
|
1496
1609
|
"POST",
|
|
1497
1610
|
{
|
|
1498
|
-
lightning:
|
|
1499
|
-
token:
|
|
1500
|
-
key:
|
|
1501
|
-
collectionName:
|
|
1611
|
+
lightning: options7.endpoint,
|
|
1612
|
+
token: options7.token,
|
|
1613
|
+
key: options7.key,
|
|
1614
|
+
collectionName: options7.collectionName,
|
|
1502
1615
|
data: { items }
|
|
1503
1616
|
},
|
|
1504
1617
|
logger
|
|
1505
1618
|
);
|
|
1506
1619
|
logger.success(`Upserted ${result.upserted} items!`);
|
|
1507
1620
|
};
|
|
1508
|
-
var remove = async (
|
|
1509
|
-
ensureToken(
|
|
1621
|
+
var remove = async (options7, logger) => {
|
|
1622
|
+
ensureToken(options7, logger);
|
|
1510
1623
|
logger.info(
|
|
1511
|
-
`Removing "${
|
|
1624
|
+
`Removing "${options7.key}" from collection "${options7.collectionName}"`
|
|
1512
1625
|
);
|
|
1513
|
-
if (
|
|
1626
|
+
if (options7.dryRun) {
|
|
1514
1627
|
logger.info("--dry-run passed: fetching affected items");
|
|
1515
1628
|
let result = await request_default(
|
|
1516
1629
|
"GET",
|
|
1517
1630
|
{
|
|
1518
|
-
lightning:
|
|
1519
|
-
token:
|
|
1520
|
-
key:
|
|
1521
|
-
collectionName:
|
|
1631
|
+
lightning: options7.endpoint,
|
|
1632
|
+
token: options7.token,
|
|
1633
|
+
key: options7.key,
|
|
1634
|
+
collectionName: options7.collectionName
|
|
1522
1635
|
},
|
|
1523
1636
|
logger
|
|
1524
1637
|
);
|
|
@@ -1530,11 +1643,11 @@ var remove = async (options6, logger) => {
|
|
|
1530
1643
|
let result = await request_default(
|
|
1531
1644
|
"DELETE",
|
|
1532
1645
|
{
|
|
1533
|
-
lightning:
|
|
1534
|
-
token:
|
|
1535
|
-
key:
|
|
1536
|
-
collectionName:
|
|
1537
|
-
query: buildQuery(
|
|
1646
|
+
lightning: options7.endpoint,
|
|
1647
|
+
token: options7.token,
|
|
1648
|
+
key: options7.key,
|
|
1649
|
+
collectionName: options7.collectionName,
|
|
1650
|
+
query: buildQuery(options7)
|
|
1538
1651
|
},
|
|
1539
1652
|
logger
|
|
1540
1653
|
);
|
|
@@ -1548,9 +1661,9 @@ var handler_default4 = {
|
|
|
1548
1661
|
};
|
|
1549
1662
|
|
|
1550
1663
|
// src/test/handler.ts
|
|
1551
|
-
var testHandler = async (
|
|
1664
|
+
var testHandler = async (options7, logger) => {
|
|
1552
1665
|
logger.log("Running test workflow...");
|
|
1553
|
-
const opts = { ...
|
|
1666
|
+
const opts = { ...options7 };
|
|
1554
1667
|
opts.compile = true;
|
|
1555
1668
|
opts.adaptors = [];
|
|
1556
1669
|
const plan = {
|
|
@@ -1617,7 +1730,7 @@ import Project2 from "@openfn/project";
|
|
|
1617
1730
|
import { deployProject } from "@openfn/deploy";
|
|
1618
1731
|
|
|
1619
1732
|
// src/projects/util.ts
|
|
1620
|
-
import
|
|
1733
|
+
import path8 from "node:path";
|
|
1621
1734
|
import { mkdir as mkdir3, writeFile as writeFile5 } from "node:fs/promises";
|
|
1622
1735
|
|
|
1623
1736
|
// src/errors.ts
|
|
@@ -1628,17 +1741,17 @@ var CLIError = class extends Error {
|
|
|
1628
1741
|
};
|
|
1629
1742
|
|
|
1630
1743
|
// src/projects/util.ts
|
|
1631
|
-
var loadAppAuthConfig = (
|
|
1744
|
+
var loadAppAuthConfig = (options7, logger) => {
|
|
1632
1745
|
const { OPENFN_API_KEY, OPENFN_ENDPOINT } = process.env;
|
|
1633
1746
|
const config2 = {
|
|
1634
|
-
apiKey:
|
|
1635
|
-
endpoint:
|
|
1747
|
+
apiKey: options7.apiKey,
|
|
1748
|
+
endpoint: options7.endpoint
|
|
1636
1749
|
};
|
|
1637
|
-
if (!
|
|
1750
|
+
if (!options7.apiKey && OPENFN_API_KEY) {
|
|
1638
1751
|
logger.info("Using OPENFN_API_KEY environment variable");
|
|
1639
1752
|
config2.apiKey = OPENFN_API_KEY;
|
|
1640
1753
|
}
|
|
1641
|
-
if (!
|
|
1754
|
+
if (!options7.endpoint && OPENFN_ENDPOINT) {
|
|
1642
1755
|
logger.info("Using OPENFN_ENDPOINT environment variable");
|
|
1643
1756
|
config2.endpoint = OPENFN_ENDPOINT;
|
|
1644
1757
|
}
|
|
@@ -1651,7 +1764,7 @@ var ensureExt = (filePath, ext) => {
|
|
|
1651
1764
|
return filePath;
|
|
1652
1765
|
};
|
|
1653
1766
|
var serialize = async (project, outputPath2, formatOverride, dryRun = false) => {
|
|
1654
|
-
const root =
|
|
1767
|
+
const root = path8.dirname(outputPath2);
|
|
1655
1768
|
await mkdir3(root, { recursive: true });
|
|
1656
1769
|
const format = formatOverride ?? project.config?.formats.project;
|
|
1657
1770
|
const output = project?.serialize("project", { format });
|
|
@@ -1670,41 +1783,38 @@ var serialize = async (project, outputPath2, formatOverride, dryRun = false) =>
|
|
|
1670
1783
|
}
|
|
1671
1784
|
return finalPath;
|
|
1672
1785
|
};
|
|
1673
|
-
var getLightningUrl = (
|
|
1786
|
+
var getLightningUrl = (endpoint2, path17 = "", snapshots2) => {
|
|
1674
1787
|
const params = new URLSearchParams();
|
|
1675
1788
|
snapshots2?.forEach((snapshot) => params.append("snapshots[]", snapshot));
|
|
1676
|
-
return new URL(
|
|
1677
|
-
`/api/provision/${path15}?${params.toString()}`,
|
|
1678
|
-
config2.endpoint
|
|
1679
|
-
);
|
|
1789
|
+
return new URL(`/api/provision/${path17}?${params.toString()}`, endpoint2);
|
|
1680
1790
|
};
|
|
1681
|
-
async function
|
|
1682
|
-
const url2 = getLightningUrl(
|
|
1683
|
-
logger
|
|
1791
|
+
async function fetchProject(endpoint2, apiKey2, projectId, logger, snapshots2) {
|
|
1792
|
+
const url2 = getLightningUrl(endpoint2, projectId, snapshots2);
|
|
1793
|
+
logger?.info(`Checking ${url2} for existing project`);
|
|
1684
1794
|
try {
|
|
1685
1795
|
const response = await fetch(url2, {
|
|
1686
1796
|
headers: {
|
|
1687
|
-
Authorization: `Bearer ${
|
|
1797
|
+
Authorization: `Bearer ${apiKey2}`,
|
|
1688
1798
|
Accept: "application/json"
|
|
1689
1799
|
}
|
|
1690
1800
|
});
|
|
1691
1801
|
if (!response.ok) {
|
|
1692
1802
|
if (response.status === 401 || response.status === 403) {
|
|
1693
1803
|
throw new CLIError(
|
|
1694
|
-
`Failed to authorize request with endpoint ${
|
|
1804
|
+
`Failed to authorize request with endpoint ${endpoint2}, got ${response.status} ${response.statusText}`
|
|
1695
1805
|
);
|
|
1696
1806
|
}
|
|
1697
1807
|
if (response.status === 404) {
|
|
1698
|
-
throw new CLIError(`Project not found: ${
|
|
1808
|
+
throw new CLIError(`Project not found: ${projectId}`);
|
|
1699
1809
|
}
|
|
1700
1810
|
throw new CLIError(
|
|
1701
|
-
`Failed to fetch project ${
|
|
1811
|
+
`Failed to fetch project ${projectId}: ${response.statusText}`
|
|
1702
1812
|
);
|
|
1703
1813
|
}
|
|
1704
|
-
logger
|
|
1814
|
+
logger?.info(`Project retrieved from ${endpoint2}`);
|
|
1705
1815
|
return response.json();
|
|
1706
1816
|
} catch (error) {
|
|
1707
|
-
handleCommonErrors(
|
|
1817
|
+
handleCommonErrors({ endpoint: endpoint2, apiKey: apiKey2 }, error);
|
|
1708
1818
|
throw error;
|
|
1709
1819
|
}
|
|
1710
1820
|
}
|
|
@@ -1722,9 +1832,11 @@ var DeployError = class extends Error {
|
|
|
1722
1832
|
};
|
|
1723
1833
|
|
|
1724
1834
|
// src/deploy/beta.ts
|
|
1725
|
-
async function handler(
|
|
1726
|
-
const config2 = loadAppAuthConfig(
|
|
1727
|
-
const project = await Project2.from("fs", {
|
|
1835
|
+
async function handler(options7, logger) {
|
|
1836
|
+
const config2 = loadAppAuthConfig(options7, logger);
|
|
1837
|
+
const project = await Project2.from("fs", {
|
|
1838
|
+
root: options7.workspace || "."
|
|
1839
|
+
});
|
|
1728
1840
|
const state = project.serialize("state", { format: "json" });
|
|
1729
1841
|
logger.debug("Converted local project to app state:");
|
|
1730
1842
|
logger.debug(JSON.stringify(state, null, 2));
|
|
@@ -1736,15 +1848,15 @@ async function handler(options6, logger) {
|
|
|
1736
1848
|
|
|
1737
1849
|
// src/deploy/handler.ts
|
|
1738
1850
|
var actualDeploy = deploy;
|
|
1739
|
-
async function deployHandler(
|
|
1740
|
-
if (
|
|
1741
|
-
return handler(
|
|
1851
|
+
async function deployHandler(options7, logger, deployFn = actualDeploy) {
|
|
1852
|
+
if (options7.beta) {
|
|
1853
|
+
return handler(options7, logger);
|
|
1742
1854
|
}
|
|
1743
1855
|
try {
|
|
1744
|
-
const config2 = mergeOverrides(await getConfig(
|
|
1856
|
+
const config2 = mergeOverrides(await getConfig(options7.configPath), options7);
|
|
1745
1857
|
logger.debug("Deploying with config", JSON.stringify(config2, null, 2));
|
|
1746
|
-
if (
|
|
1747
|
-
config2.requireConfirmation =
|
|
1858
|
+
if (options7.confirm === false) {
|
|
1859
|
+
config2.requireConfirmation = options7.confirm;
|
|
1748
1860
|
}
|
|
1749
1861
|
if (process.env["OPENFN_API_KEY"]) {
|
|
1750
1862
|
logger.info("Using OPENFN_API_KEY environment variable");
|
|
@@ -1769,15 +1881,15 @@ async function deployHandler(options6, logger, deployFn = actualDeploy) {
|
|
|
1769
1881
|
throw error;
|
|
1770
1882
|
}
|
|
1771
1883
|
}
|
|
1772
|
-
function mergeOverrides(config2,
|
|
1884
|
+
function mergeOverrides(config2, options7) {
|
|
1773
1885
|
return {
|
|
1774
1886
|
...config2,
|
|
1775
1887
|
apiKey: pickFirst(process.env["OPENFN_API_KEY"], config2.apiKey),
|
|
1776
1888
|
endpoint: pickFirst(process.env["OPENFN_ENDPOINT"], config2.endpoint),
|
|
1777
|
-
statePath: pickFirst(
|
|
1778
|
-
specPath: pickFirst(
|
|
1779
|
-
configPath:
|
|
1780
|
-
requireConfirmation: pickFirst(
|
|
1889
|
+
statePath: pickFirst(options7.statePath, config2.statePath),
|
|
1890
|
+
specPath: pickFirst(options7.projectPath, config2.specPath),
|
|
1891
|
+
configPath: options7.configPath,
|
|
1892
|
+
requireConfirmation: pickFirst(options7.confirm, config2.requireConfirmation)
|
|
1781
1893
|
};
|
|
1782
1894
|
}
|
|
1783
1895
|
function pickFirst(...args) {
|
|
@@ -1788,28 +1900,28 @@ var handler_default6 = deployHandler;
|
|
|
1788
1900
|
// src/docgen/handler.ts
|
|
1789
1901
|
import { writeFile as writeFile6 } from "node:fs/promises";
|
|
1790
1902
|
import { readFileSync, writeFileSync, mkdirSync, rmSync } from "node:fs";
|
|
1791
|
-
import
|
|
1903
|
+
import path9 from "node:path";
|
|
1792
1904
|
import { describePackage } from "@openfn/describe-package";
|
|
1793
1905
|
import { getNameAndVersion as getNameAndVersion4 } from "@openfn/runtime";
|
|
1794
1906
|
var RETRY_DURATION = 500;
|
|
1795
1907
|
var RETRY_COUNT = 20;
|
|
1796
1908
|
var TIMEOUT_MS = 1e3 * 60;
|
|
1797
1909
|
var actualDocGen = (specifier) => describePackage(specifier, {});
|
|
1798
|
-
var ensurePath = (filePath) => mkdirSync(
|
|
1799
|
-
var generatePlaceholder = (
|
|
1800
|
-
writeFileSync(
|
|
1910
|
+
var ensurePath = (filePath) => mkdirSync(path9.dirname(filePath), { recursive: true });
|
|
1911
|
+
var generatePlaceholder = (path17) => {
|
|
1912
|
+
writeFileSync(path17, `{ "loading": true, "timestamp": ${Date.now()}}`);
|
|
1801
1913
|
};
|
|
1802
1914
|
var finish = (logger, resultPath) => {
|
|
1803
1915
|
logger.success("Done! Docs can be found at:\n");
|
|
1804
|
-
logger.print(` ${
|
|
1916
|
+
logger.print(` ${path9.resolve(resultPath)}`);
|
|
1805
1917
|
};
|
|
1806
|
-
var generateDocs = async (specifier,
|
|
1918
|
+
var generateDocs = async (specifier, path17, docgen, logger) => {
|
|
1807
1919
|
const result = await docgen(specifier);
|
|
1808
|
-
await writeFile6(
|
|
1809
|
-
finish(logger,
|
|
1810
|
-
return
|
|
1920
|
+
await writeFile6(path17, JSON.stringify(result, null, 2));
|
|
1921
|
+
finish(logger, path17);
|
|
1922
|
+
return path17;
|
|
1811
1923
|
};
|
|
1812
|
-
var waitForDocs = async (docs,
|
|
1924
|
+
var waitForDocs = async (docs, path17, logger, retryDuration = RETRY_DURATION) => {
|
|
1813
1925
|
try {
|
|
1814
1926
|
if (docs.hasOwnProperty("loading")) {
|
|
1815
1927
|
logger.info("Docs are being loaded by another process. Waiting.");
|
|
@@ -1821,27 +1933,27 @@ var waitForDocs = async (docs, path15, logger, retryDuration = RETRY_DURATION) =
|
|
|
1821
1933
|
clearInterval(i);
|
|
1822
1934
|
reject(new Error("Timed out waiting for docs to load"));
|
|
1823
1935
|
}
|
|
1824
|
-
const updated = JSON.parse(readFileSync(
|
|
1936
|
+
const updated = JSON.parse(readFileSync(path17, "utf8"));
|
|
1825
1937
|
if (!updated.hasOwnProperty("loading")) {
|
|
1826
1938
|
logger.info("Docs found!");
|
|
1827
1939
|
clearInterval(i);
|
|
1828
|
-
resolve(
|
|
1940
|
+
resolve(path17);
|
|
1829
1941
|
}
|
|
1830
1942
|
count++;
|
|
1831
1943
|
}, retryDuration);
|
|
1832
1944
|
});
|
|
1833
1945
|
} else {
|
|
1834
|
-
logger.info(`Docs already written to cache at ${
|
|
1835
|
-
finish(logger,
|
|
1836
|
-
return
|
|
1946
|
+
logger.info(`Docs already written to cache at ${path17}`);
|
|
1947
|
+
finish(logger, path17);
|
|
1948
|
+
return path17;
|
|
1837
1949
|
}
|
|
1838
1950
|
} catch (e) {
|
|
1839
1951
|
logger.error("Existing doc JSON corrupt. Aborting");
|
|
1840
1952
|
throw e;
|
|
1841
1953
|
}
|
|
1842
1954
|
};
|
|
1843
|
-
var docgenHandler = (
|
|
1844
|
-
const { specifier, repoDir } =
|
|
1955
|
+
var docgenHandler = (options7, logger, docgen = actualDocGen, retryDuration = RETRY_DURATION) => {
|
|
1956
|
+
const { specifier, repoDir } = options7;
|
|
1845
1957
|
const { version } = getNameAndVersion4(specifier);
|
|
1846
1958
|
if (!version) {
|
|
1847
1959
|
logger.error("Error: No version number detected");
|
|
@@ -1850,28 +1962,28 @@ var docgenHandler = (options6, logger, docgen = actualDocGen, retryDuration = RE
 process.exit(9);
 }
 logger.success(`Generating docs for ${specifier}`);
-const
-ensurePath(
+const path17 = `${repoDir}/docs/${specifier}.json`;
+ensurePath(path17);
 const handleError2 = () => {
 logger.info("Removing placeholder");
-rmSync(
+rmSync(path17);
 };
 try {
-const existing = readFileSync(
+const existing = readFileSync(path17, "utf8");
 const json = JSON.parse(existing);
 if (json && json.timeout && Date.now() - json.timeout >= TIMEOUT_MS) {
 logger.info(`Expired placeholder found. Removing.`);
-rmSync(
+rmSync(path17);
 throw new Error("TIMEOUT");
 }
-return waitForDocs(json,
+return waitForDocs(json, path17, logger, retryDuration);
 } catch (e) {
 if (e.message !== "TIMEOUT") {
-logger.info(`Docs JSON not found at ${
+logger.info(`Docs JSON not found at ${path17}`);
 }
 logger.debug("Generating placeholder");
-generatePlaceholder(
-return generateDocs(specifier,
+generatePlaceholder(path17);
+return generateDocs(specifier, path17, docgen, logger).catch((e2) => {
 logger.error("Error generating documentation");
 logger.error(e2);
 handleError2();
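
Taken together, docgenHandler and waitForDocs implement a file-based lock: the first process writes a placeholder JSON ({ loading: true, timeout: ... }) to the docs cache path, and any other process that finds the placeholder polls until real docs replace it or the placeholder expires. A minimal, self-contained sketch of that pattern; the constant values are assumptions, and the real generatePlaceholder/finish helpers live elsewhere in this bundle:

    import { readFileSync, writeFileSync } from "node:fs";

    // Assumed values for the sketch; the bundle defines its own constants.
    const RETRY_DURATION = 500;
    const TIMEOUT_MS = 60 * 1000;

    // First process: claim the cache path with a placeholder.
    const generatePlaceholderSketch = (path) =>
      writeFileSync(path, JSON.stringify({ loading: true, timeout: Date.now() }));

    // Other processes: poll the path until the placeholder is replaced.
    const waitForDocsSketch = (path, retryDuration = RETRY_DURATION) =>
      new Promise((resolve, reject) => {
        let count = 0;
        const i = setInterval(() => {
          if (count * retryDuration > TIMEOUT_MS) {
            clearInterval(i);
            return reject(new Error("Timed out waiting for docs to load"));
          }
          const updated = JSON.parse(readFileSync(path, "utf8"));
          if (!updated.hasOwnProperty("loading")) {
            clearInterval(i);
            resolve(path);
          }
          count++;
        }, retryDuration);
      });
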
@@ -1881,7 +1993,7 @@ var docgenHandler = (options6, logger, docgen = actualDocGen, retryDuration = RE
 var handler_default7 = docgenHandler;

 // src/docs/handler.ts
-import { readFile as
+import { readFile as readFile5 } from "node:fs/promises";
 import c from "chalk";
 import { getNameAndVersion as getNameAndVersion5, getLatestVersion } from "@openfn/runtime";
 var describeFn = (adaptorName, fn) => [
@@ -1910,8 +2022,8 @@ ${data.functions.map(
 (fn) => ` ${c.yellow(fn.name)} (${fn.parameters.map((p) => p.name).join(", ")})`
 ).sort().join("\n")}
 `;
-var docsHandler = async (
-const { adaptor, operation, repoDir } =
+var docsHandler = async (options7, logger) => {
+const { adaptor, operation, repoDir } = options7;
 const adaptors = expand_adaptors_default([adaptor]);
 const [adaptorName] = adaptors;
 let { name, version } = getNameAndVersion5(adaptorName);
@@ -1922,7 +2034,7 @@ var docsHandler = async (options6, logger) => {
 logger.success(`Showing docs for ${adaptorName} v${version}`);
 }
 logger.info("Generating/loading documentation...");
-const
+const path17 = await handler_default7(
 {
 specifier: `${name}@${version}`,
 repoDir
@@ -1931,8 +2043,8 @@ var docsHandler = async (options6, logger) => {
 createNullLogger()
 );
 let didError = false;
-if (
-const source = await
+if (path17) {
+const source = await readFile5(path17, "utf8");
 const data = JSON.parse(source);
 let desc;
 if (operation) {
@@ -1970,20 +2082,20 @@ var handler_default8 = docsHandler;
 // src/metadata/cache.ts
 import { getNameAndVersion as getNameAndVersion6 } from "@openfn/runtime";
 import { createHash } from "node:crypto";
-import { mkdir as mkdir4, readFile as
-import
+import { mkdir as mkdir4, readFile as readFile6, writeFile as writeFile7, readdir, rm } from "node:fs/promises";
+import path10 from "node:path";
 var UNSUPPORTED_FILE_NAME = "unsupported.json";
 var getCachePath2 = (repoDir, key) => {
-const base =
+const base = path10.join(repoDir, "meta");
 if (key) {
-return
+return path10.join(base, key.endsWith(".json") ? key : `${key}.json`);
 }
 return base;
 };
 var getCache = async (repoDir, key) => {
 try {
 const cachePath = getCachePath2(repoDir, key);
-const content = await
+const content = await readFile6(cachePath, "utf8");
 return JSON.parse(content);
 } catch (e) {
 return null;
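
For orientation, getCachePath2 above keys the metadata cache under a meta directory inside the repo, appending .json when the key has no extension. A worked example with an invented repo path:

    import path from "node:path";

    // Same logic as getCachePath2 above, inlined for the example.
    const getCachePath = (repoDir, key) => {
      const base = path.join(repoDir, "meta");
      if (key) {
        return path.join(base, key.endsWith(".json") ? key : `${key}.json`);
      }
      return base;
    };

    console.log(getCachePath("/tmp/openfn/repo"));
    // => /tmp/openfn/repo/meta
    console.log(getCachePath("/tmp/openfn/repo", "abc123"));
    // => /tmp/openfn/repo/meta/abc123.json
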
@@ -2010,7 +2122,7 @@ var generateKey = (config2, adaptor) => {
 var get2 = async (repoPath, key) => {
 const p = getCachePath2(repoPath, key);
 try {
-const result = await
+const result = await readFile6(p, "utf8");
 return JSON.parse(result);
 } catch (e) {
 return null;
@@ -2018,7 +2130,7 @@ var get2 = async (repoPath, key) => {
 };
 var set2 = async (repoPath, key, result) => {
 const p = getCachePath2(repoPath, key);
-await mkdir4(
+await mkdir4(path10.dirname(p), { recursive: true });
 await writeFile7(p, JSON.stringify(result));
 };
 var getUnsupportedCachePath = (repoDir) => {
@@ -2065,7 +2177,7 @@ var markAdaptorAsUnsupported = async (adaptorSpecifier, repoDir) => {
 const cachePath = getUnsupportedCachePath(repoDir);
 let cache = {};
 try {
-const cacheContent = await
+const cacheContent = await readFile6(cachePath, "utf8");
 cache = JSON.parse(cacheContent);
 } catch (error) {
 }
@@ -2077,7 +2189,7 @@ var markAdaptorAsUnsupported = async (adaptorSpecifier, repoDir) => {
 majorMinor: parsed.majorMinor,
 timestamp: Date.now()
 };
-await mkdir4(
+await mkdir4(path10.dirname(cachePath), { recursive: true });
 await writeFile7(cachePath, JSON.stringify(cache, null, 2));
 }
 };
@@ -2116,8 +2228,8 @@ var getAdaptorPath = async (adaptor, logger, repoDir) => {
 return adaptorPath;
 };
 var shouldAutoinstall = (adaptor) => adaptor?.length > 0 && !adaptor.startsWith("/") && !adaptor.includes("=");
-var metadataHandler = async (
-const { repoDir, adaptors, keepUnsupported } =
+var metadataHandler = async (options7, logger) => {
+const { repoDir, adaptors, keepUnsupported } = options7;
 let adaptor = adaptors[0];
 if (await isAdaptorUnsupported(adaptor, repoDir)) {
 logger.info(
@@ -2126,7 +2238,7 @@ var metadataHandler = async (options6, logger) => {
 logger.error("No metadata helper found");
 process.exit(1);
 }
-const state = await load_state_default({},
+const state = await load_state_default({}, options7, logger);
 logger.success(`Generating metadata`);
 logger.info("config:", state);
 const config2 = state.configuration;
@@ -2139,7 +2251,7 @@ var metadataHandler = async (options6, logger) => {
 logger.print(getCachePath2(repoDir, id));
 };
 const id = generateKey(config2, adaptor);
-if (!
+if (!options7.force) {
 logger.debug("config hash: ", id);
 const cached = await get2(repoDir, id);
 if (cached) {
@@ -2157,7 +2269,7 @@ var metadataHandler = async (options6, logger) => {
 wasAutoInstalled = true;
 adaptor = autoinstallResult[0];
 }
-const adaptorPath = await getAdaptorPath(adaptor, logger,
+const adaptorPath = await getAdaptorPath(adaptor, logger, options7.repoDir);
 if (!adaptorPath) {
 throw new Error(`Could not resolve adaptor path for ${adaptor}`);
 }
@@ -2199,20 +2311,16 @@ var metadataHandler = async (options6, logger) => {
 var handler_default9 = metadataHandler;

 // src/pull/handler.ts
-import
+import path14 from "path";
 import fs5 from "node:fs/promises";
 import {
 getConfig as getConfig2,
-getProject
+getProject,
 getSpec,
 getStateFromProjectPayload,
 syncRemoteSpec
 } from "@openfn/deploy";

-// src/projects/fetch.ts
-import path10 from "node:path";
-import Project3, { Workspace } from "@openfn/project";
-
 // src/util/command-builders.ts
 import c2 from "chalk";
 var expandYargs = (y) => {
@@ -2222,13 +2330,17 @@ var expandYargs = (y) => {
 return y;
 };
 function build(opts, yargs) {
-return opts.reduce(
-
-
-
+return opts.reduce((_y, o) => {
+if (!o?.name) {
+console.error(`ERROR: INVALID COMMAND OPTION PASSED`, o);
+console.error("Check the options passed to the command builder");
+throw new Error("Invalid command");
+}
+return yargs.option(o.name, expandYargs(o.yargs));
+}, yargs);
 }
-var ensure = (
-yargs.command =
+var ensure = (command7, opts) => (yargs) => {
+yargs.command = command7;
 opts.filter((opt) => opt.ensure).forEach((opt) => {
 try {
 opt.ensure(yargs);
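
The rewritten build above now throws on a falsy or nameless option instead of silently folding it into yargs. A self-contained sketch of the reduce, with a stub in place of a real yargs instance and expandYargs omitted; the option shapes mirror the definitions later in this file:

    // Stub standing in for a yargs instance: records applied options.
    const fakeYargs = {
      applied: {},
      option(name, config) {
        this.applied[name] = config;
        return this;
      },
    };

    const build = (opts, yargs) =>
      opts.reduce((_y, o) => {
        if (!o?.name) {
          throw new Error("Invalid command");
        }
        return yargs.option(o.name, o.yargs);
      }, yargs);

    build(
      [
        { name: "log", yargs: { description: "Set the log level" } },
        { name: "workspace", yargs: { description: "Path to the workspace" } },
      ],
      fakeYargs
    );
    console.log(Object.keys(fakeYargs.applied)); // => [ 'log', 'workspace' ]
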
@@ -2236,7 +2348,7 @@ var ensure = (command6, opts) => (yargs) => {
 console.log(e);
 console.error(
 c2.red(`
-Error parsing command arguments: ${
+Error parsing command arguments: ${command7}.${opt.name}
 `)
 );
 console.error(c2.red("Aborting"));
@@ -2245,18 +2357,19 @@ Error parsing command arguments: ${command6}.${opt.name}
 }
 });
 };
-var override = (
+var override = (command7, yargs) => {
 return {
-...
+...command7,
 yargs: {
-...
+...command7.yargs || {},
 ...yargs
 }
 };
 };

-// src/
-import
+// src/projects/fetch.ts
+import path12 from "node:path";
+import Project3, { Workspace } from "@openfn/project";

 // src/util/ensure-log-opts.ts
 var defaultLoggerOptions = {
@@ -2315,49 +2428,17 @@ var ensureLogOpts = (opts) => {
 };
 var ensure_log_opts_default = ensureLogOpts;

-// src/util/get-cli-option-object.ts
-function getCLIOptionObject(arg) {
-if (isObject(arg)) {
-return arg;
-} else if (typeof arg === "string") {
-try {
-const p = JSON.parse(arg);
-if (isObject(p))
-return p;
-} catch (e) {
-}
-return Object.fromEntries(
-arg.split(",").map((pair) => {
-const [k, v] = pair.split("=");
-return [k.trim(), v.trim()];
-})
-);
-}
-}
-function isObject(arg) {
-return typeof arg === "object" && arg !== null && !Array.isArray(arg);
-}
-
 // src/options.ts
-var
-const v = opts[key];
-if (isNaN(v) && !v) {
-opts[key] = value;
-}
-};
-var apikey = {
+var apiKey = {
 name: "apikey",
 yargs: {
-alias: ["
-description: "
-}
-
-
-
-
-alias: ["c", "config-path"],
-description: "The location of your config file",
-default: "./.config.json"
+alias: ["pat", "token", "api-key"],
+description: "API Key, Personal Access Token (PAT), or other access token from Lightning"
+},
+ensure: (opts) => {
+if (!opts.apikey) {
+opts.apiKey = process.env.OPENFN_API_KEY;
+}
 }
 };
 var endpoint = {
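
Note the ensure hook on the new apikey option: when no key is passed on the command line, it falls back to the OPENFN_API_KEY environment variable. As written above, the hook checks opts.apikey but assigns the fallback to opts.apiKey. A short demonstration of the fallback; the token value is invented:

    // Mirrors the ensure hook on the apikey option above.
    const ensureApiKey = (opts) => {
      if (!opts.apikey) {
        opts.apiKey = process.env.OPENFN_API_KEY;
      }
    };

    process.env.OPENFN_API_KEY = "example-token"; // assumption for the demo
    const opts = {};
    ensureApiKey(opts);
    console.log(opts.apiKey); // => "example-token"
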
@@ -2367,12 +2448,6 @@ var endpoint = {
 description: "[beta only] URL to Lightning endpoint"
 }
 };
-var env = {
-name: "env",
-yargs: {
-description: "[beta only] Environment name (eg staging, prod, branch)"
-}
-};
 var force = {
 name: "force",
 yargs: {
@@ -2382,23 +2457,6 @@ var force = {
 default: false
 }
 };
-var getBaseDir = (opts) => {
-const basePath = opts.path ?? ".";
-if (/\.(jso?n?|ya?ml)$/.test(basePath)) {
-return nodePath.dirname(basePath);
-}
-return basePath;
-};
-var projectId = {
-name: "project-id",
-yargs: {
-description: "The id or UUID of an openfn project",
-string: true
-},
-ensure: (opts) => {
-return opts.projectName;
-}
-};
 var log = {
 name: "log",
 yargs: {
@@ -2417,26 +2475,18 @@ var logJson = {
 boolean: true
 }
 };
-var
-name: "
+var projectPath = {
+name: "project-path",
 yargs: {
-
-
-
-
-
-
-
-
-
-if (opts.outputStdout) {
-delete opts.outputPath;
-} else {
-const base = getBaseDir(opts);
-setDefaultValue(opts, "outputPath", nodePath.join(base, "output.json"));
-}
-}
-delete opts.o;
+string: true,
+alias: ["p"],
+description: "The location of your project.yaml file"
+}
+};
+var path11 = {
+name: "path",
+yargs: {
+description: "Path"
 }
 };
 var snapshots = {
@@ -2446,16 +2496,6 @@ var snapshots = {
 array: true
 }
 };
-var statePath = {
-name: "state-path",
-yargs: {
-alias: ["s"],
-description: "Path to the state file"
-},
-ensure: (opts) => {
-delete opts.s;
-}
-};
 var timeout = {
 name: "timeout",
 yargs: {
@@ -2472,6 +2512,44 @@ var workflow = {
 description: "Name of the workflow to execute"
 }
 };
+
+// src/util/get-cli-option-object.ts
+function getCLIOptionObject(arg) {
+if (isObject(arg)) {
+return arg;
+} else if (typeof arg === "string") {
+try {
+const p = JSON.parse(arg);
+if (isObject(p))
+return p;
+} catch (e) {
+}
+return Object.fromEntries(
+arg.split(",").map((pair) => {
+const [k, v] = pair.split("=");
+return [k.trim(), v.trim()];
+})
+);
+}
+}
+function isObject(arg) {
+return typeof arg === "object" && arg !== null && !Array.isArray(arg);
+}
+
+// src/projects/options.ts
+var env = {
+name: "env",
+yargs: {
+description: "Environment name (eg staging, prod, branch)",
+hidden: true
+}
+};
+var alias = {
+name: "alias",
+yargs: {
+description: "Environment name (eg staging, prod, branch)"
+}
+};
 var removeUnmapped = {
 name: "remove-unmapped",
 yargs: {
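
getCLIOptionObject (moved here unchanged) accepts a ready-made object, a JSON string, or comma-separated key=value pairs, presumably so mapping flags such as --workflow-mappings can be given either way on the command line. For example, both calls below produce the same object; the mapping values are invented:

    getCLIOptionObject('{"workflow-a": "workflow-b"}');
    // => { "workflow-a": "workflow-b" }

    getCLIOptionObject("workflow-a=workflow-b,workflow-c=workflow-d");
    // => { "workflow-a": "workflow-b", "workflow-c": "workflow-d" }
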
@@ -2487,6 +2565,14 @@ var workflowMappings = {
 description: "A manual object mapping of which workflows in source and target should be matched for a merge."
 }
 };
+var outputPath = {
+name: "output-path",
+yargs: {
+alias: ["output"],
+type: "string",
+description: "Path to output the fetched project to"
+}
+};
 var workspace = {
 name: "workspace",
 yargs: {
@@ -2498,129 +2584,228 @@ var workspace = {
 if (!ws) {
 opts.workspace = process.cwd();
 } else {
-opts.workspace =
+opts.workspace = resolve_path_default(ws);
 }
 }
 };

 // src/projects/fetch.ts
 var options = [
-
-
+alias,
+apiKey,
 endpoint,
-env,
 log,
-override(outputPath, {
-description: "Path to output the fetched project to"
-}),
 logJson,
-workspace,
 snapshots,
-
+// TODO need to add support for this
 override(force, {
 description: "Overwrite local file contents with the fetched contents"
-})
+}),
+outputPath,
+env,
+workspace
 ];
 var command = {
-command: "fetch [
-describe: `
-builder: (yargs) => build(options, yargs).positional("
-describe: "The id of the project
-demandOption: true
+command: "fetch [project]",
+describe: `Download the latest version of a project from a lightning server (does not expand the project, use checkout)`,
+builder: (yargs) => build(options, yargs).positional("project", {
+describe: "The id, alias or UUID of the project to fetch. If not set, will default to the active project"
 }).example(
 "fetch 57862287-23e6-4650-8d79-e1dd88b24b1c",
 "Fetch an updated copy of a the above spec and state from a Lightning Instance"
 ),
 handler: ensure("project-fetch", options)
 };
-var
-
-const
-
-const
-const {
-const
-
-
-
-endpoint: config2.endpoint,
-env: options6.env || "project"
-},
-workspace2.getConfig()
+var printProjectName = (project) => `${project.qname} (${project.id})`;
+var handler2 = async (options7, logger) => {
+const workspacePath = options7.workspace ?? process.cwd();
+logger.debug("Using workspace at", workspacePath);
+const workspace2 = new Workspace(workspacePath, logger, false);
+const { outputPath: outputPath2 } = options7;
+const localTargetProject = await resolveOutputProject(
+workspace2,
+options7,
+logger
 );
-const
-
-const
-const
+const remoteProject = await fetchRemoteProject(workspace2, options7, logger);
+ensureTargetCompatible(options7, remoteProject, localTargetProject);
+const outputRoot = resolve_path_default(outputPath2 || workspacePath);
+const projectsDir = remoteProject?.config.dirs.projects ?? ".projects";
+const finalOutputPath = outputPath2 ?? `${outputRoot}/${projectsDir}/${remoteProject.qname}`;
 let format = void 0;
 if (outputPath2) {
-const ext =
+const ext = path12.extname(outputPath2).substring(1);
 if (ext.length) {
 format = ext;
 }
+if (options7.alias) {
+logger.warn(
+`WARNING: alias "${options7.alias}" was set, but will be ignored as output path was provided`
+);
+}
 }
-
-
-finalOutputPath
-format,
-true
-// dry run - this won't trigger an actual write!
+await serialize(remoteProject, finalOutputPath, format);
+logger.success(
+`Fetched project file to ${finalOutputPath}.${format ?? "yaml"}`
 );
-
-
-
-
+return remoteProject;
+};
+async function resolveOutputProject(workspace2, options7, logger) {
+logger.debug("Checking for local copy of project...");
+if (options7.outputPath) {
+try {
+const customProject = await Project3.from("path", options7.outputPath);
+logger.debug(
+`Found existing local project ${printProjectName(customProject)} at`,
+options7.outputPath
+);
+return customProject;
+} catch (e) {
+logger.debug("No project found at", options7.outputPath);
+}
+}
+if (options7.alias) {
+const aliasProject = workspace2.get(options7.alias);
+if (aliasProject) {
+logger.debug(
+`Found local project from alias:`,
+printProjectName(aliasProject)
+);
+return aliasProject;
+} else {
+logger.debug(`No local project found with alias ${options7.alias}`);
+}
+}
+const project = workspace2.get(options7.project);
+if (project) {
+logger.debug(
+`Found local project from identifier:`,
+printProjectName(project)
+);
+return project;
+} else {
+logger.debug(
+`No local project found matching identifier: `,
+options7.project
+);
 }
-
-
+}
+async function fetchRemoteProject(workspace2, options7, logger) {
+logger.debug(`Fetching latest project data from app`);
+const config2 = loadAppAuthConfig(options7, logger);
+let projectUUID = options7.project;
+const localProject = workspace2.get(options7.project);
+if (localProject?.openfn?.uuid && localProject.openfn.uuid !== options7.project) {
+projectUUID = localProject.openfn.uuid;
+logger.debug(
+`Resolved ${options7.project} to UUID ${projectUUID} from local project ${printProjectName(
+localProject
+)}}`
+);
+}
+const projectEndpoint = localProject?.openfn?.endpoint ?? config2.endpoint;
+const { data } = await fetchProject(
+projectEndpoint,
+config2.apiKey,
+projectUUID,
+logger
+);
+const project = await Project3.from(
+"state",
+data,
+{
+endpoint: projectEndpoint
+},
+{
+...workspace2.getConfig(),
+alias: options7.alias ?? localProject?.alias ?? "main"
+}
+);
+logger.debug(
+`Loaded remote project ${project.openfn.uuid} with id ${project.id} and alias ${project.alias}`
 );
-const skipVersionCheck = options6.force || // The user forced the checkout
-!current || // there is no project on disk
-!hasAnyHistory;
-if (!skipVersionCheck && !project.canMergeInto(current)) {
-throw new Error("Error! An incompatible project exists at this location");
-}
-await serialize(project, finalOutputPath, format);
-logger.success(`Fetched project file to ${finalOutput}`);
 return project;
-}
+}
+function ensureTargetCompatible(options7, remoteProject, localProject) {
+if (localProject) {
+if (!options7.force && localProject.uuid != remoteProject.uuid) {
+const error = new Error("PROJECT_EXISTS");
+error.message = "A project with a different UUID exists at this location";
+error.fix = `You have tried to fetch a remote project into a local project with a different UUID
+
+Try adding an alias to rename the new project:
+
+openfn fetch ${options7.project} --alias ${remoteProject.id}
+
+To ignore this error and override the local file, pass --force (-f)
+
+openfn fetch ${options7.project} --force
+`;
+error.fetched_project = {
+uuid: remoteProject.uuid,
+id: remoteProject.id,
+alias: remoteProject.alias
+};
+error.local_project = {
+uuid: localProject.uuid,
+id: localProject.id,
+alias: localProject.alias
+};
+delete error.stack;
+throw error;
+}
+const hasAnyHistory = remoteProject.workflows.find(
+(w) => w.workflow.history?.length
+);
+const skipVersionCheck = options7.force || // The user forced the checkout
+!hasAnyHistory;
+if (!skipVersionCheck && !remoteProject.canMergeInto(localProject)) {
+throw new Error("Error! An incompatible project exists at this location");
+}
+}
+}

 // src/projects/checkout.ts
 import Project4, { Workspace as Workspace2 } from "@openfn/project";
-import
+import path13 from "path";
 import fs4 from "fs";
 import { rimraf } from "rimraf";
-var options2 = [
+var options2 = [log, workspace];
 var command2 = {
-command: "checkout <project
+command: "checkout <project>",
 describe: "Switch to a different OpenFn project in the same workspace",
 handler: ensure("project-checkout", options2),
-builder: (yargs) => build(options2, yargs)
+builder: (yargs) => build(options2, yargs).positional("project", {
+describe: "The id, alias or UUID of the project to chcekout",
+demandOption: true
+})
 };
-var handler3 = async (
-const
-const workspacePath =
+var handler3 = async (options7, logger) => {
+const projectIdentifier = options7.project;
+const workspacePath = options7.workspace ?? process.cwd();
 const workspace2 = new Workspace2(workspacePath, logger);
 const { project: _, ...config2 } = workspace2.getConfig();
 let switchProject;
-if (/\.(yaml|json)$/.test(
-const filePath =
+if (/\.(yaml|json)$/.test(projectIdentifier)) {
+const filePath = projectIdentifier.startsWith("/") ? projectIdentifier : path13.join(workspacePath, projectIdentifier);
 logger.debug("Loading project from path ", filePath);
 switchProject = await Project4.from("path", filePath, config2);
 } else {
-switchProject = workspace2.get(
+switchProject = workspace2.get(projectIdentifier);
 }
 if (!switchProject) {
-throw new Error(
+throw new Error(
+`Project with id ${projectIdentifier} not found in the workspace`
+);
 }
-await rimraf(
+await rimraf(path13.join(workspacePath, config2.workflowRoot ?? "workflows"));
 const files = switchProject.serialize("fs");
 for (const f in files) {
 if (files[f]) {
-fs4.mkdirSync(
+fs4.mkdirSync(path13.join(workspacePath, path13.dirname(f)), {
 recursive: true
 });
-fs4.writeFileSync(
+fs4.writeFileSync(path13.join(workspacePath, f), files[f]);
 } else {
 logger.warn("WARNING! No content for file", f);
 }
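
The new fetch flow above resolves its local target in three steps (an explicit output path, then an alias, then the positional identifier) and refuses to overwrite a local project whose UUID differs from the remote one unless --force is passed. The thrown error carries structured context; a hedged sketch of consuming it, where handler2 is the fetch handler defined above (how it would be imported by external code is an assumption, and the identifiers are invented):

    const logger = console; // stand-in; the real CLI logger has more methods
    try {
      await handler2({ project: "my-project", workspace: "/tmp/ws" }, logger);
    } catch (e) {
      // ensureTargetCompatible attaches both sides of a UUID mismatch.
      if (e.fetched_project && e.local_project) {
        logger.error("local:", e.local_project); // { uuid, id, alias }
        logger.error("remote:", e.fetched_project); // { uuid, id, alias }
        logger.error(e.fix); // suggests --alias or --force
      }
    }
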
@@ -2629,27 +2814,48 @@ var handler3 = async (options6, logger) => {
 };

 // src/projects/pull.ts
-
-
+var options3 = [
+alias,
+env,
+workspace,
+apiKey,
+endpoint,
+log,
+override(path11, {
+description: "path to output the project to"
+}),
+logJson,
+projectPath,
+snapshots,
+path11,
+force
+];
+var command3 = {
+command: "pull [project]",
+describe: `Pull a project from a Lightning Instance and expand to the file system (ie fetch + checkout)`,
+builder: (yargs) => build(options3, yargs).positional("project", {
+describe: "The UUID, local id or local alias of the project to pull"
+}).example(
+"pull 57862287-23e6-4650-8d79-e1dd88b24b1c",
+"Pull project with a UUID from a lightning instance"
+),
+handler: ensure("project-pull", options3)
+};
+async function handler4(options7, logger) {
+await handler2(options7, logger);
 logger.success(`Downloaded latest project version`);
-await handler3(
-{
-...options6,
-projectId: project.id
-},
-logger
-);
+await handler3(options7, logger);
 logger.success(`Checked out project locally`);
 }
 var pull_default = handler4;

 // src/pull/handler.ts
-async function pullHandler(
-if (
-return pull_default(
+async function pullHandler(options7, logger) {
+if (options7.beta) {
+return pull_default(options7, logger);
 }
 try {
-const config2 = mergeOverrides2(await getConfig2(
+const config2 = mergeOverrides2(await getConfig2(options7.configPath), options7);
 if (process.env["OPENFN_API_KEY"]) {
 logger.info("Using OPENFN_API_KEY environment variable");
 config2.apiKey = process.env["OPENFN_API_KEY"];
@@ -2661,10 +2867,10 @@ async function pullHandler(options6, logger) {
 logger.always(
 "Downloading existing project state (as JSON) from the server."
 );
-const { data: project } = await
+const { data: project } = await getProject(
 config2,
-
-
+options7.projectId,
+options7.snapshots
 );
 if (!project) {
 logger.error("ERROR: Project not found.");
@@ -2677,8 +2883,8 @@ async function pullHandler(options6, logger) {
 const state = getStateFromProjectPayload(project);
 logger.always("Downloading the project spec (as YAML) from the server.");
 const queryParams = new URLSearchParams();
-queryParams.append("id",
-
+queryParams.append("id", options7.projectId);
+options7.snapshots?.forEach(
 (snapshot) => queryParams.append("snapshots[]", snapshot)
 );
 const url2 = new URL(
@@ -2700,7 +2906,7 @@ async function pullHandler(options6, logger) {
 process.exitCode = 1;
 process.exit(1);
 }
-const resolvedPath =
+const resolvedPath = path14.resolve(config2.specPath);
 logger.debug("reading spec from", resolvedPath);
 const updatedSpec = await syncRemoteSpec(
 await res.text(),
@@ -2709,7 +2915,7 @@ async function pullHandler(options6, logger) {
 logger
 );
 await fs5.writeFile(
-
+path14.resolve(config2.statePath),
 JSON.stringify(state, null, 2)
 );
 await fs5.writeFile(resolvedPath, updatedSpec);
@@ -2727,13 +2933,13 @@ async function pullHandler(options6, logger) {
 throw error;
 }
 }
-function mergeOverrides2(config2,
+function mergeOverrides2(config2, options7) {
 return {
 ...config2,
 apiKey: pickFirst2(process.env["OPENFN_API_KEY"], config2.apiKey),
 endpoint: pickFirst2(process.env["OPENFN_ENDPOINT"], config2.endpoint),
-configPath:
-requireConfirmation: pickFirst2(
+configPath: options7.configPath,
+requireConfirmation: pickFirst2(options7.confirm, config2.requireConfirmation)
 };
 }
 function pickFirst2(...args) {
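
mergeOverrides2 gives environment variables precedence over config-file values via pickFirst2, whose body falls outside this hunk. A minimal stand-in consistent with that usage (an assumption, not the package's actual implementation):

    // Assumed behaviour: return the first argument that is defined.
    function pickFirst(...args) {
      return args.find((a) => a !== undefined && a !== null);
    }

    pickFirst(undefined, "from-config"); // => "from-config"
    pickFirst("from-env", "from-config"); // => "from-env"
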
@@ -2748,35 +2954,36 @@ __export(handler_exports, {
 fetch: () => handler2,
 list: () => handler5,
 merge: () => handler7,
+pull: () => handler4,
 version: () => handler6
 });

 // src/projects/list.ts
 import { Workspace as Workspace3 } from "@openfn/project";
-var
-var
+var options4 = [log, workspace];
+var command4 = {
 command: "list [project-path]",
 describe: "List all the openfn projects available in the current directory",
 aliases: ["project", "$0"],
-handler: ensure("project-list",
-builder: (yargs) => build(
+handler: ensure("project-list", options4),
+builder: (yargs) => build(options4, yargs)
 };
-var handler5 = async (
+var handler5 = async (options7, logger) => {
 logger.info("Searching for projects in workspace at:");
-logger.info(" ",
+logger.info(" ", options7.workspace);
 logger.break();
-const workspace2 = new Workspace3(
+const workspace2 = new Workspace3(options7.workspace);
 if (!workspace2.valid) {
 throw new Error("No OpenFn projects found");
 }
 logger.always(`Available openfn projects

-${workspace2.list().map((p) => describeProject(p, p
+${workspace2.list().map((p) => describeProject(p, p === workspace2.getActiveProject())).join("\n\n")}
 `);
 };
 function describeProject(project, active = false) {
 const uuid = project.openfn?.uuid;
-return `${project.id} ${active ? "(active)" : ""}
+return `${project.alias || "(no alias)"} | ${project.id} ${active ? "(active)" : ""}
 ${uuid || "<project-id>"}
 workflows:
 ${project.workflows.map((w) => " - " + w.id).join("\n")}`;
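
The list output now leads with the alias. Using the template from describeProject above with invented data (and the example UUID already used in this file), an entry renders like this:

    const project = {
      alias: "main",
      id: "my-project",
      openfn: { uuid: "57862287-23e6-4650-8d79-e1dd88b24b1c" },
      workflows: [{ id: "workflow-one" }, { id: "workflow-two" }],
    };
    // Same template as describeProject above.
    const describe = (p, active = false) => `${p.alias || "(no alias)"} | ${p.id} ${active ? "(active)" : ""}
    ${p.openfn?.uuid || "<project-id>"}
    workflows:
    ${p.workflows.map((w) => " - " + w.id).join("\n")}`;
    console.log(describe(project, true));
    // main | my-project (active)
    // 57862287-23e6-4650-8d79-e1dd88b24b1c
    // workflows:
    //  - workflow-one
    //  - workflow-two
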
@@ -2784,25 +2991,25 @@ ${project.workflows.map((w) => " - " + w.id).join("\n")}`;

 // src/projects/version.ts
 import { Workspace as Workspace4 } from "@openfn/project";
-var
-var
+var options5 = [workflow, workspace, workflowMappings];
+var command5 = {
 command: "version [workflow]",
 describe: "Returns the version hash of a given workflow in a workspace",
-handler: ensure("project-version",
-builder: (yargs) => build(
+handler: ensure("project-version", options5),
+builder: (yargs) => build(options5, yargs)
 };
-var handler6 = async (
-const workspace2 = new Workspace4(
+var handler6 = async (options7, logger) => {
+const workspace2 = new Workspace4(options7.workspace);
 if (!workspace2.valid) {
 logger.error("Command was run in an invalid openfn workspace");
 return;
 }
 const output = /* @__PURE__ */ new Map();
 const activeProject = workspace2.getActiveProject();
-if (
-const workflow2 = activeProject?.getWorkflow(
+if (options7.workflow) {
+const workflow2 = activeProject?.getWorkflow(options7.workflow);
 if (!workflow2) {
-logger.error(`No workflow found with id ${
+logger.error(`No workflow found with id ${options7.workflow}`);
 return;
 }
 output.set(workflow2.name || workflow2.id, workflow2.getVersionHash());
@@ -2816,7 +3023,7 @@ var handler6 = async (options6, logger) => {
 return;
 }
 let final;
-if (
+if (options7.json) {
 final = JSON.stringify(Object.fromEntries(output), void 0, 2);
 } else {
 final = Array.from(output.entries()).map(([key, value]) => key + "\n" + value).join("\n\n");
@@ -2828,16 +3035,16 @@ ${final}`);

 // src/projects/merge.ts
 import Project6, { Workspace as Workspace5 } from "@openfn/project";
-import
+import path15 from "node:path";
 import fs6 from "node:fs/promises";
-var
-projectId,
+var options6 = [
 removeUnmapped,
 workflowMappings,
-log,
 workspace,
+log,
 // custom output because we don't want defaults or anything
 {
+// TODO presumably if we do this we don't also checkout?
 name: "output-path",
 yargs: {
 alias: "o",
@@ -2855,22 +3062,22 @@ var options5 = [
 description: "Force a merge even when workflows are incompatible"
 })
 ];
-var
-command: "merge <project
-describe: "Merges the specified project into the currently checked out project",
-handler: ensure("project-merge",
-builder: (yargs) => build(
-};
-var handler7 = async (
-const
-const workspace2 = new Workspace5(
+var command6 = {
+command: "merge <project>",
+describe: "Merges the specified project (by UUID, id or alias) into the currently checked out project",
+handler: ensure("project-merge", options6),
+builder: (yargs) => build(options6, yargs)
+};
+var handler7 = async (options7, logger) => {
+const workspacePath = options7.workspace;
+const workspace2 = new Workspace5(workspacePath);
 if (!workspace2.valid) {
 logger.error("Command was run in an invalid openfn workspace");
 return;
 }
 let targetProject;
-if (
-const basePath =
+if (options7.base) {
+const basePath = path15.resolve(options7.base);
 logger.debug("Loading target project from path", basePath);
 targetProject = await Project6.from("path", basePath);
 } else {
@@ -2881,17 +3088,22 @@ var handler7 = async (options6, logger) => {
 }
 logger.debug(`Loading target project from workspace (${targetProject.id})`);
 }
+const sourceProjectIdentifier = options7.project;
 let sourceProject;
-if (/\.(
-const filePath =
+if (/\.(ya?ml|json)$/.test(sourceProjectIdentifier)) {
+const filePath = path15.join(workspacePath, sourceProjectIdentifier);
 logger.debug("Loading source project from path ", filePath);
 sourceProject = await Project6.from("path", filePath);
 } else {
-logger.debug(
-
+logger.debug(
+`Loading source project from workspace ${sourceProjectIdentifier}`
+);
+sourceProject = workspace2.get(sourceProjectIdentifier);
 }
 if (!sourceProject) {
-logger.error(
+logger.error(
+`Project "${sourceProjectIdentifier}" not found in the workspace`
+);
 return;
 }
 if (targetProject.id === sourceProject.id) {
@@ -2902,20 +3114,20 @@ var handler7 = async (options6, logger) => {
 logger.error("The checked out project has no id");
 return;
 }
-const finalPath =
+const finalPath = options7.outputPath ?? workspace2.getProjectPath(targetProject.id);
 if (!finalPath) {
 logger.error("Path to checked out project not found.");
 return;
 }
 const final = Project6.merge(sourceProject, targetProject, {
-removeUnmapped:
-workflowMappings:
-force:
+removeUnmapped: options7.removeUnmapped,
+workflowMappings: options7.workflowMappings,
+force: options7.force
 });
 let outputFormat = workspace2.config.formats.project;
-if (
+if (options7.outputPath?.endsWith(".json")) {
 outputFormat = "json";
-} else if (
+} else if (options7.outputPath?.endsWith(".yaml")) {
 outputFormat = "yaml";
 }
 let finalState = final.serialize("state", {
@@ -2929,10 +3141,9 @@ var handler7 = async (options6, logger) => {
 logger.info("Checking out merged project to filesystem");
 await handler3(
 {
-
-
-
-log: options6.log
+workspace: workspacePath,
+project: options7.outputPath ? finalPath : final.id,
+log: options7.log
 },
 logger
 );
@@ -2943,7 +3154,7 @@ var handler7 = async (options6, logger) => {

 // src/util/print-versions.ts
 import { readFileSync as readFileSync2 } from "node:fs";
-import
+import path16 from "node:path";
 import url from "node:url";
 import { getNameAndVersion as getNameAndVersion7 } from "@openfn/runtime";
 import { mainSymbols } from "figures";
@@ -2955,15 +3166,15 @@ var { triangleRightSmall: t } = mainSymbols;
 var loadVersionFromPath = (adaptorPath) => {
 try {
 const pkg = JSON.parse(
-readFileSync2(
+readFileSync2(path16.resolve(adaptorPath, "package.json"), "utf8")
 );
 return pkg.version;
 } catch (e) {
 return "unknown";
 }
 };
-var printVersions = async (logger,
-const { adaptors, logJson: logJson2 } =
+var printVersions = async (logger, options7 = {}, includeComponents = false) => {
+const { adaptors, logJson: logJson2 } = options7;
 let longestAdaptorName = "";
 const adaptorList = [];
 adaptors?.forEach((adaptor) => {
@@ -2973,7 +3184,7 @@ var printVersions = async (logger, options6 = {}, includeComponents = false) =>
 const [namePart, pathPart] = adaptor.split("=");
 adaptorVersion = loadVersionFromPath(pathPart);
 adaptorName = getNameAndVersion7(namePart).name;
-} else if (
+} else if (options7.monorepoPath) {
 adaptorName = getNameAndVersion7(adaptor).name;
 adaptorVersion = "monorepo";
 } else {
@@ -2990,7 +3201,7 @@ var printVersions = async (logger, options6 = {}, includeComponents = false) =>
 ...[NODE, CLI2, RUNTIME2, COMPILER2, longestAdaptorName].map((s) => s.length)
 );
 const prefix = (str) => ` ${t} ${str.padEnd(longest + 4, " ")}`;
-const dirname3 =
+const dirname3 = path16.dirname(url.fileURLToPath(import.meta.url));
 const pkg = JSON.parse(readFileSync2(`${dirname3}/../../package.json`, "utf8"));
 const { version, dependencies } = pkg;
 const compilerVersion = dependencies["@openfn/compiler"];
@@ -3071,6 +3282,7 @@ var handlers = {
 ["repo-install"]: install,
 ["repo-pwd"]: pwd,
 ["repo-list"]: list,
+["project-pull"]: handler4,
 ["project-list"]: handler5,
 ["project-version"]: handler6,
 ["project-merge"]: handler7,
@@ -3078,13 +3290,13 @@ var handlers = {
 ["project-fetch"]: handler2,
 version: async (opts, logger) => print_versions_default(logger, opts, true)
 };
-var parse = async (
-const logger = log2 || logger_default(CLI,
-if (
-await print_versions_default(logger,
+var parse = async (options7, log2) => {
+const logger = log2 || logger_default(CLI, options7);
+if (options7.command === "execute" || options7.command === "test") {
+await print_versions_default(logger, options7);
 }
 report(logger);
-const { monorepoPath } =
+const { monorepoPath } = options7;
 if (monorepoPath) {
 if (monorepoPath === "ERR") {
 logger.error(
@@ -3095,19 +3307,19 @@ var parse = async (options6, log2) => {
 }
 await validateMonoRepo(monorepoPath, logger);
 logger.success(`Loading adaptors from monorepo at ${monorepoPath}`);
-
+options7.adaptors = map_adaptors_to_monorepo_default(
 monorepoPath,
-
+options7.adaptors,
 logger
 );
 }
-const handler8 = handlers[
+const handler8 = handlers[options7.command];
 if (!handler8) {
-logger.error(`Unrecognised command: ${
+logger.error(`Unrecognised command: ${options7.command}`);
 process.exit(1);
 }
 try {
-return await handler8(
+return await handler8(options7, logger);
 } catch (e) {
 if (!process.exitCode) {
 process.exitCode = e.exitCode || 1;