@openfn/cli 1.22.0 → 1.24.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -21,13 +21,13 @@ var urlMap = {
  ["local"]: LOCAL_URL
  };
  var DEFAULT_ENV = "staging";
- var getURL = (options7) => {
- if (options7.apolloUrl) {
- if (options7.apolloUrl in urlMap) {
- return urlMap[options7.apolloUrl];
+ var getURL = (options8) => {
+ if (options8.apolloUrl) {
+ if (options8.apolloUrl in urlMap) {
+ return urlMap[options8.apolloUrl];
  }
- if (options7.apolloUrl.startsWith("http")) {
- return options7.apolloUrl;
+ if (options8.apolloUrl.startsWith("http")) {
+ return options8.apolloUrl;
  }
  throw new Error(`Unrecognised apollo URL`);
  }
@@ -52,14 +52,14 @@ var outputFiles = (files, logger) => {
  };

  // src/apollo/handler.ts
- var apolloHandler = async (options7, logger) => {
- logger.always(`Calling Apollo service: ${options7.service}`);
- const json = await loadPayload(logger, options7.payload);
- const url2 = getURL(options7);
+ var apolloHandler = async (options8, logger) => {
+ logger.always(`Calling Apollo service: ${options8.service}`);
+ const json = await loadPayload(logger, options8.payload);
+ const url2 = getURL(options8);
  logger.success(`Using apollo server at`, url2);
- const result = await callApollo(url2, options7.service, json, logger);
+ const result = await callApollo(url2, options8.service, json, logger);
  if (result) {
- await serializeOutput(options7, result, logger);
+ await serializeOutput(options8, result, logger);
  } else {
  logger.warn("No output returned from Apollo");
  }
@@ -79,15 +79,15 @@ var write = async (basePath, filePath, content, logger) => {
  await writeFile(dest, content);
  logger.success(`Wrote content to ${dest}`);
  };
- var serializeOutput = async (options7, result, logger) => {
- if (options7.outputPath) {
- if (result.files && !options7.outputPath.endsWith(".json")) {
+ var serializeOutput = async (options8, result, logger) => {
+ if (options8.outputPath) {
+ if (result.files && !options8.outputPath.endsWith(".json")) {
  for (const p in result.files) {
- await write(options7.outputPath, p, result.files[p], logger);
+ await write(options8.outputPath, p, result.files[p], logger);
  }
  } else {
  await write(
- options7.outputPath,
+ options8.outputPath,
  "",
  JSON.stringify(result, null, 2),
  logger
@@ -169,17 +169,17 @@ var namespaces = {
  [COMPILER]: "CMP",
  [JOB]: "JOB"
  };
- var createLogger2 = (name = "", options7) => {
- const logOptions = options7.log || {};
+ var createLogger2 = (name = "", options8) => {
+ const logOptions = options8.log || {};
  let json = false;
  let level = logOptions[name] || logOptions.default || "default";
- if (options7.logJson) {
+ if (options8.logJson) {
  json = true;
  }
  return actualCreateLogger(namespaces[name] || name, {
  level,
  json,
- sanitize: options7.sanitize || "none",
+ sanitize: options8.sanitize || "none",
  ...logOptions
  });
  };
@@ -190,8 +190,8 @@ var createNullLogger = () => createLogger2(void 0, { log: { default: "none" } })
  import fs from "node:fs";
  import path2 from "node:path";
  import { rmdir } from "node:fs/promises";
- var getCachePath = async (plan, options7, stepId) => {
- const { baseDir } = options7;
+ var getCachePath = async (plan, options8, stepId) => {
+ const { baseDir } = options8;
  const { name } = plan.workflow;
  const basePath = `${baseDir}/.cli-cache/${name}`;
  if (stepId) {
@@ -199,10 +199,10 @@ var getCachePath = async (plan, options7, stepId) => {
  }
  return path2.resolve(basePath);
  };
- var ensureGitIgnore = (options7) => {
- if (!options7._hasGitIgnore) {
+ var ensureGitIgnore = (options8) => {
+ if (!options8._hasGitIgnore) {
  const ignorePath = path2.resolve(
- options7.baseDir,
+ options8.baseDir,
  ".cli-cache",
  ".gitignore"
  );
@@ -212,19 +212,19 @@ var ensureGitIgnore = (options7) => {
  fs.writeFileSync(ignorePath, "*");
  }
  }
- options7._hasGitIgnore = true;
+ options8._hasGitIgnore = true;
  };
- var saveToCache = async (plan, stepId, output, options7, logger) => {
- if (options7.cacheSteps) {
- const cachePath = await getCachePath(plan, options7, stepId);
+ var saveToCache = async (plan, stepId, output, options8, logger) => {
+ if (options8.cacheSteps) {
+ const cachePath = await getCachePath(plan, options8, stepId);
  fs.mkdirSync(path2.dirname(cachePath), { recursive: true });
- ensureGitIgnore(options7);
+ ensureGitIgnore(options8);
  logger.info(`Writing ${stepId} output to ${cachePath}`);
  fs.writeFileSync(cachePath, JSON.stringify(output));
  }
  };
- var clearCache = async (plan, options7, logger) => {
- const cacheDir = await getCachePath(plan, options7);
+ var clearCache = async (plan, options8, logger) => {
+ const cacheDir = await getCachePath(plan, options8);
  try {
  await rmdir(cacheDir, { recursive: true });
  logger.info(`Cleared cache at ${cacheDir}`);
@@ -294,7 +294,7 @@ function parseAdaptors(plan) {
  // src/execute/serialize-output.ts
  import { mkdir as mkdir2, writeFile as writeFile2 } from "node:fs/promises";
  import { dirname } from "node:path";
- var serializeOutput2 = async (options7, result, logger) => {
+ var serializeOutput2 = async (options8, result, logger) => {
  let output = result;
  if (output && (output.configuration || output.data)) {
  const { configuration, ...rest } = result;
@@ -305,14 +305,14 @@ var serializeOutput2 = async (options7, result, logger) => {
  } else {
  output = JSON.stringify(output, void 0, 2);
  }
- if (options7.outputStdout) {
+ if (options8.outputStdout) {
  logger.success(`Result: `);
  logger.always(output);
- } else if (options7.outputPath) {
- await mkdir2(dirname(options7.outputPath), { recursive: true });
- logger.debug(`Writing output to ${options7.outputPath}`);
- await writeFile2(options7.outputPath, output);
- logger.success(`State written to ${options7.outputPath}`);
+ } else if (options8.outputPath) {
+ await mkdir2(dirname(options8.outputPath), { recursive: true });
+ logger.debug(`Writing output to ${options8.outputPath}`);
+ await writeFile2(options8.outputPath, output);
+ logger.success(`State written to ${options8.outputPath}`);
  }
  return output;
  };
@@ -332,20 +332,34 @@ var getAutoinstallTargets = (plan) => {
  var get_autoinstall_targets_default = getAutoinstallTargets;

  // src/execute/apply-credential-map.ts
+ var CREDENTIALS_KEY = "$CREDENTIALS$";
  var applyCredentialMap = (plan, map = {}, logger) => {
  const stepsWithCredentialIds = plan.workflow.steps.filter(
- (step) => typeof step.configuration === "string" && !step.configuration.endsWith(".json")
+ (step) => typeof step.configuration === "string" && !step.configuration.endsWith(".json") || step.configuration?.[CREDENTIALS_KEY]
  );
  const unmapped = {};
  for (const step of stepsWithCredentialIds) {
- if (map[step.configuration]) {
- logger?.debug(
- `Applying credential ${step.configuration} to "${step.name ?? step.id}"`
- );
- step.configuration = map[step.configuration];
+ if (typeof step.configuration === "string") {
+ const configId = step.configuration;
+ if (configId in map) {
+ step.configuration = map[configId];
+ } else {
+ unmapped[configId] = true;
+ delete step.configuration;
+ }
  } else {
- unmapped[step.configuration] = true;
- delete step.configuration;
+ const configId = step.configuration[CREDENTIALS_KEY];
+ delete step.configuration[CREDENTIALS_KEY];
+ if (configId in map) {
+ Object.assign(step.configuration, map[configId]);
+ } else {
+ unmapped[configId] = true;
+ }
+ if (!(configId in unmapped)) {
+ logger?.debug(
+ `Applied credential ${configId} to "${step.name ?? step.id}"`
+ );
+ }
  }
  }
  if (Object.keys(unmapped).length) {
@@ -409,16 +423,16 @@ var removePackage = async (packageSpecifier, repoDir, logger) => {
  logger.warn(`Failed to remove ${aliasedName}: ${error.message}`);
  }
  };
- var clean = async (options7, logger) => {
- if (options7.repoDir) {
+ var clean = async (options8, logger) => {
+ if (options8.repoDir) {
  const doIt = await logger.confirm(
- `This will remove everything at ${options7.repoDir}. Do you wish to proceed?`,
- options7.force
+ `This will remove everything at ${options8.repoDir}. Do you wish to proceed?`,
+ options8.force
  );
  if (doIt) {
  return new Promise((resolve) => {
- logger.info(`Cleaning repo at ${options7.repoDir} `);
- exec(`npm exec rimraf ${options7.repoDir}`, () => {
+ logger.info(`Cleaning repo at ${options8.repoDir} `);
+ exec(`npm exec rimraf ${options8.repoDir}`, () => {
  logger.success("Repo cleaned");
  resolve();
  });
@@ -429,12 +443,12 @@ var clean = async (options7, logger) => {
  logger.error("No repoDir path detected");
  }
  };
- var pwd = async (options7, logger) => {
+ var pwd = async (options8, logger) => {
  logger.info(`OPENFN_REPO_DIR is set to ${process.env.OPENFN_REPO_DIR}`);
- logger.success(`Repo working directory is: ${options7.repoDir}`);
+ logger.success(`Repo working directory is: ${options8.repoDir}`);
  };
- var getDependencyList = async (options7, _logger) => {
- const pkg = await loadRepoPkg(options7.repoDir);
+ var getDependencyList = async (options8, _logger) => {
+ const pkg = await loadRepoPkg(options8.repoDir);
  const result = {};
  if (pkg) {
  Object.keys(pkg.dependencies).forEach((key) => {
@@ -447,9 +461,9 @@ var getDependencyList = async (options7, _logger) => {
  }
  return result;
  };
- var list = async (options7, logger) => {
- const tree = await getDependencyList(options7, logger);
- await pwd(options7, logger);
+ var list = async (options8, logger) => {
+ const tree = await getDependencyList(options8, logger);
+ await pwd(options8, logger);
  const output = {};
  Object.keys(tree).forEach((key) => {
  const versions = tree[key];
@@ -581,7 +595,7 @@ var resolveSpecifierPath = async (pattern, repoDir, log2) => {
  return null;
  };
  var loadTransformOptions = async (opts, log2) => {
- const options7 = {
+ const options8 = {
  logger: log2 || logger_default(COMPILER, opts),
  trace: opts.trace
  };
@@ -609,12 +623,12 @@ var loadTransformOptions = async (opts, log2) => {
  exportAll: true
  });
  }
- options7["add-imports"] = {
+ options8["add-imports"] = {
  ignore: opts.ignoreImports,
  adaptors: adaptorsConfig
  };
  }
- return options7;
+ return options8;
  };

  // src/util/load-state.ts
@@ -707,20 +721,19 @@ var load_state_default = async (plan, opts, log2, start) => {
  };

  // src/util/validate-adaptors.ts
- var validateAdaptors = async (options7, logger) => {
- if (options7.skipAdaptorValidation) {
+ var validateAdaptors = async (options8, logger) => {
+ if (options8.skipAdaptorValidation) {
  return;
  }
- const isPlan = options7.planPath || options7.workflowPath || options7.workflow;
- const hasDeclaredAdaptors = options7.adaptors && options7.adaptors.length > 0;
- if (isPlan && hasDeclaredAdaptors) {
+ const hasDeclaredAdaptors = options8.adaptors && options8.adaptors.length > 0;
+ if (!options8.expressionPath && hasDeclaredAdaptors) {
  logger.error("ERROR: adaptor and workflow provided");
  logger.error(
  "This is probably not what you meant to do. A workflow should declare an adaptor for each job."
  );
  throw new Error("adaptor and workflow provided");
  }
- if (!isPlan && !hasDeclaredAdaptors) {
+ if (options8.expressionPath && !hasDeclaredAdaptors) {
  logger.warn("WARNING: No adaptor provided!");
  logger.warn(
  "This job will probably fail. Pass an adaptor with the -a flag, eg:"
@@ -736,7 +749,7 @@ var validate_adaptors_default = validateAdaptors;
  import fs3 from "node:fs/promises";
  import path4, { dirname as dirname2 } from "node:path";
  import { isPath } from "@openfn/compiler";
- import Project, { yamlToJson } from "@openfn/project";
+ import { Workspace, yamlToJson } from "@openfn/project";

  // src/util/expand-adaptors.ts
  var expand = (name) => {
@@ -822,48 +835,61 @@ var resolve_path_default = (path17, root) => {
  };

  // src/util/load-plan.ts
- var loadPlan = async (options7, logger) => {
- const { workflowPath, planPath, expressionPath } = options7;
- if (options7.path && /ya?ml$/.test(options7.path)) {
- const content = await fs3.readFile(path4.resolve(options7.path), "utf-8");
- const workflow2 = yamlToJson(content);
- options7.baseDir = dirname2(options7.path);
- return loadXPlan({ workflow: workflow2 }, options7, logger);
- }
- if (options7.path && options7.workflow) {
- options7.baseDir = options7.path;
- return fromProject(options7.path, options7.workflow, options7, logger);
+ var loadPlan = async (options8, logger) => {
+ const { workflowPath, planPath, expressionPath, workflowName } = options8;
+ let workflowObj;
+ if (workflowName || options8.workflow) {
+ logger.debug(
+ "Loading workflow from active project in workspace at ",
+ options8.workspace
+ );
+ const workspace2 = new Workspace(options8.workspace);
+ const proj = await workspace2.getCheckedOutProject();
+ const name = workflowName || options8.workflow;
+ const workflow2 = proj?.getWorkflow(name);
+ if (!workflow2) {
+ const e = new Error(`Could not find Workflow "${name}"`);
+ delete e.stack;
+ throw e;
+ }
+ workflowObj = {
+ workflow: workflow2.toJSON()
+ };
+ options8.credentials ??= workspace2.getConfig().credentials;
+ options8.collectionsEndpoint ??= proj.openfn?.endpoint;
  }
- if (!expressionPath && !workflowPath && !/\.(js|json|yaml)+$/.test(options7.path || "") && !options7.workflow) {
- const workflow2 = options7.path;
- return fromProject(path4.resolve("."), workflow2, options7, logger);
+ if (options8.path && /ya?ml$/.test(options8.path)) {
+ const content = await fs3.readFile(path4.resolve(options8.path), "utf-8");
+ options8.baseDir = dirname2(options8.path);
+ workflowObj = yamlToJson(content);
+ const { options: o, ...rest } = workflowObj;
+ if (!workflowObj.workflow && workflowObj.options) {
+ workflowObj = { workflow: rest, options: o };
+ }
  }
- if (expressionPath) {
- return loadExpression(options7, logger);
+ if (!workflowObj && expressionPath) {
+ return loadExpression(options8, logger);
  }
  const jsonPath = planPath || workflowPath;
- if (!options7.baseDir) {
- options7.baseDir = path4.dirname(jsonPath);
- }
- const json = await loadJson(jsonPath, logger);
- const defaultName = path4.parse(jsonPath).name;
- if (json.workflow) {
- return loadXPlan(json, options7, logger, defaultName);
+ if (jsonPath && !options8.baseDir) {
+ options8.baseDir = path4.dirname(jsonPath);
+ }
+ workflowObj = workflowObj ?? await loadJson(jsonPath, logger);
+ const defaultName = workflowObj.name || path4.parse(jsonPath ?? "").name;
+ if (workflowObj.jobs) {
+ return loadOldWorkflow(workflowObj, options8, logger, defaultName);
+ } else if (workflowObj.workflow) {
+ return loadXPlan(
+ workflowObj,
+ Object.assign({}, workflowObj.options, options8),
+ logger,
+ defaultName
+ );
  } else {
- return loadOldWorkflow(json, options7, logger, defaultName);
+ return loadXPlan({ workflow: workflowObj }, options8, logger, defaultName);
  }
  };
  var load_plan_default = loadPlan;
- var fromProject = async (rootDir, workflowName, options7, logger) => {
- logger.debug("Loading Repo from ", path4.resolve(rootDir));
- const project = await Project.from("fs", { root: rootDir });
- logger.debug("Loading workflow ", workflowName);
- const workflow2 = project.getWorkflow(workflowName);
- if (!workflow2) {
- throw new Error(`Workflow "${workflowName}" not found`);
- }
- return loadXPlan({ workflow: workflow2 }, options7, logger);
- };
  var loadJson = async (workflowPath, logger) => {
  let text;
  try {
@@ -897,8 +923,8 @@ var maybeAssign = (a, b, keys) => {
  }
  });
  };
- var loadExpression = async (options7, logger) => {
- const expressionPath = options7.expressionPath;
+ var loadExpression = async (options8, logger) => {
+ const expressionPath = options8.expressionPath;
  logger.debug(`Loading expression from ${expressionPath}`);
  try {
  const expression = await fs3.readFile(expressionPath, "utf8");
@@ -906,19 +932,19 @@ var loadExpression = async (options7, logger) => {
  const step = {
  expression,
  // The adaptor should have been expanded nicely already, so we don't need intervene here
- adaptors: options7.adaptors ?? []
+ adaptors: options8.adaptors ?? []
  };
  const wfOptions = {};
- maybeAssign(options7, wfOptions, ["timeout"]);
+ maybeAssign(options8, wfOptions, ["timeout"]);
  const plan = {
  workflow: {
  name,
  steps: [step],
- globals: options7.globals
+ globals: options8.globals
  },
  options: wfOptions
  };
- return loadXPlan(plan, options7, logger);
+ return loadXPlan(plan, options8, logger);
  } catch (e) {
  abort_default(
  logger,
@@ -929,7 +955,7 @@ var loadExpression = async (options7, logger) => {
  return {};
  }
  };
- var loadOldWorkflow = async (workflow2, options7, logger, defaultName = "") => {
+ var loadOldWorkflow = async (workflow2, options8, logger, defaultName = "") => {
  const plan = {
  workflow: {
  steps: workflow2.jobs
@@ -941,7 +967,7 @@ var loadOldWorkflow = async (workflow2, options7, logger, defaultName = "") => {
  if (workflow2.id) {
  plan.id = workflow2.id;
  }
- const final = await loadXPlan(plan, options7, logger, defaultName);
+ const final = await loadXPlan(plan, options8, logger, defaultName);
  logger.warn("Converted workflow into new format:");
  logger.warn(final);
  return final;
@@ -1034,11 +1060,12 @@ var ensureAdaptors = (plan) => {
  });
  };
  var ensureCollections = (plan, {
- endpoint: endpoint2 = "https://app.openfn.org",
+ endpoint: endpoint2,
  version = "latest",
  apiKey: apiKey2 = "null"
  } = {}, logger) => {
  let collectionsFound = false;
+ endpoint2 ??= plan.options?.collectionsEndpoint ?? "https://app.openfn.org";
  Object.values(plan.workflow.steps).filter((step) => step.expression?.match(/(collections\.)/)).forEach((step) => {
  const job = step;
  if (!job.adaptors?.find(
@@ -1049,6 +1076,11 @@ var ensureCollections = (plan, {
  job.adaptors.push(
  `@openfn/language-collections@${version || "latest"}`
  );
+ if (typeof job.configuration === "string") {
+ job.configuration = {
+ [CREDENTIALS_KEY]: job.configuration
+ };
+ }
  job.configuration = Object.assign({}, job.configuration, {
  collections_endpoint: `${endpoint2}/collections`,
  collections_token: apiKey2
@@ -1068,7 +1100,7 @@ var ensureCollections = (plan, {
  );
  }
  };
- var loadXPlan = async (plan, options7, logger, defaultName = "") => {
+ var loadXPlan = async (plan, options8, logger, defaultName = "") => {
  if (!plan.options) {
  plan.options = {};
  }
@@ -1079,21 +1111,21 @@ var loadXPlan = async (plan, options7, logger, defaultName = "") => {
  ensureCollections(
  plan,
  {
- version: options7.collectionsVersion,
- apiKey: options7.apiKey,
- endpoint: options7.collectionsEndpoint
+ version: options8.collectionsVersion,
+ apiKey: options8.apiKey,
+ endpoint: options8.collectionsEndpoint
  },
  logger
  );
- if (options7.globals)
- plan.workflow.globals = options7.globals;
- await importGlobals(plan, options7.baseDir, logger);
- await importExpressions(plan, options7.baseDir, logger);
- if (options7.expandAdaptors) {
+ if (options8.globals)
+ plan.workflow.globals = options8.globals;
+ await importGlobals(plan, options8.baseDir, logger);
+ await importExpressions(plan, options8.baseDir, logger);
+ if (options8.expandAdaptors) {
  expand_adaptors_default(plan);
  }
- await map_adaptors_to_monorepo_default(options7.monorepoPath, plan, logger);
- maybeAssign(options7, plan.options, ["timeout", "start"]);
+ await map_adaptors_to_monorepo_default(options8.monorepoPath, plan, logger);
+ maybeAssign(options8, plan.options, ["timeout", "start"]);
  logger.info(`Loaded workflow ${plan.workflow.name ?? ""}`);
  return plan;
  };
@@ -1136,7 +1168,7 @@ var fuzzy_match_step_default = (plan, stepPattern) => {

  // src/util/validate-plan.ts
  var assertWorkflowStructure = (plan, logger) => {
- const { workflow: workflow2, options: options7 } = plan;
+ const { workflow: workflow2, options: options8 } = plan;
  if (!workflow2 || typeof workflow2 !== "object") {
  throw new Error(`Missing or invalid "workflow" key in execution plan`);
  }
@@ -1149,7 +1181,7 @@ var assertWorkflowStructure = (plan, logger) => {
  workflow2.steps.forEach((step, index) => {
  assertStepStructure(step, index);
  });
- assertOptionsStructure(options7, logger);
+ assertOptionsStructure(options8, logger);
  };
  var assertStepStructure = (step, index) => {
  const allowedKeys = [
@@ -1176,9 +1208,9 @@ var assertStepStructure = (step, index) => {
  );
  }
  };
- var assertOptionsStructure = (options7 = {}, logger) => {
+ var assertOptionsStructure = (options8 = {}, logger) => {
  const allowedKeys = ["timeout", "stepTimeout", "start", "end", "sanitize"];
- for (const key in options7) {
+ for (const key in options8) {
  if (!allowedKeys.includes(key)) {
  logger.warn(`Unrecognized option "${key}" in options object`);
  }
@@ -1234,41 +1266,44 @@ var matchStep = (plan, stepPattern, stepName, logger) => {
  }
  return "";
  };
- var loadAndApplyCredentialMap = async (plan, options7, logger) => {
+ var loadAndApplyCredentialMap = async (plan, options8, logger) => {
  let creds = {};
- if (options7.credentials) {
+ if (options8.credentials) {
  try {
  const credsRaw = await readFile3(
- path5.resolve(options7.credentials),
+ path5.resolve(options8.workspace, options8.credentials),
  "utf8"
  );
- if (options7.credentials.endsWith(".json")) {
+ if (options8.credentials.endsWith(".json")) {
  creds = JSON.parse(credsRaw);
  } else {
  creds = yamlToJson2(credsRaw);
  }
+ logger.info("Credential map loaded ");
  } catch (e) {
- logger.error("Error processing credential map:");
- logger.error(e);
- process.exitCode = 1;
- return;
+ if (e?.message?.match(/ENOENT/)) {
+ logger.debug("Credential map not found at", options8.credentials);
+ } else {
+ logger.error("Error processing credential map:");
+ process.exitCode = 1;
+ throw e;
+ }
  }
- logger.info("Credential map loaded ");
  }
  return apply_credential_map_default(plan, creds, logger);
  };
- var executeHandler = async (options7, logger) => {
+ var executeHandler = async (options8, logger) => {
  const start = (/* @__PURE__ */ new Date()).getTime();
- assert_path_default(options7.path);
- await validate_adaptors_default(options7, logger);
- let plan = await load_plan_default(options7, logger);
+ assert_path_default(options8.path);
+ await validate_adaptors_default(options8, logger);
+ let plan = await load_plan_default(options8, logger);
  validate_plan_default(plan, logger);
- await loadAndApplyCredentialMap(plan, options7, logger);
- if (options7.cacheSteps) {
- await clearCache(plan, options7, logger);
+ await loadAndApplyCredentialMap(plan, options8, logger);
+ if (options8.cacheSteps) {
+ await clearCache(plan, options8, logger);
  }
  const moduleResolutions = {};
- const { repoDir, monorepoPath, autoinstall } = options7;
+ const { repoDir, monorepoPath, autoinstall } = options8;
  if (autoinstall) {
  if (monorepoPath) {
  logger.warn("Skipping auto-install as monorepo is being used");
@@ -1276,13 +1311,13 @@ var executeHandler = async (options7, logger) => {
  const autoInstallTargets = get_autoinstall_targets_default(plan);
  if (autoInstallTargets.length) {
  logger.info("Auto-installing language adaptors");
- options7.adaptors = await install(
+ options8.adaptors = await install(
  { packages: autoInstallTargets, repoDir },
  logger
  );
- if (autoInstallTargets.length === options7.adaptors.length) {
+ if (autoInstallTargets.length === options8.adaptors.length) {
  for (let i = 0; i < autoInstallTargets.length; i++) {
- moduleResolutions[autoInstallTargets[i]] = options7.adaptors[i];
+ moduleResolutions[autoInstallTargets[i]] = options8.adaptors[i];
  }
  }
  }
@@ -1290,35 +1325,35 @@ var executeHandler = async (options7, logger) => {
  }
  let customStart;
  let customEnd;
- if (options7.only) {
- const step = matchStep(plan, options7.only, "only", logger);
+ if (options8.only) {
+ const step = matchStep(plan, options8.only, "only", logger);
  customStart = step;
  customEnd = step;
- logger.always(`Only running workflow step "${options7.start}"`);
+ logger.always(`Only running workflow step "${options8.start}"`);
  } else {
- if (options7.start) {
+ if (options8.start) {
  customStart = matchStep(
  plan,
- options7.start ?? plan.options.start,
+ options8.start ?? plan.options.start,
  "start",
  logger
  );
- logger.info(`Starting workflow from step "${options7.start}"`);
+ logger.info(`Starting workflow from step "${options8.start}"`);
  }
- if (options7.end) {
+ if (options8.end) {
  customEnd = matchStep(
  plan,
- options7.end ?? plan.options.end,
+ options8.end ?? plan.options.end,
  "end",
  logger
  );
- logger.always(`Ending workflow at step "${options7.end}"`);
+ logger.always(`Ending workflow at step "${options8.end}"`);
  }
  }
- const state = await load_state_default(plan, options7, logger, customStart);
+ const state = await load_state_default(plan, options8, logger, customStart);
  plan = override_plan_adaptors_default(plan, moduleResolutions);
- if (options7.compile) {
- plan = await compile_default(plan, options7, logger);
+ if (options8.compile) {
+ plan = await compile_default(plan, options8, logger);
  } else {
  logger.info("Skipping compilation as noCompile is set");
  }
@@ -1332,13 +1367,13 @@ var executeHandler = async (options7, logger) => {
  workflow: plan.workflow
  };
  try {
- const result = await execute_default(finalPlan, state, options7, logger);
- if (options7.cacheSteps) {
+ const result = await execute_default(finalPlan, state, options8, logger);
+ if (options8.cacheSteps) {
  logger.success(
  "Cached output written to ./cli-cache (see info logs for details)"
  );
  }
- await serialize_output_default(options7, result, logger);
+ await serialize_output_default(options8, result, logger);
  const duration = printDuration((/* @__PURE__ */ new Date()).getTime() - start);
  if (result?.errors) {
  logger.warn(
@@ -1361,22 +1396,22 @@ var handler_default2 = executeHandler;

  // src/compile/handler.ts
  import { writeFile as writeFile3 } from "node:fs/promises";
- var compileHandler = async (options7, logger) => {
- assert_path_default(options7.path);
+ var compileHandler = async (options8, logger) => {
+ assert_path_default(options8.path);
  let result;
- if (options7.expressionPath) {
- const { code } = await compile_default(options7.expressionPath, options7, logger);
+ if (options8.expressionPath) {
+ const { code } = await compile_default(options8.expressionPath, options8, logger);
  result = code;
  } else {
- const plan = await load_plan_default(options7, logger);
- const compiledPlan = await compile_default(plan, options7, logger);
+ const plan = await load_plan_default(options8, logger);
+ const compiledPlan = await compile_default(plan, options8, logger);
  result = JSON.stringify(compiledPlan, null, 2);
  }
- if (options7.outputStdout) {
+ if (options8.outputStdout) {
  logger.success("Result:\n\n" + result);
  } else {
- await writeFile3(options7.outputPath, result);
- logger.success(`Compiled to ${options7.outputPath}`);
+ await writeFile3(options8.outputPath, result);
+ logger.success(`Compiled to ${options8.outputPath}`);
  }
  };
  var handler_default3 = compileHandler;
@@ -1389,27 +1424,27 @@ import { readFile as readFile4, writeFile as writeFile4 } from "node:fs/promises
  import path6 from "node:path";
  import { request } from "undici";
  var DEFAULT_PAGE_SIZE = 1e3;
- var request_default = async (method, options7, logger) => {
- const base = options7.lightning || process.env.OPENFN_ENDPOINT || "https://app.openfn.org";
- const url2 = path6.join(base, "/collections", options7.collectionName);
+ var request_default = async (method, options8, logger) => {
+ const base = options8.lightning || process.env.OPENFN_ENDPOINT || "https://app.openfn.org";
+ const url2 = path6.join(base, "/collections", options8.collectionName);
  logger.debug("Calling Collections server at ", url2);
  const headers = {
- Authorization: `Bearer ${options7.token}`
+ Authorization: `Bearer ${options8.token}`
  };
  const query = Object.assign(
  {
- key: options7.key,
- limit: options7.pageSize || DEFAULT_PAGE_SIZE
+ key: options8.key,
+ limit: options8.pageSize || DEFAULT_PAGE_SIZE
  },
- options7.query
+ options8.query
  );
  const args = {
  headers,
  method,
  query
  };
- if (options7.data) {
- args.body = JSON.stringify(options7.data);
+ if (options8.data) {
+ args.body = JSON.stringify(options8.data);
  headers["content-type"] = "application/json";
  }
  let result = {};
@@ -1420,11 +1455,11 @@ var request_default = async (method, options7, logger) => {
  if (cursor) {
  query.cursor = cursor;
  }
- if (options7.limit) {
- limit = options7.limit;
+ if (options8.limit) {
+ limit = options8.limit;
  query.limit = Math.min(
- options7.pageSize || DEFAULT_PAGE_SIZE,
- options7.limit - count
+ options8.pageSize || DEFAULT_PAGE_SIZE,
+ options8.limit - count
  );
  }
  try {
@@ -1518,7 +1553,7 @@ var ensureToken = (opts, logger) => {
  }
  }
  };
- var buildQuery = (options7) => {
+ var buildQuery = (options8) => {
  const map = {
  createdBefore: "created_before",
  createdAfter: "created_after",
@@ -1527,34 +1562,34 @@ var buildQuery = (options7) => {
  };
  const query = {};
  Object.keys(map).forEach((key) => {
- if (options7[key]) {
- query[map[key]] = options7[key];
+ if (options8[key]) {
+ query[map[key]] = options8[key];
  }
  });
  return query;
  };
- var get = async (options7, logger) => {
- ensureToken(options7, logger);
- const multiMode = options7.key.includes("*");
+ var get = async (options8, logger) => {
+ ensureToken(options8, logger);
+ const multiMode = options8.key.includes("*");
  if (multiMode) {
  logger.info(
- `Fetching multiple items from collection "${options7.collectionName}" with pattern ${options7.key}`
+ `Fetching multiple items from collection "${options8.collectionName}" with pattern ${options8.key}`
  );
  } else {
  logger.info(
- `Fetching "${options7.key}" from collection "${options7.collectionName}"`
+ `Fetching "${options8.key}" from collection "${options8.collectionName}"`
  );
  }
  let result = await request_default(
  "GET",
  {
- lightning: options7.endpoint,
- token: options7.token,
- pageSize: options7.pageSize,
- limit: options7.limit,
- key: options7.key,
- collectionName: options7.collectionName,
- query: buildQuery(options7)
+ lightning: options8.endpoint,
+ token: options8.token,
+ pageSize: options8.pageSize,
+ limit: options8.limit,
+ key: options8.key,
+ collectionName: options8.collectionName,
+ query: buildQuery(options8)
  },
  logger
  );
@@ -1562,32 +1597,32 @@ var get = async (options7, logger) => {
  logger.success(`Fetched ${Object.keys(result).length} items!`);
  } else {
  result = Object.values(result)[0];
- logger.success(`Fetched ${options7.key}`);
+ logger.success(`Fetched ${options8.key}`);
  }
- if (options7.outputPath) {
+ if (options8.outputPath) {
  const content = JSON.stringify(
  result,
  null,
- options7.pretty ? 2 : void 0
+ options8.pretty ? 2 : void 0
  );
- await writeFile4(options7.outputPath, content);
- logger.always(`Wrote items to ${options7.outputPath}`);
+ await writeFile4(options8.outputPath, content);
+ logger.always(`Wrote items to ${options8.outputPath}`);
  } else {
  logger.print(result);
  }
  };
- var set = async (options7, logger) => {
- if (options7.key && options7.items) {
+ var set = async (options8, logger) => {
+ if (options8.key && options8.items) {
  throwAbortableError(
  "ARGUMENT_ERROR: arguments for key and items were provided",
  "If upserting multiple items with --items, do not pass a key"
  );
  }
- ensureToken(options7, logger);
- logger.info(`Upserting items to collection "${options7.collectionName}"`);
+ ensureToken(options8, logger);
+ logger.info(`Upserting items to collection "${options8.collectionName}"`);
  const items = [];
- if (options7.items) {
- const resolvedPath = path7.resolve(options7.items);
+ if (options8.items) {
+ const resolvedPath = path7.resolve(options8.items);
  logger.debug("Loading items from ", resolvedPath);
  const data = await readFile4(resolvedPath, "utf8");
  const obj = JSON.parse(data);
@@ -1595,43 +1630,43 @@ var set = async (options7, logger) => {
  items.push({ key, value: JSON.stringify(value) });
  });
  logger.info(`Upserting ${items.length} items`);
- } else if (options7.key && options7.value) {
- const resolvedPath = path7.resolve(options7.value);
+ } else if (options8.key && options8.value) {
+ const resolvedPath = path7.resolve(options8.value);
  logger.debug("Loading value from ", resolvedPath);
- const data = await readFile4(path7.resolve(options7.value), "utf8");
+ const data = await readFile4(path7.resolve(options8.value), "utf8");
  const value = JSON.stringify(JSON.parse(data));
- items.push({ key: options7.key, value });
- logger.info(`Upserting data to "${options7.key}"`);
+ items.push({ key: options8.key, value });
+ logger.info(`Upserting data to "${options8.key}"`);
  } else {
  throw new Error("INVALID_ARGUMENTS");
  }
  const result = await request_default(
  "POST",
  {
- lightning: options7.endpoint,
- token: options7.token,
- key: options7.key,
- collectionName: options7.collectionName,
+ lightning: options8.endpoint,
+ token: options8.token,
+ key: options8.key,
+ collectionName: options8.collectionName,
  data: { items }
  },
  logger
  );
  logger.success(`Upserted ${result.upserted} items!`);
  };
- var remove = async (options7, logger) => {
- ensureToken(options7, logger);
+ var remove = async (options8, logger) => {
+ ensureToken(options8, logger);
  logger.info(
- `Removing "${options7.key}" from collection "${options7.collectionName}"`
+ `Removing "${options8.key}" from collection "${options8.collectionName}"`
  );
- if (options7.dryRun) {
+ if (options8.dryRun) {
  logger.info("--dry-run passed: fetching affected items");
  let result = await request_default(
  "GET",
  {
- lightning: options7.endpoint,
- token: options7.token,
- key: options7.key,
- collectionName: options7.collectionName
+ lightning: options8.endpoint,
+ token: options8.token,
+ key: options8.key,
+ collectionName: options8.collectionName
  },
  logger
  );
@@ -1643,11 +1678,11 @@ var remove = async (options7, logger) => {
  let result = await request_default(
  "DELETE",
  {
- lightning: options7.endpoint,
- token: options7.token,
- key: options7.key,
- collectionName: options7.collectionName,
- query: buildQuery(options7)
+ lightning: options8.endpoint,
+ token: options8.token,
+ key: options8.key,
+ collectionName: options8.collectionName,
+ query: buildQuery(options8)
  },
  logger
  );
@@ -1661,9 +1696,9 @@ var handler_default4 = {
  };

  // src/test/handler.ts
- var testHandler = async (options7, logger) => {
+ var testHandler = async (options8, logger) => {
  logger.log("Running test workflow...");
- const opts = { ...options7 };
+ const opts = { ...options8 };
  opts.compile = true;
  opts.adaptors = [];
  const plan = {
@@ -1725,12 +1760,249 @@ import {
  validateConfig
  } from "@openfn/deploy";

- // src/deploy/beta.ts
- import Project2 from "@openfn/project";
- import { deployProject } from "@openfn/deploy";
+ // src/projects/deploy.ts
+ import Project from "@openfn/project";
+ import c2 from "chalk";
+
+ // src/util/ensure-log-opts.ts
+ var defaultLoggerOptions = {
+ default: "default",
+ // TODO fix to lower case
+ job: "debug"
+ };
+ var ERROR_MESSAGE_LOG_LEVEL = "Unknown log level. Valid levels are none, debug, info and default.";
+ var ERROR_MESSAGE_LOG_COMPONENT = "Unknown log component. Valid components are cli, compiler, runtime and job.";
+ var componentShorthands = {
+ cmp: "compiler",
+ rt: "runtime",
+ "r/t": "runtime"
+ };
+ var ensureLogOpts = (opts) => {
+ const components = {};
+ const outgoingOpts = opts;
+ if (!opts.log && /^(version|test)$/.test(opts.command)) {
+ outgoingOpts.log = { default: "info" };
+ return outgoingOpts;
+ }
+ if (opts.log) {
+ const parts = opts.log.split(",");
+ parts.forEach((l) => {
+ let component = "";
+ let level = "";
+ if (l.match(/=/)) {
+ const parts2 = l.split("=");
+ component = parts2[0].toLowerCase();
+ if (componentShorthands[component]) {
+ component = componentShorthands[component];
+ }
+ level = parts2[1].toLowerCase();
+ } else {
+ component = "default";
+ level = l.toLowerCase();
+ if (level === "none" && !parts.find((p) => p.startsWith("job"))) {
+ components["job"] = "none";
+ }
+ }
+ if (!/^(cli|runtime|compiler|job|default)$/i.test(component)) {
+ throw new Error(ERROR_MESSAGE_LOG_COMPONENT);
+ }
+ level = level.toLowerCase();
+ if (!isValidLogLevel(level)) {
+ throw new Error(ERROR_MESSAGE_LOG_LEVEL);
+ }
+ components[component] = level;
+ });
+ }
+ outgoingOpts.log = {
+ ...defaultLoggerOptions,
+ ...components
+ };
+ return outgoingOpts;
+ };
+ var ensure_log_opts_default = ensureLogOpts;
+
+ // src/options.ts
+ var setDefaultValue = (opts, key, value) => {
+ const v = opts[key];
+ if (isNaN(v) && !v) {
+ opts[key] = value;
+ }
+ };
+ var apiKey = {
+ name: "apikey",
+ yargs: {
+ alias: ["pat", "token", "api-key"],
+ description: "API Key, Personal Access Token (PAT), or other access token from Lightning"
+ },
+ ensure: (opts) => {
+ if (!opts.apikey) {
+ opts.apiKey = process.env.OPENFN_API_KEY;
+ }
+ }
+ };
+ var confirm = {
+ name: "confirm",
+ yargs: {
+ alias: ["y"],
+ boolean: true,
+ description: "Skip confirmation prompts (e.g. 'Are you sure?')"
+ },
+ ensure: (opts) => {
+ if (opts.y) {
+ opts.confirm = false;
+ }
+ setDefaultValue(opts, "confirm", true);
+ }
+ };
+ var endpoint = {
+ name: "endpoint",
+ yargs: {
+ alias: ["lightning"],
+ description: "[beta only] URL to Lightning endpoint"
+ }
+ };
+ var force = {
+ name: "force",
+ yargs: {
+ alias: ["f"],
+ boolean: true,
+ description: "Force metadata to be regenerated",
+ default: false
+ }
+ };
+ var log = {
+ name: "log",
+ yargs: {
+ alias: ["l"],
+ description: "Set the log level",
+ string: true
+ },
+ ensure: (opts) => {
+ ensure_log_opts_default(opts);
+ }
+ };
+ var logJson = {
+ name: "log-json",
+ yargs: {
+ description: "Output all logs as JSON objects",
+ boolean: true
+ }
+ };
+ var path8 = {
+ name: "path",
+ yargs: {
+ description: "Path"
+ }
+ };
+ var snapshots = {
+ name: "snapshots",
+ yargs: {
+ description: "List of snapshot ids to pull",
+ array: true
+ }
+ };
+ var timeout = {
+ name: "timeout",
+ yargs: {
+ alias: ["t"],
+ number: true,
+ description: "Set the timeout duration (ms). Defaults to 5 minutes.",
+ default: 5 * 60 * 1e3
+ }
+ };
+ var workflow = {
+ name: "workflow",
+ yargs: {
+ string: true,
+ description: "Name of the workflow to execute"
+ }
+ };
+
+ // src/util/get-cli-option-object.ts
+ function getCLIOptionObject(arg) {
+ if (isObject(arg)) {
+ return arg;
+ } else if (typeof arg === "string") {
+ try {
+ const p = JSON.parse(arg);
+ if (isObject(p))
+ return p;
+ } catch (e) {
+ }
+ return Object.fromEntries(
+ arg.split(",").map((pair) => {
+ const [k, v] = pair.split("=");
+ return [k.trim(), v.trim()];
+ })
+ );
+ }
+ }
+ function isObject(arg) {
+ return typeof arg === "object" && arg !== null && !Array.isArray(arg);
+ }
+
+ // src/projects/options.ts
+ var env = {
+ name: "env",
+ yargs: {
+ description: "Environment name (eg staging, prod, branch)",
+ hidden: true
+ }
+ };
+ var alias = {
+ name: "alias",
+ yargs: {
+ alias: ["env"],
+ description: "Environment name (eg staging, prod, branch)"
+ }
+ };
+ var dryRun = {
+ name: "dryRun",
+ yargs: {
+ description: "Runs the command but does not commit any changes to disk or app"
+ }
+ };
+ var removeUnmapped = {
+ name: "remove-unmapped",
+ yargs: {
+ boolean: true,
+ description: "Removes all workflows that didn't get mapped from the final project after merge"
+ }
+ };
+ var workflowMappings = {
+ name: "workflow-mappings",
+ yargs: {
+ type: "string",
+ coerce: getCLIOptionObject,
+ description: "A manual object mapping of which workflows in source and target should be matched for a merge."
+ }
+ };
+ var outputPath = {
+ name: "output-path",
+ yargs: {
+ alias: ["o", "output"],
+ type: "string",
+ description: "Path to output the fetched project to"
+ }
+ };
+ var workspace = {
+ name: "workspace",
+ yargs: {
+ alias: ["w"],
+ description: "Path to the project workspace (ie, path to openfn.yaml)"
+ },
+ ensure: (opts) => {
+ const ws = opts.workspace ?? process.env.OPENFN_WORKSPACE;
+ if (!ws) {
+ opts.workspace = process.cwd();
+ } else {
+ opts.workspace = resolve_path_default(ws);
+ }
+ }
+ };

  // src/projects/util.ts
- import path8 from "node:path";
+ import path9 from "node:path";
  import { mkdir as mkdir3, writeFile as writeFile5 } from "node:fs/promises";

  // src/errors.ts
@@ -1741,17 +2013,17 @@ var CLIError = class extends Error {
  };

  // src/projects/util.ts
- var loadAppAuthConfig = (options7, logger) => {
+ var loadAppAuthConfig = (options8, logger) => {
  const { OPENFN_API_KEY, OPENFN_ENDPOINT } = process.env;
  const config2 = {
- apiKey: options7.apiKey,
- endpoint: options7.endpoint
+ apiKey: options8.apiKey,
+ endpoint: options8.endpoint
  };
- if (!options7.apiKey && OPENFN_API_KEY) {
+ if (!options8.apiKey && OPENFN_API_KEY) {
  logger.info("Using OPENFN_API_KEY environment variable");
  config2.apiKey = OPENFN_API_KEY;
  }
- if (!options7.endpoint && OPENFN_ENDPOINT) {
+ if (!options8.endpoint && OPENFN_ENDPOINT) {
  logger.info("Using OPENFN_ENDPOINT environment variable");
  config2.endpoint = OPENFN_ENDPOINT;
  }
@@ -1763,13 +2035,18 @@ var ensureExt = (filePath, ext) => {
  }
  return filePath;
  };
- var serialize = async (project, outputPath2, formatOverride, dryRun = false) => {
- const root = path8.dirname(outputPath2);
+ var getSerializePath = (project, workspacePath, outputPath2) => {
+ const outputRoot = resolve_path_default(outputPath2 || workspacePath);
+ const projectsDir = project?.config.dirs.projects ?? ".projects";
+ return outputPath2 ?? `${outputRoot}/${projectsDir}/${project.qname}`;
+ };
+ var serialize = async (project, outputPath2, formatOverride, dryRun2 = false) => {
+ const root = path9.dirname(outputPath2);
  await mkdir3(root, { recursive: true });
  const format = formatOverride ?? project.config?.formats.project;
  const output = project?.serialize("project", { format });
  const maybeWriteFile = (filePath, output2) => {
- if (!dryRun) {
+ if (!dryRun2) {
  return writeFile5(filePath, output2);
  }
  };
@@ -1818,45 +2095,262 @@ async function fetchProject(endpoint2, apiKey2, projectId, logger, snapshots2) {
1818
2095
  throw error;
1819
2096
  }
1820
2097
  }
1821
- function handleCommonErrors(config2, error) {
1822
- if (error.cause?.code === "ECONNREFUSED") {
1823
- throw new DeployError(
1824
- `Failed to connect to endpoint ${config2.endpoint}, got ECONNREFUSED.`
1825
- );
1826
- }
1827
- }
2098
+ async function deployProject(endpoint2, apiKey2, state, logger) {
2099
+ try {
2100
+ const url2 = getLightningUrl(endpoint2);
2101
+ const response = await fetch(url2, {
2102
+ method: "POST",
2103
+ headers: {
2104
+ Authorization: `Bearer ${apiKey2}`,
2105
+ "Content-Type": "application/json"
2106
+ },
2107
+ body: JSON.stringify(state)
2108
+ });
2109
+ if (!response.ok) {
2110
+ const body = await response.json();
2111
+ logger?.error("Failed to deploy project:");
2112
+ logger?.error(JSON.stringify(body, null, 2));
2113
+ throw new CLIError(
2114
+ `Failed to deploy project ${state.name}: ${response.status}`
2115
+ );
2116
+ }
2117
+ return await response.json();
2118
+ } catch (error) {
2119
+ handleCommonErrors({ endpoint: endpoint2, apiKey: apiKey2 }, error);
2120
+ throw error;
2121
+ }
2122
+ }
2123
+ function handleCommonErrors(config2, error) {
2124
+ if (error.cause?.code === "ECONNREFUSED") {
2125
+ throw new DeployError(
2126
+ `Failed to connect to endpoint ${config2.endpoint}, got ECONNREFUSED.`
2127
+ );
2128
+ }
2129
+ }
1828
2130
  var DeployError = class extends Error {
1829
2131
  constructor(message) {
1830
2132
  super(message);
1831
2133
  }
1832
2134
  };
1833
2135
 
1834
- // src/deploy/beta.ts
1835
- async function handler(options7, logger) {
1836
- const config2 = loadAppAuthConfig(options7, logger);
1837
- const project = await Project2.from("fs", {
1838
- root: options7.workspace || "."
2136
+ // src/util/command-builders.ts
2137
+ import c from "chalk";
2138
+ var expandYargs = (y) => {
2139
+ if (typeof y === "function") {
2140
+ return y();
2141
+ }
2142
+ return y;
2143
+ };
2144
+ function build(opts, yargs) {
2145
+ return opts.reduce((_y, o) => {
2146
+ if (!o?.name) {
2147
+ console.error(`ERROR: INVALID COMMAND OPTION PASSED`, o);
2148
+ console.error("Check the options passed to the command builder");
2149
+ throw new Error("Invalid command");
2150
+ }
2151
+ return yargs.option(o.name, expandYargs(o.yargs));
2152
+ }, yargs);
2153
+ }
2154
+ var ensure = (command8, opts) => (yargs) => {
2155
+ yargs.command = command8;
2156
+ opts.filter((opt) => opt.ensure).forEach((opt) => {
2157
+ try {
2158
+ opt.ensure(yargs);
2159
+ } catch (e) {
2160
+ console.log(e);
2161
+ console.error(
2162
+ c.red(`
2163
+ Error parsing command arguments: ${command8}.${opt.name}
2164
+ `)
2165
+ );
2166
+ console.error(c.red("Aborting"));
2167
+ console.error();
2168
+ process.exit(9);
2169
+ }
2170
+ });
2171
+ };
2172
+ var override = (command8, yargs) => {
2173
+ return {
2174
+ ...command8,
2175
+ yargs: {
2176
+ ...command8.yargs || {},
2177
+ ...yargs
2178
+ }
2179
+ };
2180
+ };
2181
+
2182
+ // src/projects/deploy.ts
2183
+ var options = [
2184
+ env,
2185
+ workspace,
2186
+ dryRun,
2187
+ apiKey,
2188
+ endpoint,
2189
+ log,
2190
+ logJson,
2191
+ snapshots,
2192
+ force,
2193
+ confirm
2194
+ ];
2195
+ var printProjectName = (project) => `${project.id} (${project.openfn?.uuid || "<no UUID>"})`;
2196
+ var command = {
2197
+ command: "deploy",
2198
+ describe: `Deploy the checked out project to a Lightning Instance`,
2199
+ builder: (yargs) => build(options, yargs).positional("project", {
2200
+ describe: "The UUID, local id or local alias of the project to deploy to"
2201
+ }).example(
2202
+ "deploy",
2203
+ "Deploy the checkout project to the connected instance"
2204
+ ),
2205
+ handler: ensure("project-deploy", options)
2206
+ };
2207
+ async function handler(options8, logger) {
2208
+ logger.warn(
2209
+ "WARNING: the project deploy command is in BETA and may not be stable. Use cautiously on production projects."
2210
+ );
2211
+ const config2 = loadAppAuthConfig(options8, logger);
2212
+ logger.info("Attempting to load checked-out project from workspace");
2213
+ const localProject = await Project.from("fs", {
2214
+ root: options8.workspace || "."
1839
2215
  });
1840
- const state = project.serialize("state", { format: "json" });
1841
- logger.debug("Converted local project to app state:");
2216
+ logger.success(`Loaded local project ${printProjectName(localProject)}`);
2217
+ let remoteProject;
2218
+ try {
2219
+ const { data } = await fetchProject(
2220
+ config2.endpoint,
2221
+ config2.apiKey,
2222
+ localProject.uuid ?? localProject.id,
2223
+ logger
2224
+ );
2225
+ remoteProject = await Project.from("state", data, {
2226
+ endpoint: config2.endpoint
2227
+ });
2228
+ logger.success("Downloaded latest version of project at ", config2.endpoint);
2229
+ } catch (e) {
2230
+ console.log(e);
2231
+ throw e;
2232
+ }
2233
+ if (!options8.force && localProject.uuid !== remoteProject.uuid) {
2234
+ logger.error(`UUID conflict!
2235
+
2236
+ Your local project (${localProject.uuid}) has a different UUID to the remote project (${remoteProject.uuid}).
2237
+
2238
+ Pass --force to override this error and deploy anyway.`);
2239
+ return false;
2240
+ }
2241
+ const diffs = reportDiff(remoteProject, localProject, logger);
2242
+ if (!diffs.length) {
2243
+ logger.success("Nothing to deploy");
2244
+ return;
2245
+ }
2246
+ if (!localProject.canMergeInto(remoteProject)) {
2247
+ if (!options8.force) {
2248
+ logger.error(`Error: Projects have diverged!
2249
+
2250
+ The remote project has been edited since the local project was branched. Changes may be lost.
2251
+
2252
+ Pass --force to override this error and deploy anyway.`);
2253
+ return;
2254
+ } else {
2255
+ logger.warn(
2256
+ "Remote project has not diverged from local project! Pushing anyway as -f passed"
2257
+ );
2258
+ }
2259
+ } else {
2260
+ logger.info(
2261
+ "Remote project has not diverged from local project - it is safe to deploy \u{1F389}"
2262
+ );
2263
+ }
2264
+ logger.info("Merging changes into remote project");
2265
+ const merged = Project.merge(localProject, remoteProject, {
2266
+ mode: "replace",
2267
+ force: true
2268
+ });
2269
+ const state = merged.serialize("state", {
2270
+ format: "json"
2271
+ });
2272
+ logger.debug("Converted merged local project to app state:");
1842
2273
  logger.debug(JSON.stringify(state, null, 2));
1843
- config2.endpoint ??= project.openfn?.endpoint;
1844
- logger.info("Sending project to app...");
1845
- await deployProject(config2, state);
2274
+ config2.endpoint ??= localProject.openfn?.endpoint;
2275
+ if (options8.dryRun) {
2276
+ logger.always("dryRun option set: skipping upload step");
2277
+ } else {
2278
+ if (options8.confirm) {
2279
+ if (!await logger.confirm(
2280
+ `Ready to deploy changes to ${config2.endpoint}?`
2281
+ )) {
2282
+ logger.always("Cancelled deployment");
2283
+ return false;
2284
+ }
2285
+ }
2286
+ logger.info("Sending project to app...");
2287
+ const { data: result } = await deployProject(
2288
+ config2.endpoint,
2289
+ config2.apiKey,
2290
+ state,
2291
+ logger
2292
+ );
2293
+ const finalProject = await Project.from(
2294
+ "state",
2295
+ result,
2296
+ {
2297
+ endpoint: config2.endpoint
2298
+ },
2299
+ merged.config
2300
+ );
2301
+ const finalOutputPath = getSerializePath(localProject, options8.workspace);
2302
+ logger.debug("Updating local project at ", finalOutputPath);
2303
+ await serialize(finalProject, finalOutputPath);
2304
+ }
1846
2305
  logger.success("Updated project at", config2.endpoint);
1847
2306
  }
2307
+ var reportDiff = (local, remote, logger) => {
2308
+ const diffs = remote.diff(local);
2309
+ if (diffs.length === 0) {
2310
+ logger.info("No workflow changes detected");
2311
+ return diffs;
2312
+ }
2313
+ const added = diffs.filter((d) => d.type === "added");
2314
+ const changed = diffs.filter((d) => d.type === "changed");
2315
+ const removed = diffs.filter((d) => d.type === "removed");
2316
+ if (added.length > 0) {
2317
+ logger.break();
2318
+ logger.always(c2.green("Workflows added:"));
2319
+ for (const diff of added) {
2320
+ logger.always(c2.green(` - ${diff.id}`));
2321
+ }
2322
+ logger.break();
2323
+ }
2324
+ if (changed.length > 0) {
2325
+ logger.break();
2326
+ logger.always(c2.yellow("Workflows modified:"));
2327
+ for (const diff of changed) {
2328
+ logger.always(c2.yellow(` - ${diff.id}`));
2329
+ }
2330
+ logger.break();
2331
+ }
2332
+ if (removed.length > 0) {
2333
+ logger.break();
2334
+ logger.always(c2.red("Workflows removed:"));
2335
+ for (const diff of removed) {
2336
+ logger.always(c2.red(` - ${diff.id}`));
2337
+ }
2338
+ logger.break();
2339
+ }
2340
+ return diffs;
2341
+ };
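Editorial aside, not part of the published diff: reportDiff above only reads an id and a type from each entry returned by remote.diff(local), so the grouping logic reduces to a filter per change type. The diff entries below are illustrative, not captured output.
// Assumed shape: each entry carries { id, type } with type "added" | "changed" | "removed"
const diffs = [
  { id: "sync-patients", type: "added" },
  { id: "notify-admin", type: "changed" },
];
const idsOfType = (type) => diffs.filter((d) => d.type === type).map((d) => d.id);
console.log(idsOfType("added"));   // ["sync-patients"]
console.log(idsOfType("changed")); // ["notify-admin"]
console.log(idsOfType("removed")); // []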
1848
2342
 
1849
2343
  // src/deploy/handler.ts
1850
2344
  var actualDeploy = deploy;
1851
- async function deployHandler(options7, logger, deployFn = actualDeploy) {
1852
- if (options7.beta) {
1853
- return handler(options7, logger);
2345
+ async function deployHandler(options8, logger, deployFn = actualDeploy) {
2346
+ if (options8.beta) {
2347
+ return handler(options8, logger);
1854
2348
  }
1855
2349
  try {
1856
- const config2 = mergeOverrides(await getConfig(options7.configPath), options7);
2350
+ const config2 = mergeOverrides(await getConfig(options8.configPath), options8);
1857
2351
  logger.debug("Deploying with config", JSON.stringify(config2, null, 2));
1858
- if (options7.confirm === false) {
1859
- config2.requireConfirmation = options7.confirm;
2352
+ if (options8.confirm === false) {
2353
+ config2.requireConfirmation = options8.confirm;
1860
2354
  }
1861
2355
  if (process.env["OPENFN_API_KEY"]) {
1862
2356
  logger.info("Using OPENFN_API_KEY environment variable");
@@ -1881,15 +2375,15 @@ async function deployHandler(options7, logger, deployFn = actualDeploy) {
1881
2375
  throw error;
1882
2376
  }
1883
2377
  }
1884
- function mergeOverrides(config2, options7) {
2378
+ function mergeOverrides(config2, options8) {
1885
2379
  return {
1886
2380
  ...config2,
1887
2381
  apiKey: pickFirst(process.env["OPENFN_API_KEY"], config2.apiKey),
1888
2382
  endpoint: pickFirst(process.env["OPENFN_ENDPOINT"], config2.endpoint),
1889
- statePath: pickFirst(options7.statePath, config2.statePath),
1890
- specPath: pickFirst(options7.projectPath, config2.specPath),
1891
- configPath: options7.configPath,
1892
- requireConfirmation: pickFirst(options7.confirm, config2.requireConfirmation)
2383
+ statePath: pickFirst(options8.statePath, config2.statePath),
2384
+ specPath: pickFirst(options8.projectPath, config2.specPath),
2385
+ configPath: options8.configPath,
2386
+ requireConfirmation: pickFirst(options8.confirm, config2.requireConfirmation)
1893
2387
  };
1894
2388
  }
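Editorial aside, not part of the published diff: mergeOverrides relies on pickFirst, whose body falls outside this hunk. Assuming it returns the first defined argument, the precedence it encodes is environment variable over config file for credentials, and CLI flag over config file for paths.
// Assumption: pickFirst returns its first argument that is not undefined
const pickFirst = (...args) => args.find((arg) => arg !== undefined);
const config = { apiKey: "key-from-config", statePath: "./state.json" };
pickFirst(process.env["OPENFN_API_KEY"], config.apiKey); // env var wins when set
pickFirst("./custom-state.json", config.statePath);      // CLI flag wins over the config file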
1895
2389
  function pickFirst(...args) {
@@ -1900,20 +2394,20 @@ var handler_default6 = deployHandler;
1900
2394
  // src/docgen/handler.ts
1901
2395
  import { writeFile as writeFile6 } from "node:fs/promises";
1902
2396
  import { readFileSync, writeFileSync, mkdirSync, rmSync } from "node:fs";
1903
- import path9 from "node:path";
2397
+ import path10 from "node:path";
1904
2398
  import { describePackage } from "@openfn/describe-package";
1905
2399
  import { getNameAndVersion as getNameAndVersion4 } from "@openfn/runtime";
1906
2400
  var RETRY_DURATION = 500;
1907
2401
  var RETRY_COUNT = 20;
1908
2402
  var TIMEOUT_MS = 1e3 * 60;
1909
2403
  var actualDocGen = (specifier) => describePackage(specifier, {});
1910
- var ensurePath = (filePath) => mkdirSync(path9.dirname(filePath), { recursive: true });
2404
+ var ensurePath = (filePath) => mkdirSync(path10.dirname(filePath), { recursive: true });
1911
2405
  var generatePlaceholder = (path17) => {
1912
2406
  writeFileSync(path17, `{ "loading": true, "timestamp": ${Date.now()}}`);
1913
2407
  };
1914
2408
  var finish = (logger, resultPath) => {
1915
2409
  logger.success("Done! Docs can be found at:\n");
1916
- logger.print(` ${path9.resolve(resultPath)}`);
2410
+ logger.print(` ${path10.resolve(resultPath)}`);
1917
2411
  };
1918
2412
  var generateDocs = async (specifier, path17, docgen, logger) => {
1919
2413
  const result = await docgen(specifier);
@@ -1952,8 +2446,8 @@ var waitForDocs = async (docs, path17, logger, retryDuration = RETRY_DURATION) =
1952
2446
  throw e;
1953
2447
  }
1954
2448
  };
1955
- var docgenHandler = (options7, logger, docgen = actualDocGen, retryDuration = RETRY_DURATION) => {
1956
- const { specifier, repoDir } = options7;
2449
+ var docgenHandler = (options8, logger, docgen = actualDocGen, retryDuration = RETRY_DURATION) => {
2450
+ const { specifier, repoDir } = options8;
1957
2451
  const { version } = getNameAndVersion4(specifier);
1958
2452
  if (!version) {
1959
2453
  logger.error("Error: No version number detected");
@@ -1994,14 +2488,14 @@ var handler_default7 = docgenHandler;
1994
2488
 
1995
2489
  // src/docs/handler.ts
1996
2490
  import { readFile as readFile5 } from "node:fs/promises";
1997
- import c from "chalk";
2491
+ import c3 from "chalk";
1998
2492
  import { getNameAndVersion as getNameAndVersion5, getLatestVersion } from "@openfn/runtime";
1999
2493
  var describeFn = (adaptorName, fn) => [
2000
- c.green(
2494
+ c3.green(
2001
2495
  `## ${fn.name}(${fn.parameters.map(({ name }) => name).join(",")})`
2002
2496
  ),
2003
2497
  `${fn.description}`,
2004
- c.green("### Usage Examples"),
2498
+ c3.green("### Usage Examples"),
2005
2499
  fn.examples.length ? fn.examples.map(({ code, caption }) => {
2006
2500
  if (caption) {
2007
2501
  return `${caption}:
@@ -2009,21 +2503,21 @@ ${code}`;
2009
2503
  }
2010
2504
  return code;
2011
2505
  }).join("\n\n") : "None",
2012
- c.green("### API Reference"),
2506
+ c3.green("### API Reference"),
2013
2507
  `https://docs.openfn.org/adaptors/packages/${adaptorName.replace(
2014
2508
  "@openfn/language-",
2015
2509
  ""
2016
2510
  )}-docs#${fn.name}
2017
2511
  `
2018
2512
  ].join("\n\n");
2019
- var describeLib = (adaptorName, data) => c.green(`## ${adaptorName} ${data.version}`) + `
2513
+ var describeLib = (adaptorName, data) => c3.green(`## ${adaptorName} ${data.version}`) + `
2020
2514
 
2021
2515
  ${data.functions.map(
2022
- (fn) => ` ${c.yellow(fn.name)} (${fn.parameters.map((p) => p.name).join(", ")})`
2516
+ (fn) => ` ${c3.yellow(fn.name)} (${fn.parameters.map((p) => p.name).join(", ")})`
2023
2517
  ).sort().join("\n")}
2024
2518
  `;
2025
- var docsHandler = async (options7, logger) => {
2026
- const { adaptor, operation, repoDir } = options7;
2519
+ var docsHandler = async (options8, logger) => {
2520
+ const { adaptor, operation, repoDir } = options8;
2027
2521
  const adaptors = expand_adaptors_default([adaptor]);
2028
2522
  const [adaptorName] = adaptors;
2029
2523
  let { name, version } = getNameAndVersion5(adaptorName);
@@ -2083,12 +2577,12 @@ var handler_default8 = docsHandler;
2083
2577
  import { getNameAndVersion as getNameAndVersion6 } from "@openfn/runtime";
2084
2578
  import { createHash } from "node:crypto";
2085
2579
  import { mkdir as mkdir4, readFile as readFile6, writeFile as writeFile7, readdir, rm } from "node:fs/promises";
2086
- import path10 from "node:path";
2580
+ import path11 from "node:path";
2087
2581
  var UNSUPPORTED_FILE_NAME = "unsupported.json";
2088
2582
  var getCachePath2 = (repoDir, key) => {
2089
- const base = path10.join(repoDir, "meta");
2583
+ const base = path11.join(repoDir, "meta");
2090
2584
  if (key) {
2091
- return path10.join(base, key.endsWith(".json") ? key : `${key}.json`);
2585
+ return path11.join(base, key.endsWith(".json") ? key : `${key}.json`);
2092
2586
  }
2093
2587
  return base;
2094
2588
  };
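Editorial aside, not part of the published diff: getCachePath2 above resolves metadata cache entries under <repoDir>/meta, appending .json to the key unless it already carries the extension. A quick sketch with a hypothetical repo directory (POSIX paths shown):
getCachePath2("/tmp/openfn/repo");                 // "/tmp/openfn/repo/meta"
getCachePath2("/tmp/openfn/repo", "a1b2c3");       // "/tmp/openfn/repo/meta/a1b2c3.json"
getCachePath2("/tmp/openfn/repo", "a1b2c3.json");  // same path; ".json" is not doubled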
@@ -2130,7 +2624,7 @@ var get2 = async (repoPath, key) => {
2130
2624
  };
2131
2625
  var set2 = async (repoPath, key, result) => {
2132
2626
  const p = getCachePath2(repoPath, key);
2133
- await mkdir4(path10.dirname(p), { recursive: true });
2627
+ await mkdir4(path11.dirname(p), { recursive: true });
2134
2628
  await writeFile7(p, JSON.stringify(result));
2135
2629
  };
2136
2630
  var getUnsupportedCachePath = (repoDir) => {
@@ -2189,7 +2683,7 @@ var markAdaptorAsUnsupported = async (adaptorSpecifier, repoDir) => {
2189
2683
  majorMinor: parsed.majorMinor,
2190
2684
  timestamp: Date.now()
2191
2685
  };
2192
- await mkdir4(path10.dirname(cachePath), { recursive: true });
2686
+ await mkdir4(path11.dirname(cachePath), { recursive: true });
2193
2687
  await writeFile7(cachePath, JSON.stringify(cache, null, 2));
2194
2688
  }
2195
2689
  };
@@ -2228,8 +2722,8 @@ var getAdaptorPath = async (adaptor, logger, repoDir) => {
2228
2722
  return adaptorPath;
2229
2723
  };
2230
2724
  var shouldAutoinstall = (adaptor) => adaptor?.length > 0 && !adaptor.startsWith("/") && !adaptor.includes("=");
2231
- var metadataHandler = async (options7, logger) => {
2232
- const { repoDir, adaptors, keepUnsupported } = options7;
2725
+ var metadataHandler = async (options8, logger) => {
2726
+ const { repoDir, adaptors, keepUnsupported } = options8;
2233
2727
  let adaptor = adaptors[0];
2234
2728
  if (await isAdaptorUnsupported(adaptor, repoDir)) {
2235
2729
  logger.info(
@@ -2238,7 +2732,7 @@ var metadataHandler = async (options7, logger) => {
2238
2732
  logger.error("No metadata helper found");
2239
2733
  process.exit(1);
2240
2734
  }
2241
- const state = await load_state_default({}, options7, logger);
2735
+ const state = await load_state_default({}, options8, logger);
2242
2736
  logger.success(`Generating metadata`);
2243
2737
  logger.info("config:", state);
2244
2738
  const config2 = state.configuration;
@@ -2251,7 +2745,7 @@ var metadataHandler = async (options7, logger) => {
2251
2745
  logger.print(getCachePath2(repoDir, id));
2252
2746
  };
2253
2747
  const id = generateKey(config2, adaptor);
2254
- if (!options7.force) {
2748
+ if (!options8.force) {
2255
2749
  logger.debug("config hash: ", id);
2256
2750
  const cached = await get2(repoDir, id);
2257
2751
  if (cached) {
@@ -2269,7 +2763,7 @@ var metadataHandler = async (options7, logger) => {
2269
2763
  wasAutoInstalled = true;
2270
2764
  adaptor = autoinstallResult[0];
2271
2765
  }
2272
- const adaptorPath = await getAdaptorPath(adaptor, logger, options7.repoDir);
2766
+ const adaptorPath = await getAdaptorPath(adaptor, logger, options8.repoDir);
2273
2767
  if (!adaptorPath) {
2274
2768
  throw new Error(`Could not resolve adaptor path for ${adaptor}`);
2275
2769
  }
@@ -2321,276 +2815,10 @@ import {
2321
2815
  syncRemoteSpec
2322
2816
  } from "@openfn/deploy";
2323
2817
 
2324
- // src/util/command-builders.ts
2325
- import c2 from "chalk";
2326
- var expandYargs = (y) => {
2327
- if (typeof y === "function") {
2328
- return y();
2329
- }
2330
- return y;
2331
- };
2332
- function build(opts, yargs) {
2333
- return opts.reduce((_y, o) => {
2334
- if (!o?.name) {
2335
- console.error(`ERROR: INVALID COMMAND OPTION PASSED`, o);
2336
- console.error("Check the options passed to the command builder");
2337
- throw new Error("Invalid command");
2338
- }
2339
- return yargs.option(o.name, expandYargs(o.yargs));
2340
- }, yargs);
2341
- }
2342
- var ensure = (command7, opts) => (yargs) => {
2343
- yargs.command = command7;
2344
- opts.filter((opt) => opt.ensure).forEach((opt) => {
2345
- try {
2346
- opt.ensure(yargs);
2347
- } catch (e) {
2348
- console.log(e);
2349
- console.error(
2350
- c2.red(`
2351
- Error parsing command arguments: ${command7}.${opt.name}
2352
- `)
2353
- );
2354
- console.error(c2.red("Aborting"));
2355
- console.error();
2356
- process.exit(9);
2357
- }
2358
- });
2359
- };
2360
- var override = (command7, yargs) => {
2361
- return {
2362
- ...command7,
2363
- yargs: {
2364
- ...command7.yargs || {},
2365
- ...yargs
2366
- }
2367
- };
2368
- };
2369
-
2370
2818
  // src/projects/fetch.ts
2371
2819
  import path12 from "node:path";
2372
- import Project3, { Workspace } from "@openfn/project";
2373
-
2374
- // src/util/ensure-log-opts.ts
2375
- var defaultLoggerOptions = {
2376
- default: "default",
2377
- // TODO fix to lower case
2378
- job: "debug"
2379
- };
2380
- var ERROR_MESSAGE_LOG_LEVEL = "Unknown log level. Valid levels are none, debug, info and default.";
2381
- var ERROR_MESSAGE_LOG_COMPONENT = "Unknown log component. Valid components are cli, compiler, runtime and job.";
2382
- var componentShorthands = {
2383
- cmp: "compiler",
2384
- rt: "runtime",
2385
- "r/t": "runtime"
2386
- };
2387
- var ensureLogOpts = (opts) => {
2388
- const components = {};
2389
- const outgoingOpts = opts;
2390
- if (!opts.log && /^(version|test)$/.test(opts.command)) {
2391
- outgoingOpts.log = { default: "info" };
2392
- return outgoingOpts;
2393
- }
2394
- if (opts.log) {
2395
- const parts = opts.log.split(",");
2396
- parts.forEach((l) => {
2397
- let component = "";
2398
- let level = "";
2399
- if (l.match(/=/)) {
2400
- const parts2 = l.split("=");
2401
- component = parts2[0].toLowerCase();
2402
- if (componentShorthands[component]) {
2403
- component = componentShorthands[component];
2404
- }
2405
- level = parts2[1].toLowerCase();
2406
- } else {
2407
- component = "default";
2408
- level = l.toLowerCase();
2409
- if (level === "none" && !parts.find((p) => p.startsWith("job"))) {
2410
- components["job"] = "none";
2411
- }
2412
- }
2413
- if (!/^(cli|runtime|compiler|job|default)$/i.test(component)) {
2414
- throw new Error(ERROR_MESSAGE_LOG_COMPONENT);
2415
- }
2416
- level = level.toLowerCase();
2417
- if (!isValidLogLevel(level)) {
2418
- throw new Error(ERROR_MESSAGE_LOG_LEVEL);
2419
- }
2420
- components[component] = level;
2421
- });
2422
- }
2423
- outgoingOpts.log = {
2424
- ...defaultLoggerOptions,
2425
- ...components
2426
- };
2427
- return outgoingOpts;
2428
- };
2429
- var ensure_log_opts_default = ensureLogOpts;
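Editorial aside, not part of the published diff: the ensureLogOpts helper removed here (presumably re-emitted earlier in the new bundle, since the new option definitions still call it) turns the --log string into a per-component level map. With the defaults shown above, illustrative inputs behave like this:
ensureLogOpts({ command: "execute", log: "info" });
// => opts.log becomes { default: "info", job: "debug" }
ensureLogOpts({ command: "execute", log: "cmp=debug,none" });
// => opts.log becomes { default: "none", job: "none", compiler: "debug" }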
2430
-
2431
- // src/options.ts
2432
- var apiKey = {
2433
- name: "apikey",
2434
- yargs: {
2435
- alias: ["pat", "token", "api-key"],
2436
- description: "API Key, Personal Access Token (PAT), or other access token from Lightning"
2437
- },
2438
- ensure: (opts) => {
2439
- if (!opts.apikey) {
2440
- opts.apiKey = process.env.OPENFN_API_KEY;
2441
- }
2442
- }
2443
- };
2444
- var endpoint = {
2445
- name: "endpoint",
2446
- yargs: {
2447
- alias: ["lightning"],
2448
- description: "[beta only] URL to Lightning endpoint"
2449
- }
2450
- };
2451
- var force = {
2452
- name: "force",
2453
- yargs: {
2454
- alias: ["f"],
2455
- boolean: true,
2456
- description: "Force metadata to be regenerated",
2457
- default: false
2458
- }
2459
- };
2460
- var log = {
2461
- name: "log",
2462
- yargs: {
2463
- alias: ["l"],
2464
- description: "Set the log level",
2465
- string: true
2466
- },
2467
- ensure: (opts) => {
2468
- ensure_log_opts_default(opts);
2469
- }
2470
- };
2471
- var logJson = {
2472
- name: "log-json",
2473
- yargs: {
2474
- description: "Output all logs as JSON objects",
2475
- boolean: true
2476
- }
2477
- };
2478
- var projectPath = {
2479
- name: "project-path",
2480
- yargs: {
2481
- string: true,
2482
- alias: ["p"],
2483
- description: "The location of your project.yaml file"
2484
- }
2485
- };
2486
- var path11 = {
2487
- name: "path",
2488
- yargs: {
2489
- description: "Path"
2490
- }
2491
- };
2492
- var snapshots = {
2493
- name: "snapshots",
2494
- yargs: {
2495
- description: "List of snapshot ids to pull",
2496
- array: true
2497
- }
2498
- };
2499
- var timeout = {
2500
- name: "timeout",
2501
- yargs: {
2502
- alias: ["t"],
2503
- number: true,
2504
- description: "Set the timeout duration (ms). Defaults to 5 minutes.",
2505
- default: 5 * 60 * 1e3
2506
- }
2507
- };
2508
- var workflow = {
2509
- name: "workflow",
2510
- yargs: {
2511
- string: true,
2512
- description: "Name of the workflow to execute"
2513
- }
2514
- };
2515
-
2516
- // src/util/get-cli-option-object.ts
2517
- function getCLIOptionObject(arg) {
2518
- if (isObject(arg)) {
2519
- return arg;
2520
- } else if (typeof arg === "string") {
2521
- try {
2522
- const p = JSON.parse(arg);
2523
- if (isObject(p))
2524
- return p;
2525
- } catch (e) {
2526
- }
2527
- return Object.fromEntries(
2528
- arg.split(",").map((pair) => {
2529
- const [k, v] = pair.split("=");
2530
- return [k.trim(), v.trim()];
2531
- })
2532
- );
2533
- }
2534
- }
2535
- function isObject(arg) {
2536
- return typeof arg === "object" && arg !== null && !Array.isArray(arg);
2537
- }
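Editorial aside, not part of the published diff: getCLIOptionObject (used to coerce --workflow-mappings) accepts an object, a JSON string, or a comma-separated list of key=value pairs. The mappings below are illustrative inputs only:
getCLIOptionObject('{"patients": "Sync Patients"}');
// => { patients: "Sync Patients" }
getCLIOptionObject("patients=Sync Patients, admin=Notify Admin");
// => { patients: "Sync Patients", admin: "Notify Admin" }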
2538
-
2539
- // src/projects/options.ts
2540
- var env = {
2541
- name: "env",
2542
- yargs: {
2543
- description: "Environment name (eg staging, prod, branch)",
2544
- hidden: true
2545
- }
2546
- };
2547
- var alias = {
2548
- name: "alias",
2549
- yargs: {
2550
- description: "Environment name (eg staging, prod, branch)"
2551
- }
2552
- };
2553
- var removeUnmapped = {
2554
- name: "remove-unmapped",
2555
- yargs: {
2556
- boolean: true,
2557
- description: "Removes all workflows that didn't get mapped from the final project after merge"
2558
- }
2559
- };
2560
- var workflowMappings = {
2561
- name: "workflow-mappings",
2562
- yargs: {
2563
- type: "string",
2564
- coerce: getCLIOptionObject,
2565
- description: "A manual object mapping of which workflows in source and target should be matched for a merge."
2566
- }
2567
- };
2568
- var outputPath = {
2569
- name: "output-path",
2570
- yargs: {
2571
- alias: ["output"],
2572
- type: "string",
2573
- description: "Path to output the fetched project to"
2574
- }
2575
- };
2576
- var workspace = {
2577
- name: "workspace",
2578
- yargs: {
2579
- alias: ["w"],
2580
- description: "Path to the project workspace (ie, path to openfn.yaml)"
2581
- },
2582
- ensure: (opts) => {
2583
- const ws = opts.workspace ?? process.env.OPENFN_WORKSPACE;
2584
- if (!ws) {
2585
- opts.workspace = process.cwd();
2586
- } else {
2587
- opts.workspace = resolve_path_default(ws);
2588
- }
2589
- }
2590
- };
2591
-
2592
- // src/projects/fetch.ts
2593
- var options = [
2820
+ import Project2, { Workspace as Workspace2 } from "@openfn/project";
2821
+ var options2 = [
2594
2822
  alias,
2595
2823
  apiKey,
2596
2824
  endpoint,
@@ -2605,42 +2833,44 @@ var options = [
2605
2833
  env,
2606
2834
  workspace
2607
2835
  ];
2608
- var command = {
2836
+ var command2 = {
2609
2837
  command: "fetch [project]",
2610
2838
  describe: `Download the latest version of a project from a lightning server (does not expand the project, use checkout)`,
2611
- builder: (yargs) => build(options, yargs).positional("project", {
2839
+ builder: (yargs) => build(options2, yargs).positional("project", {
2612
2840
  describe: "The id, alias or UUID of the project to fetch. If not set, will default to the active project"
2613
2841
  }).example(
2614
2842
  "fetch 57862287-23e6-4650-8d79-e1dd88b24b1c",
2615
2843
  "Fetch an updated copy of a the above spec and state from a Lightning Instance"
2616
2844
  ),
2617
- handler: ensure("project-fetch", options)
2845
+ handler: ensure("project-fetch", options2)
2618
2846
  };
2619
- var printProjectName = (project) => `${project.qname} (${project.id})`;
2620
- var handler2 = async (options7, logger) => {
2621
- const workspacePath = options7.workspace ?? process.cwd();
2847
+ var printProjectName2 = (project) => `${project.qname} (${project.id})`;
2848
+ var handler2 = async (options8, logger) => {
2849
+ const workspacePath = options8.workspace ?? process.cwd();
2622
2850
  logger.debug("Using workspace at", workspacePath);
2623
- const workspace2 = new Workspace(workspacePath, logger, false);
2624
- const { outputPath: outputPath2 } = options7;
2851
+ const workspace2 = new Workspace2(workspacePath, logger, false);
2852
+ const { outputPath: outputPath2 } = options8;
2625
2853
  const localTargetProject = await resolveOutputProject(
2626
2854
  workspace2,
2627
- options7,
2855
+ options8,
2628
2856
  logger
2629
2857
  );
2630
- const remoteProject = await fetchRemoteProject(workspace2, options7, logger);
2631
- ensureTargetCompatible(options7, remoteProject, localTargetProject);
2632
- const outputRoot = resolve_path_default(outputPath2 || workspacePath);
2633
- const projectsDir = remoteProject?.config.dirs.projects ?? ".projects";
2634
- const finalOutputPath = outputPath2 ?? `${outputRoot}/${projectsDir}/${remoteProject.qname}`;
2858
+ const remoteProject = await fetchRemoteProject(workspace2, options8, logger);
2859
+ ensureTargetCompatible(options8, remoteProject, localTargetProject);
2860
+ const finalOutputPath = getSerializePath(
2861
+ remoteProject,
2862
+ workspacePath,
2863
+ outputPath2
2864
+ );
2635
2865
  let format = void 0;
2636
2866
  if (outputPath2) {
2637
2867
  const ext = path12.extname(outputPath2).substring(1);
2638
2868
  if (ext.length) {
2639
2869
  format = ext;
2640
2870
  }
2641
- if (options7.alias) {
2871
+ if (options8.alias) {
2642
2872
  logger.warn(
2643
- `WARNING: alias "${options7.alias}" was set, but will be ignored as output path was provided`
2873
+ `WARNING: alias "${options8.alias}" was set, but will be ignored as output path was provided`
2644
2874
  );
2645
2875
  }
2646
2876
  }
@@ -2650,55 +2880,55 @@ var handler2 = async (options7, logger) => {
2650
2880
  );
2651
2881
  return remoteProject;
2652
2882
  };
2653
- async function resolveOutputProject(workspace2, options7, logger) {
2883
+ async function resolveOutputProject(workspace2, options8, logger) {
2654
2884
  logger.debug("Checking for local copy of project...");
2655
- if (options7.outputPath) {
2885
+ if (options8.outputPath) {
2656
2886
  try {
2657
- const customProject = await Project3.from("path", options7.outputPath);
2887
+ const customProject = await Project2.from("path", options8.outputPath);
2658
2888
  logger.debug(
2659
- `Found existing local project ${printProjectName(customProject)} at`,
2660
- options7.outputPath
2889
+ `Found existing local project ${printProjectName2(customProject)} at`,
2890
+ options8.outputPath
2661
2891
  );
2662
2892
  return customProject;
2663
2893
  } catch (e) {
2664
- logger.debug("No project found at", options7.outputPath);
2894
+ logger.debug("No project found at", options8.outputPath);
2665
2895
  }
2666
2896
  }
2667
- if (options7.alias) {
2668
- const aliasProject = workspace2.get(options7.alias);
2897
+ if (options8.alias) {
2898
+ const aliasProject = workspace2.get(options8.alias);
2669
2899
  if (aliasProject) {
2670
2900
  logger.debug(
2671
2901
  `Found local project from alias:`,
2672
- printProjectName(aliasProject)
2902
+ printProjectName2(aliasProject)
2673
2903
  );
2674
2904
  return aliasProject;
2675
2905
  } else {
2676
- logger.debug(`No local project found with alias ${options7.alias}`);
2906
+ logger.debug(`No local project found with alias ${options8.alias}`);
2677
2907
  }
2678
2908
  }
2679
- const project = workspace2.get(options7.project);
2909
+ const project = workspace2.get(options8.project);
2680
2910
  if (project) {
2681
2911
  logger.debug(
2682
2912
  `Found local project from identifier:`,
2683
- printProjectName(project)
2913
+ printProjectName2(project)
2684
2914
  );
2685
2915
  return project;
2686
2916
  } else {
2687
2917
  logger.debug(
2688
2918
  `No local project found matching identifier: `,
2689
- options7.project
2919
+ options8.project
2690
2920
  );
2691
2921
  }
2692
2922
  }
2693
- async function fetchRemoteProject(workspace2, options7, logger) {
2923
+ async function fetchRemoteProject(workspace2, options8, logger) {
2694
2924
  logger.debug(`Fetching latest project data from app`);
2695
- const config2 = loadAppAuthConfig(options7, logger);
2696
- let projectUUID = options7.project;
2697
- const localProject = workspace2.get(options7.project);
2698
- if (localProject?.openfn?.uuid && localProject.openfn.uuid !== options7.project) {
2925
+ const config2 = loadAppAuthConfig(options8, logger);
2926
+ let projectUUID = options8.project;
2927
+ const localProject = workspace2.get(options8.project);
2928
+ if (localProject?.openfn?.uuid && localProject.openfn.uuid !== options8.project) {
2699
2929
  projectUUID = localProject.openfn.uuid;
2700
2930
  logger.debug(
2701
- `Resolved ${options7.project} to UUID ${projectUUID} from local project ${printProjectName(
2931
+ `Resolved ${options8.project} to UUID ${projectUUID} from local project ${printProjectName2(
2702
2932
  localProject
2703
2933
  )}}`
2704
2934
  );
@@ -2710,7 +2940,7 @@ async function fetchRemoteProject(workspace2, options7, logger) {
2710
2940
  projectUUID,
2711
2941
  logger
2712
2942
  );
2713
- const project = await Project3.from(
2943
+ const project = await Project2.from(
2714
2944
  "state",
2715
2945
  data,
2716
2946
  {
@@ -2718,7 +2948,7 @@ async function fetchRemoteProject(workspace2, options7, logger) {
2718
2948
  },
2719
2949
  {
2720
2950
  ...workspace2.getConfig(),
2721
- alias: options7.alias ?? localProject?.alias ?? "main"
2951
+ alias: options8.alias ?? localProject?.alias ?? "main"
2722
2952
  }
2723
2953
  );
2724
2954
  logger.debug(
@@ -2726,20 +2956,20 @@ async function fetchRemoteProject(workspace2, options7, logger) {
2726
2956
  );
2727
2957
  return project;
2728
2958
  }
2729
- function ensureTargetCompatible(options7, remoteProject, localProject) {
2959
+ function ensureTargetCompatible(options8, remoteProject, localProject) {
2730
2960
  if (localProject) {
2731
- if (!options7.force && localProject.uuid != remoteProject.uuid) {
2961
+ if (!options8.force && localProject.uuid != remoteProject.uuid) {
2732
2962
  const error = new Error("PROJECT_EXISTS");
2733
2963
  error.message = "A project with a different UUID exists at this location";
2734
2964
  error.fix = `You have tried to fetch a remote project into a local project with a different UUID
2735
2965
 
2736
2966
  Try adding an alias to rename the new project:
2737
2967
 
2738
- openfn fetch ${options7.project} --alias ${remoteProject.id}
2968
+ openfn fetch ${options8.project} --alias ${remoteProject.id}
2739
2969
 
2740
2970
  To ignore this error and override the local file, pass --force (-f)
2741
2971
 
2742
- openfn fetch ${options7.project} --force
2972
+ openfn fetch ${options8.project} --force
2743
2973
  `;
2744
2974
  error.fetched_project = {
2745
2975
  uuid: remoteProject.uuid,
@@ -2757,7 +2987,7 @@ To ignore this error and override the local file, pass --force (-f)
2757
2987
  const hasAnyHistory = remoteProject.workflows.find(
2758
2988
  (w) => w.workflow.history?.length
2759
2989
  );
2760
- const skipVersionCheck = options7.force || // The user forced the checkout
2990
+ const skipVersionCheck = options8.force || // The user forced the checkout
2761
2991
  !hasAnyHistory;
2762
2992
  if (!skipVersionCheck && !remoteProject.canMergeInto(localProject)) {
2763
2993
  throw new Error("Error! An incompatible project exists at this location");
@@ -2766,30 +2996,30 @@ To ignore this error and override the local file, pass --force (-f)
2766
2996
  }
2767
2997
 
2768
2998
  // src/projects/checkout.ts
2769
- import Project4, { Workspace as Workspace2 } from "@openfn/project";
2999
+ import Project3, { Workspace as Workspace3 } from "@openfn/project";
2770
3000
  import path13 from "path";
2771
3001
  import fs4 from "fs";
2772
3002
  import { rimraf } from "rimraf";
2773
- var options2 = [log, workspace];
2774
- var command2 = {
3003
+ var options3 = [log, workspace];
3004
+ var command3 = {
2775
3005
  command: "checkout <project>",
2776
3006
  describe: "Switch to a different OpenFn project in the same workspace",
2777
- handler: ensure("project-checkout", options2),
2778
- builder: (yargs) => build(options2, yargs).positional("project", {
3007
+ handler: ensure("project-checkout", options3),
3008
+ builder: (yargs) => build(options3, yargs).positional("project", {
2779
3009
  describe: "The id, alias or UUID of the project to chcekout",
2780
3010
  demandOption: true
2781
3011
  })
2782
3012
  };
2783
- var handler3 = async (options7, logger) => {
2784
- const projectIdentifier = options7.project;
2785
- const workspacePath = options7.workspace ?? process.cwd();
2786
- const workspace2 = new Workspace2(workspacePath, logger);
3013
+ var handler3 = async (options8, logger) => {
3014
+ const projectIdentifier = options8.project;
3015
+ const workspacePath = options8.workspace ?? process.cwd();
3016
+ const workspace2 = new Workspace3(workspacePath, logger);
2787
3017
  const { project: _, ...config2 } = workspace2.getConfig();
2788
3018
  let switchProject;
2789
3019
  if (/\.(yaml|json)$/.test(projectIdentifier)) {
2790
3020
  const filePath = projectIdentifier.startsWith("/") ? projectIdentifier : path13.join(workspacePath, projectIdentifier);
2791
3021
  logger.debug("Loading project from path ", filePath);
2792
- switchProject = await Project4.from("path", filePath, config2);
3022
+ switchProject = await Project3.from("path", filePath, config2);
2793
3023
  } else {
2794
3024
  switchProject = workspace2.get(projectIdentifier);
2795
3025
  }
@@ -2814,48 +3044,48 @@ var handler3 = async (options7, logger) => {
2814
3044
  };
2815
3045
 
2816
3046
  // src/projects/pull.ts
2817
- var options3 = [
3047
+ var options4 = [
2818
3048
  alias,
2819
3049
  env,
2820
3050
  workspace,
2821
3051
  apiKey,
2822
3052
  endpoint,
2823
3053
  log,
2824
- override(path11, {
3054
+ override(path8, {
2825
3055
  description: "path to output the project to"
2826
3056
  }),
2827
3057
  logJson,
2828
- projectPath,
2829
3058
  snapshots,
2830
- path11,
3059
+ path8,
2831
3060
  force
2832
3061
  ];
2833
- var command3 = {
3062
+ var command4 = {
2834
3063
  command: "pull [project]",
2835
3064
  describe: `Pull a project from a Lightning Instance and expand to the file system (ie fetch + checkout)`,
2836
- builder: (yargs) => build(options3, yargs).positional("project", {
3065
+ builder: (yargs) => build(options4, yargs).positional("project", {
2837
3066
  describe: "The UUID, local id or local alias of the project to pull"
2838
3067
  }).example(
2839
3068
  "pull 57862287-23e6-4650-8d79-e1dd88b24b1c",
2840
3069
  "Pull project with a UUID from a lightning instance"
2841
3070
  ),
2842
- handler: ensure("project-pull", options3)
3071
+ handler: ensure("project-pull", options4)
2843
3072
  };
2844
- async function handler4(options7, logger) {
2845
- await handler2(options7, logger);
3073
+ async function handler4(options8, logger) {
3074
+ await handler2(options8, logger);
2846
3075
  logger.success(`Downloaded latest project version`);
2847
- await handler3(options7, logger);
3076
+ await handler3(options8, logger);
2848
3077
  logger.success(`Checked out project locally`);
2849
3078
  }
2850
3079
  var pull_default = handler4;
2851
3080
 
2852
3081
  // src/pull/handler.ts
2853
- async function pullHandler(options7, logger) {
2854
- if (options7.beta) {
2855
- return pull_default(options7, logger);
3082
+ async function pullHandler(options8, logger) {
3083
+ if (options8.beta) {
3084
+ options8.project = options8.projectId;
3085
+ return pull_default(options8, logger);
2856
3086
  }
2857
3087
  try {
2858
- const config2 = mergeOverrides2(await getConfig2(options7.configPath), options7);
3088
+ const config2 = mergeOverrides2(await getConfig2(options8.configPath), options8);
2859
3089
  if (process.env["OPENFN_API_KEY"]) {
2860
3090
  logger.info("Using OPENFN_API_KEY environment variable");
2861
3091
  config2.apiKey = process.env["OPENFN_API_KEY"];
@@ -2869,8 +3099,8 @@ async function pullHandler(options7, logger) {
2869
3099
  );
2870
3100
  const { data: project } = await getProject(
2871
3101
  config2,
2872
- options7.projectId,
2873
- options7.snapshots
3102
+ options8.projectId,
3103
+ options8.snapshots
2874
3104
  );
2875
3105
  if (!project) {
2876
3106
  logger.error("ERROR: Project not found.");
@@ -2883,8 +3113,8 @@ async function pullHandler(options7, logger) {
2883
3113
  const state = getStateFromProjectPayload(project);
2884
3114
  logger.always("Downloading the project spec (as YAML) from the server.");
2885
3115
  const queryParams = new URLSearchParams();
2886
- queryParams.append("id", options7.projectId);
2887
- options7.snapshots?.forEach(
3116
+ queryParams.append("id", options8.projectId);
3117
+ options8.snapshots?.forEach(
2888
3118
  (snapshot) => queryParams.append("snapshots[]", snapshot)
2889
3119
  );
2890
3120
  const url2 = new URL(
@@ -2933,13 +3163,13 @@ async function pullHandler(options7, logger) {
2933
3163
  throw error;
2934
3164
  }
2935
3165
  }
2936
- function mergeOverrides2(config2, options7) {
3166
+ function mergeOverrides2(config2, options8) {
2937
3167
  return {
2938
3168
  ...config2,
2939
3169
  apiKey: pickFirst2(process.env["OPENFN_API_KEY"], config2.apiKey),
2940
3170
  endpoint: pickFirst2(process.env["OPENFN_ENDPOINT"], config2.endpoint),
2941
- configPath: options7.configPath,
2942
- requireConfirmation: pickFirst2(options7.confirm, config2.requireConfirmation)
3171
+ configPath: options8.configPath,
3172
+ requireConfirmation: pickFirst2(options8.confirm, config2.requireConfirmation)
2943
3173
  };
2944
3174
  }
2945
3175
  function pickFirst2(...args) {
@@ -2947,10 +3177,11 @@ function pickFirst2(...args) {
2947
3177
  }
2948
3178
  var handler_default10 = pullHandler;
2949
3179
 
2950
- // src/projects/handler.ts
2951
- var handler_exports = {};
2952
- __export(handler_exports, {
3180
+ // src/projects/index.ts
3181
+ var projects_exports = {};
3182
+ __export(projects_exports, {
2953
3183
  checkout: () => handler3,
3184
+ deploy: () => handler,
2954
3185
  fetch: () => handler2,
2955
3186
  list: () => handler5,
2956
3187
  merge: () => handler7,
@@ -2959,20 +3190,20 @@ __export(handler_exports, {
2959
3190
  });
2960
3191
 
2961
3192
  // src/projects/list.ts
2962
- import { Workspace as Workspace3 } from "@openfn/project";
2963
- var options4 = [log, workspace];
2964
- var command4 = {
3193
+ import { Workspace as Workspace4 } from "@openfn/project";
3194
+ var options5 = [log, workspace];
3195
+ var command5 = {
2965
3196
  command: "list [project-path]",
2966
3197
  describe: "List all the openfn projects available in the current directory",
2967
3198
  aliases: ["project", "$0"],
2968
- handler: ensure("project-list", options4),
2969
- builder: (yargs) => build(options4, yargs)
3199
+ handler: ensure("project-list", options5),
3200
+ builder: (yargs) => build(options5, yargs)
2970
3201
  };
2971
- var handler5 = async (options7, logger) => {
3202
+ var handler5 = async (options8, logger) => {
2972
3203
  logger.info("Searching for projects in workspace at:");
2973
- logger.info(" ", options7.workspace);
3204
+ logger.info(" ", options8.workspace);
2974
3205
  logger.break();
2975
- const workspace2 = new Workspace3(options7.workspace);
3206
+ const workspace2 = new Workspace4(options8.workspace);
2976
3207
  if (!workspace2.valid) {
2977
3208
  throw new Error("No OpenFn projects found");
2978
3209
  }
@@ -2990,26 +3221,26 @@ ${project.workflows.map((w) => " - " + w.id).join("\n")}`;
2990
3221
  }
2991
3222
 
2992
3223
  // src/projects/version.ts
2993
- import { Workspace as Workspace4 } from "@openfn/project";
2994
- var options5 = [workflow, workspace, workflowMappings];
2995
- var command5 = {
3224
+ import { Workspace as Workspace5 } from "@openfn/project";
3225
+ var options6 = [workflow, workspace, workflowMappings];
3226
+ var command6 = {
2996
3227
  command: "version [workflow]",
2997
3228
  describe: "Returns the version hash of a given workflow in a workspace",
2998
- handler: ensure("project-version", options5),
2999
- builder: (yargs) => build(options5, yargs)
3229
+ handler: ensure("project-version", options6),
3230
+ builder: (yargs) => build(options6, yargs)
3000
3231
  };
3001
- var handler6 = async (options7, logger) => {
3002
- const workspace2 = new Workspace4(options7.workspace);
3232
+ var handler6 = async (options8, logger) => {
3233
+ const workspace2 = new Workspace5(options8.workspace);
3003
3234
  if (!workspace2.valid) {
3004
3235
  logger.error("Command was run in an invalid openfn workspace");
3005
3236
  return;
3006
3237
  }
3007
3238
  const output = /* @__PURE__ */ new Map();
3008
3239
  const activeProject = workspace2.getActiveProject();
3009
- if (options7.workflow) {
3010
- const workflow2 = activeProject?.getWorkflow(options7.workflow);
3240
+ if (options8.workflow) {
3241
+ const workflow2 = activeProject?.getWorkflow(options8.workflow);
3011
3242
  if (!workflow2) {
3012
- logger.error(`No workflow found with id ${options7.workflow}`);
3243
+ logger.error(`No workflow found with id ${options8.workflow}`);
3013
3244
  return;
3014
3245
  }
3015
3246
  output.set(workflow2.name || workflow2.id, workflow2.getVersionHash());
@@ -3023,7 +3254,7 @@ var handler6 = async (options7, logger) => {
3023
3254
  return;
3024
3255
  }
3025
3256
  let final;
3026
- if (options7.json) {
3257
+ if (options8.json) {
3027
3258
  final = JSON.stringify(Object.fromEntries(output), void 0, 2);
3028
3259
  } else {
3029
3260
  final = Array.from(output.entries()).map(([key, value]) => key + "\n" + value).join("\n\n");
@@ -3034,10 +3265,10 @@ ${final}`);
3034
3265
  };
3035
3266
 
3036
3267
  // src/projects/merge.ts
3037
- import Project6, { Workspace as Workspace5 } from "@openfn/project";
3268
+ import Project5, { Workspace as Workspace6 } from "@openfn/project";
3038
3269
  import path15 from "node:path";
3039
3270
  import fs6 from "node:fs/promises";
3040
- var options6 = [
3271
+ var options7 = [
3041
3272
  removeUnmapped,
3042
3273
  workflowMappings,
3043
3274
  workspace,
@@ -3062,24 +3293,24 @@ var options6 = [
3062
3293
  description: "Force a merge even when workflows are incompatible"
3063
3294
  })
3064
3295
  ];
3065
- var command6 = {
3296
+ var command7 = {
3066
3297
  command: "merge <project>",
3067
3298
  describe: "Merges the specified project (by UUID, id or alias) into the currently checked out project",
3068
- handler: ensure("project-merge", options6),
3069
- builder: (yargs) => build(options6, yargs)
3299
+ handler: ensure("project-merge", options7),
3300
+ builder: (yargs) => build(options7, yargs)
3070
3301
  };
3071
- var handler7 = async (options7, logger) => {
3072
- const workspacePath = options7.workspace;
3073
- const workspace2 = new Workspace5(workspacePath);
3302
+ var handler7 = async (options8, logger) => {
3303
+ const workspacePath = options8.workspace;
3304
+ const workspace2 = new Workspace6(workspacePath);
3074
3305
  if (!workspace2.valid) {
3075
3306
  logger.error("Command was run in an invalid openfn workspace");
3076
3307
  return;
3077
3308
  }
3078
3309
  let targetProject;
3079
- if (options7.base) {
3080
- const basePath = path15.resolve(options7.base);
3310
+ if (options8.base) {
3311
+ const basePath = path15.resolve(options8.base);
3081
3312
  logger.debug("Loading target project from path", basePath);
3082
- targetProject = await Project6.from("path", basePath);
3313
+ targetProject = await Project5.from("path", basePath);
3083
3314
  } else {
3084
3315
  targetProject = workspace2.getActiveProject();
3085
3316
  if (!targetProject) {
@@ -3088,12 +3319,12 @@ var handler7 = async (options7, logger) => {
3088
3319
  }
3089
3320
  logger.debug(`Loading target project from workspace (${targetProject.id})`);
3090
3321
  }
3091
- const sourceProjectIdentifier = options7.project;
3322
+ const sourceProjectIdentifier = options8.project;
3092
3323
  let sourceProject;
3093
3324
  if (/\.(ya?ml|json)$/.test(sourceProjectIdentifier)) {
3094
3325
  const filePath = path15.join(workspacePath, sourceProjectIdentifier);
3095
3326
  logger.debug("Loading source project from path ", filePath);
3096
- sourceProject = await Project6.from("path", filePath);
3327
+ sourceProject = await Project5.from("path", filePath);
3097
3328
  } else {
3098
3329
  logger.debug(
3099
3330
  `Loading source project from workspace ${sourceProjectIdentifier}`
@@ -3114,20 +3345,20 @@ var handler7 = async (options7, logger) => {
3114
3345
  logger.error("The checked out project has no id");
3115
3346
  return;
3116
3347
  }
3117
- const finalPath = options7.outputPath ?? workspace2.getProjectPath(targetProject.id);
3348
+ const finalPath = options8.outputPath ?? workspace2.getProjectPath(targetProject.id);
3118
3349
  if (!finalPath) {
3119
3350
  logger.error("Path to checked out project not found.");
3120
3351
  return;
3121
3352
  }
3122
- const final = Project6.merge(sourceProject, targetProject, {
3123
- removeUnmapped: options7.removeUnmapped,
3124
- workflowMappings: options7.workflowMappings,
3125
- force: options7.force
3353
+ const final = Project5.merge(sourceProject, targetProject, {
3354
+ removeUnmapped: options8.removeUnmapped,
3355
+ workflowMappings: options8.workflowMappings,
3356
+ force: options8.force
3126
3357
  });
3127
3358
  let outputFormat = workspace2.config.formats.project;
3128
- if (options7.outputPath?.endsWith(".json")) {
3359
+ if (options8.outputPath?.endsWith(".json")) {
3129
3360
  outputFormat = "json";
3130
- } else if (options7.outputPath?.endsWith(".yaml")) {
3361
+ } else if (options8.outputPath?.endsWith(".yaml")) {
3131
3362
  outputFormat = "yaml";
3132
3363
  }
3133
3364
  let finalState = final.serialize("state", {
@@ -3142,8 +3373,8 @@ var handler7 = async (options7, logger) => {
3142
3373
  await handler3(
3143
3374
  {
3144
3375
  workspace: workspacePath,
3145
- project: options7.outputPath ? finalPath : final.id,
3146
- log: options7.log
3376
+ project: options8.outputPath ? finalPath : final.id,
3377
+ log: options8.log
3147
3378
  },
3148
3379
  logger
3149
3380
  );
@@ -3173,8 +3404,8 @@ var loadVersionFromPath = (adaptorPath) => {
3173
3404
  return "unknown";
3174
3405
  }
3175
3406
  };
3176
- var printVersions = async (logger, options7 = {}, includeComponents = false) => {
3177
- const { adaptors, logJson: logJson2 } = options7;
3407
+ var printVersions = async (logger, options8 = {}, includeComponents = false) => {
3408
+ const { adaptors, logJson: logJson2 } = options8;
3178
3409
  let longestAdaptorName = "";
3179
3410
  const adaptorList = [];
3180
3411
  adaptors?.forEach((adaptor) => {
@@ -3184,7 +3415,7 @@ var printVersions = async (logger, options7 = {}, includeComponents = false) =>
3184
3415
  const [namePart, pathPart] = adaptor.split("=");
3185
3416
  adaptorVersion = loadVersionFromPath(pathPart);
3186
3417
  adaptorName = getNameAndVersion7(namePart).name;
3187
- } else if (options7.monorepoPath) {
3418
+ } else if (options8.monorepoPath) {
3188
3419
  adaptorName = getNameAndVersion7(adaptor).name;
3189
3420
  adaptorVersion = "monorepo";
3190
3421
  } else {
@@ -3273,8 +3504,8 @@ var handlers = {
3273
3504
  docs: handler_default8,
3274
3505
  metadata: handler_default9,
3275
3506
  pull: handler_default10,
3276
- projects: handler_exports,
3277
- project: handler_exports,
3507
+ projects: projects_exports,
3508
+ project: projects_exports,
3278
3509
  ["collections-get"]: handler_default4.get,
3279
3510
  ["collections-set"]: handler_default4.set,
3280
3511
  ["collections-remove"]: handler_default4.remove,
@@ -3282,6 +3513,7 @@ var handlers = {
3282
3513
  ["repo-install"]: install,
3283
3514
  ["repo-pwd"]: pwd,
3284
3515
  ["repo-list"]: list,
3516
+ ["project-deploy"]: handler,
3285
3517
  ["project-pull"]: handler4,
3286
3518
  ["project-list"]: handler5,
3287
3519
  ["project-version"]: handler6,
@@ -3290,13 +3522,13 @@ var handlers = {
3290
3522
  ["project-fetch"]: handler2,
3291
3523
  version: async (opts, logger) => print_versions_default(logger, opts, true)
3292
3524
  };
3293
- var parse = async (options7, log2) => {
3294
- const logger = log2 || logger_default(CLI, options7);
3295
- if (options7.command === "execute" || options7.command === "test") {
3296
- await print_versions_default(logger, options7);
3525
+ var parse = async (options8, log2) => {
3526
+ const logger = log2 || logger_default(CLI, options8);
3527
+ if (options8.command === "execute" || options8.command === "test") {
3528
+ await print_versions_default(logger, options8);
3297
3529
  }
3298
3530
  report(logger);
3299
- const { monorepoPath } = options7;
3531
+ const { monorepoPath } = options8;
3300
3532
  if (monorepoPath) {
3301
3533
  if (monorepoPath === "ERR") {
3302
3534
  logger.error(
@@ -3307,19 +3539,19 @@ var parse = async (options7, log2) => {
3307
3539
  }
3308
3540
  await validateMonoRepo(monorepoPath, logger);
3309
3541
  logger.success(`Loading adaptors from monorepo at ${monorepoPath}`);
3310
- options7.adaptors = map_adaptors_to_monorepo_default(
3542
+ options8.adaptors = map_adaptors_to_monorepo_default(
3311
3543
  monorepoPath,
3312
- options7.adaptors,
3544
+ options8.adaptors,
3313
3545
  logger
3314
3546
  );
3315
3547
  }
3316
- const handler8 = handlers[options7.command];
3548
+ const handler8 = handlers[options8.command];
3317
3549
  if (!handler8) {
3318
- logger.error(`Unrecognised command: ${options7.command}`);
3550
+ logger.error(`Unrecognised command: ${options8.command}`);
3319
3551
  process.exit(1);
3320
3552
  }
3321
3553
  try {
3322
- return await handler8(options7, logger);
3554
+ return await handler8(options8, logger);
3323
3555
  } catch (e) {
3324
3556
  if (!process.exitCode) {
3325
3557
  process.exitCode = e.exitCode || 1;