@openfn/cli 1.24.1 → 1.26.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/index.js +34 -10
- package/dist/process/runner.js +409 -154
- package/package.json +7 -6
package/dist/index.js
CHANGED
@@ -368,7 +368,7 @@ var ignoreImports = {
   }
 };
 var getBaseDir = (opts2) => {
-  const basePath = opts2.path ?? ".";
+  const basePath = opts2.path ?? opts2.workspace ?? ".";
   if (/\.(jso?n?|ya?ml)$/.test(basePath)) {
     return nodePath.dirname(basePath);
   }
@@ -923,12 +923,27 @@ var alias = {
     description: "Environment name (eg staging, prod, branch)"
   }
 };
+var clean = {
+  name: "clean",
+  yargs: {
+    description: "Clean the working dir before checking out the new project",
+    default: false,
+    boolean: true
+  }
+};
 var dryRun2 = {
   name: "dryRun",
   yargs: {
     description: "Runs the command but does not commit any changes to disk or app"
   }
 };
+var format = {
+  name: "format",
+  yargs: {
+    hidden: true,
+    description: "The format to save the project as - state, yaml or json. Use this to download raw state files."
+  }
+};
 var removeUnmapped = {
   name: "remove-unmapped",
   yargs: {
@@ -1138,7 +1153,7 @@ var command_default9 = pullCommand;
 var repo = {
   command: "repo [subcommand]",
   describe: "Run commands on the module repo (install|clean)",
-  builder: (yargs2) => yargs2.command(
+  builder: (yargs2) => yargs2.command(clean2).command(install).command(list).example("repo install -a http", "Install @openfn/language-http").example("repo clean", "Remove everything from the repo working dir")
 };
 var installOptions = [
   log,
@@ -1177,7 +1192,7 @@ var cleanOptions = [
     }
   }
 ];
-var
+var clean2 = {
   command: "clean",
   describe: "Removes all modules from the runtime module repo",
   handler: ensure("repo-clean", cleanOptions),
@@ -1225,18 +1240,24 @@ var command2 = {
 var version_default = command2;

 // src/projects/merge.ts
-import Project3, { Workspace as
+import Project3, { Workspace as Workspace5 } from "@openfn/project";

 // src/projects/checkout.ts
-import Project2, { Workspace as
+import Project2, { Workspace as Workspace4 } from "@openfn/project";
+import { rimraf as rimraf2 } from "rimraf";
+
+// src/projects/util.ts
 import { rimraf } from "rimraf";
-
+import { versionsEqual } from "@openfn/project";
+
+// src/projects/checkout.ts
+var options11 = [log, workspace, clean, force];
 var command3 = {
   command: "checkout <project>",
   describe: "Switch to a different OpenFn project in the same workspace",
   handler: ensure("project-checkout", options11),
   builder: (yargs2) => build(options11, yargs2).positional("project", {
-    describe: "The id, alias or UUID of the project to
+    describe: "The id, alias or UUID of the project to checkout",
     demandOption: true
   })
 };
@@ -1277,7 +1298,7 @@ var command4 = {
 var merge_default = command4;

 // src/projects/fetch.ts
-import Project4, { Workspace as
+import Project4, { Workspace as Workspace6 } from "@openfn/project";
 var options13 = [
   alias,
   apiKey,
@@ -1291,7 +1312,8 @@ var options13 = [
   }),
   outputPath2,
   env2,
-  workspace
+  workspace,
+  format
 ];
 var command5 = {
   command: "fetch [project]",
@@ -1307,6 +1329,7 @@ var command5 = {
 var fetch_default = command5;

 // src/projects/pull.ts
+import { Workspace as Workspace7 } from "@openfn/project";
 var options14 = [
   alias,
   env2,
@@ -1335,7 +1358,7 @@ var command6 = {
 };

 // src/projects/deploy.ts
-import Project5 from "@openfn/project";
+import Project5, { versionsEqual as versionsEqual2, Workspace as Workspace8 } from "@openfn/project";
 import c2 from "chalk";
 var options15 = [
   env2,
@@ -1351,6 +1374,7 @@ var options15 = [
 ];
 var command7 = {
   command: "deploy",
+  aliases: "push",
   describe: `Deploy the checked out project to a Lightning Instance`,
   builder: (yargs2) => build(options15, yargs2).positional("project", {
     describe: "The UUID, local id or local alias of the project to deploy to"
package/dist/process/runner.js
CHANGED
@@ -133,14 +133,14 @@ var callApollo = async (apolloBaseUrl, serviceName, payload, logger) => {
     });
   });
 };
-var loadPayload = async (logger,
-  if (!
+var loadPayload = async (logger, path18) => {
+  if (!path18) {
     logger.warn("No JSON payload provided");
     logger.warn("Most apollo services require JSON to be uploaded");
     return {};
   }
-  if (
-    const str = await readFile(
+  if (path18.endsWith(".json")) {
+    const str = await readFile(path18, "utf8");
     const json = JSON.parse(str);
     logger.debug("Loaded JSON payload");
     return json;
@@ -190,22 +190,31 @@ var createNullLogger = () => createLogger2(void 0, { log: { default: "none" } })
 import fs from "node:fs";
 import path2 from "node:path";
 import { rmdir } from "node:fs/promises";
-var
-
-  const {
-
+var CACHE_DIR = ".cli-cache";
+var getCachePath = (options8, workflowName, stepId) => {
+  const { baseDir, cachePath } = options8;
+  if (cachePath) {
+    if (stepId) {
+      return path2.resolve(cachePath, `${stepId.replace(/ /, "-")}.json`);
+    }
+    return path2.resolve(cachePath);
+  }
+  const basePath = path2.resolve(
+    baseDir ?? process.cwd(),
+    `${CACHE_DIR}/${workflowName}`
+  );
   if (stepId) {
-    return
+    return `${basePath}/${stepId.replace(/ /, "-")}.json`;
   }
-  return
+  return basePath;
 };
-var ensureGitIgnore = (options8) => {
+var ensureGitIgnore = (options8, cachePath) => {
   if (!options8._hasGitIgnore) {
-
-
-
-
-    );
+    let root = cachePath;
+    while (root.length > 1 && !root.endsWith(CACHE_DIR)) {
+      root = path2.dirname(root);
+    }
+    const ignorePath = path2.resolve(root, ".gitignore");
     try {
       fs.accessSync(ignorePath);
     } catch (e) {
@@ -216,15 +225,15 @@ var ensureGitIgnore = (options8) => {
 };
 var saveToCache = async (plan, stepId, output, options8, logger) => {
   if (options8.cacheSteps) {
-    const cachePath = await getCachePath(
+    const cachePath = await getCachePath(options8, plan.workflow.name, stepId);
     fs.mkdirSync(path2.dirname(cachePath), { recursive: true });
-    ensureGitIgnore(options8);
+    ensureGitIgnore(options8, path2.dirname(cachePath));
     logger.info(`Writing ${stepId} output to ${cachePath}`);
     fs.writeFileSync(cachePath, JSON.stringify(output));
   }
 };
 var clearCache = async (plan, options8, logger) => {
-  const cacheDir = await getCachePath(
+  const cacheDir = await getCachePath(options8, plan.workflow?.name);
   try {
     await rmdir(cacheDir, { recursive: true });
     logger.info(`Cleared cache at ${cacheDir}`);
@@ -267,13 +276,13 @@ var execute_default = async (plan, input, opts, logger) => {
 };
 function parseAdaptors(plan) {
   const extractInfo = (specifier) => {
-    const [module,
+    const [module, path18] = specifier.split("=");
     const { name, version } = getNameAndVersion(module);
     const info = {
       name
     };
-    if (
-      info.path =
+    if (path18) {
+      info.path = path18;
     }
     if (version) {
       info.version = version;
@@ -583,10 +592,10 @@ var stripVersionSpecifier = (specifier) => {
   return specifier;
 };
 var resolveSpecifierPath = async (pattern, repoDir, log2) => {
-  const [specifier,
-  if (
-    log2.debug(`Resolved ${specifier} to path: ${
-    return
+  const [specifier, path18] = pattern.split("=");
+  if (path18) {
+    log2.debug(`Resolved ${specifier} to path: ${path18}`);
+    return path18;
   }
   const repoPath = await getModulePath(specifier, repoDir, log2);
   if (repoPath) {
@@ -605,12 +614,12 @@ var loadTransformOptions = async (opts, log2) => {
   let exports;
   const [specifier] = adaptorInput.split("=");
   log2.debug(`Trying to preload types for ${specifier}`);
-  const
-  if (
+  const path18 = await resolveSpecifierPath(adaptorInput, opts.repoDir, log2);
+  if (path18) {
     try {
-      exports = await preloadAdaptorExports(
+      exports = await preloadAdaptorExports(path18, log2);
     } catch (e) {
-      log2.error(`Failed to load adaptor typedefs from path ${
+      log2.error(`Failed to load adaptor typedefs from path ${path18}`);
       log2.error(e);
     }
   }
@@ -685,7 +694,11 @@ var load_state_default = async (plan, opts, log2, start) => {
   const upstreamStepId = getUpstreamStepId(plan, start);
   if (upstreamStepId) {
     log2.debug(`Input step for "${start}" is "${upstreamStepId}"`);
-    const cachedStatePath = await getCachePath(
+    const cachedStatePath = await getCachePath(
+      opts,
+      plan.workflow.name,
+      upstreamStepId
+    );
     log2.debug("Loading cached state from", cachedStatePath);
     try {
       await fs2.access(cachedStatePath);
@@ -830,8 +843,8 @@ var map_adaptors_to_monorepo_default = mapAdaptorsToMonorepo;
 // src/util/resolve-path.ts
 import nodepath from "node:path";
 import os from "node:os";
-var resolve_path_default = (
-  return
+var resolve_path_default = (path18, root) => {
+  return path18.startsWith("~") ? path18.replace(`~`, os.homedir) : nodepath.resolve(root ?? "", path18);
 };

 // src/util/load-plan.ts
@@ -857,6 +870,7 @@ var loadPlan = async (options8, logger) => {
     };
     options8.credentials ??= workspace2.getConfig().credentials;
     options8.collectionsEndpoint ??= proj.openfn?.endpoint;
+    options8.cachePath ??= workspace2.workflowsPath + `/${name}/${CACHE_DIR}`;
   }
   if (options8.path && /ya?ml$/.test(options8.path)) {
     const content = await fs3.readFile(path4.resolve(options8.path), "utf-8");
@@ -886,7 +900,10 @@ var loadPlan = async (options8, logger) => {
       defaultName
     );
   } else {
-
+    const { id, start, options: o, ...w } = workflowObj;
+    const opts = { ...o, start };
+    const plan = { id, workflow: w, options: opts };
+    return loadXPlan(plan, options8, logger, defaultName);
   }
 };
 var load_plan_default = loadPlan;
@@ -1131,8 +1148,8 @@ var loadXPlan = async (plan, options8, logger, defaultName = "") => {
 };

 // src/util/assert-path.ts
-var assert_path_default = (
-  if (!
+var assert_path_default = (path18) => {
+  if (!path18) {
     console.error("ERROR: no path provided!");
     console.error("\nUsage:");
     console.error("  open path/to/job");
@@ -1195,7 +1212,8 @@ var assertStepStructure = (step, index) => {
     "state",
     "configuration",
     "linker",
-    "openfn"
+    "openfn",
+    "enabled"
   ];
   for (const key in step) {
     if (!allowedKeys.includes(key)) {
@@ -1370,7 +1388,10 @@ var executeHandler = async (options8, logger) => {
   const result = await execute_default(finalPlan, state, options8, logger);
   if (options8.cacheSteps) {
     logger.success(
-
+      `Cached output written to ${getCachePath(
+        options8,
+        plan.workflow.name
+      )} (see info logs for details)`
     );
   }
   await serialize_output_default(options8, result, logger);
@@ -1761,8 +1782,10 @@ import {
 } from "@openfn/deploy";

 // src/projects/deploy.ts
-import Project from "@openfn/project";
+import Project, { versionsEqual as versionsEqual2, Workspace as Workspace3 } from "@openfn/project";
 import c2 from "chalk";
+import { writeFile as writeFile6 } from "node:fs/promises";
+import path10 from "node:path";

 // src/util/ensure-log-opts.ts
 var defaultLoggerOptions = {
@@ -1956,12 +1979,27 @@ var alias = {
     description: "Environment name (eg staging, prod, branch)"
   }
 };
+var clean2 = {
+  name: "clean",
+  yargs: {
+    description: "Clean the working dir before checking out the new project",
+    default: false,
+    boolean: true
+  }
+};
 var dryRun = {
   name: "dryRun",
   yargs: {
     description: "Runs the command but does not commit any changes to disk or app"
   }
 };
+var format = {
+  name: "format",
+  yargs: {
+    hidden: true,
+    description: "The format to save the project as - state, yaml or json. Use this to download raw state files."
+  }
+};
 var removeUnmapped = {
   name: "remove-unmapped",
   yargs: {
@@ -2013,6 +2051,8 @@ var CLIError = class extends Error {
 };

 // src/projects/util.ts
+import { rimraf } from "rimraf";
+import { versionsEqual } from "@openfn/project";
 var loadAppAuthConfig = (options8, logger) => {
   const { OPENFN_API_KEY, OPENFN_ENDPOINT } = process.env;
   const config2 = {
@@ -2036,22 +2076,22 @@ var ensureExt = (filePath, ext) => {
   return filePath;
 };
 var getSerializePath = (project, workspacePath, outputPath2) => {
-  const outputRoot = resolve_path_default(outputPath2 || workspacePath);
+  const outputRoot = resolve_path_default(outputPath2 || workspacePath || ".");
   const projectsDir = project?.config.dirs.projects ?? ".projects";
-  return outputPath2 ?? `${outputRoot}/${projectsDir}/${project
+  return outputPath2 ?? `${outputRoot}/${projectsDir}/${project?.qname}`;
 };
 var serialize = async (project, outputPath2, formatOverride, dryRun2 = false) => {
   const root = path9.dirname(outputPath2);
   await mkdir3(root, { recursive: true });
-  const
-  const output = project?.serialize("project", { format });
+  const format2 = formatOverride ?? project.config?.formats.project;
+  const output = format2 === "state" ? project?.serialize("state", { format: "json" }) : project?.serialize("project", { format: format2 });
   const maybeWriteFile = (filePath, output2) => {
     if (!dryRun2) {
       return writeFile5(filePath, output2);
     }
   };
   let finalPath;
-  if (
+  if (format2 === "yaml") {
     finalPath = ensureExt(outputPath2, "yaml");
     await maybeWriteFile(finalPath, output);
   } else {
@@ -2060,10 +2100,10 @@ var serialize = async (project, outputPath2, formatOverride, dryRun2 = false) =>
   }
   return finalPath;
 };
-var getLightningUrl = (endpoint2,
+var getLightningUrl = (endpoint2, path18 = "", snapshots2) => {
   const params = new URLSearchParams();
   snapshots2?.forEach((snapshot) => params.append("snapshots[]", snapshot));
-  return new URL(`/api/provision/${
+  return new URL(`/api/provision/${path18}?${params.toString()}`, endpoint2);
 };
 async function fetchProject(endpoint2, apiKey2, projectId, logger, snapshots2) {
   const url2 = getLightningUrl(endpoint2, projectId, snapshots2);
@@ -2107,9 +2147,16 @@ async function deployProject(endpoint2, apiKey2, state, logger) {
     body: JSON.stringify(state)
   });
   if (!response.ok) {
-
+    logger?.error(`Deploy failed with code `, response.status);
     logger?.error("Failed to deploy project:");
-
+    const contentType = response.headers.get("content-type") ?? "";
+    if (contentType.match("application/json ")) {
+      const body = await response.json();
+      logger?.error(JSON.stringify(body, null, 2));
+    } else {
+      const content = await response.text();
+      logger?.error(content);
+    }
     throw new CLIError(
       `Failed to deploy project ${state.name}: ${response.status}`
     );
@@ -2132,6 +2179,58 @@ var DeployError = class extends Error {
     super(message);
   }
 };
+async function tidyWorkflowDir(currentProject, incomingProject, dryRun2 = false) {
+  if (!currentProject || !incomingProject) {
+    return [];
+  }
+  const currentFiles = currentProject.serialize("fs");
+  const newFiles = incomingProject.serialize("fs");
+  const toRemove = [];
+  for (const path18 in currentFiles) {
+    if (!newFiles[path18]) {
+      toRemove.push(path18);
+    }
+  }
+  if (!dryRun2) {
+    await rimraf(toRemove);
+  }
+  return toRemove.sort();
+}
+var updateForkedFrom = (proj) => {
+  proj.cli.forked_from = proj.workflows.reduce((obj, wf) => {
+    if (wf.history.length) {
+      obj[wf.id] = wf.history.at(-1);
+    }
+    return obj;
+  }, {});
+  return proj;
+};
+var findLocallyChangedWorkflows = async (workspace2, project, ifNoForkedFrom = "assume-diverged") => {
+  const { forked_from } = workspace2.activeProject ?? {};
+  if (!forked_from || Object.keys(forked_from).length === 0) {
+    if (ifNoForkedFrom === "assume-ok") {
+      return [];
+    }
+    return project.workflows.map((w) => w.id);
+  }
+  const changedWorkflows = [];
+  for (const workflow2 of project.workflows) {
+    const currentHash = workflow2.getVersionHash();
+    const forkedHash = forked_from[workflow2.id];
+    if (forkedHash === void 0) {
+      changedWorkflows.push(workflow2.id);
+    } else if (!versionsEqual(currentHash, forkedHash)) {
+      changedWorkflows.push(workflow2.id);
+    }
+  }
+  const currentWorkflowIds = new Set(project.workflows.map((w) => w.id));
+  for (const workflowId in forked_from) {
+    if (!currentWorkflowIds.has(workflowId)) {
+      changedWorkflows.push(workflowId);
+    }
+  }
+  return changedWorkflows;
+};

 // src/util/command-builders.ts
 import c from "chalk";
@@ -2195,6 +2294,7 @@ var options = [
 var printProjectName = (project) => `${project.id} (${project.openfn?.uuid || "<no UUID>"})`;
 var command = {
   command: "deploy",
+  aliases: "push",
   describe: `Deploy the checked out project to a Lightning Instance`,
   builder: (yargs) => build(options, yargs).positional("project", {
     describe: "The UUID, local id or local alias of the project to deploy to"
@@ -2204,14 +2304,34 @@ var command = {
   ),
   handler: ensure("project-deploy", options)
 };
+var hasRemoteDiverged = (local, remote, workflows = []) => {
+  let diverged = null;
+  const refs = local.cli.forked_from ?? {};
+  const filteredWorkflows = workflows.length ? local.workflows.filter((w) => workflows.includes(w.id)) : local.workflows;
+  for (const wf of filteredWorkflows) {
+    if (wf.id in refs) {
+      const forkedVersion = refs[wf.id];
+      const remoteVersion = remote.getWorkflow(wf.id)?.history.at(-1);
+      if (!versionsEqual2(forkedVersion, remoteVersion)) {
+        diverged ??= [];
+        diverged.push(wf.id);
+      }
+    } else {
+    }
+  }
+  return diverged;
+};
 async function handler(options8, logger) {
   logger.warn(
     "WARNING: the project deploy command is in BETA and may not be stable. Use cautiously on production projects."
   );
   const config2 = loadAppAuthConfig(options8, logger);
   logger.info("Attempting to load checked-out project from workspace");
+  const ws = new Workspace3(options8.workspace || ".");
+  const { alias: alias2 } = ws.getActiveProject();
   const localProject = await Project.from("fs", {
-    root: options8.workspace || "."
+    root: options8.workspace || ".",
+    alias: alias2
   });
   logger.success(`Loaded local project ${printProjectName(localProject)}`);
   let remoteProject;
@@ -2238,33 +2358,61 @@ Your local project (${localProject.uuid}) has a different UUID to the remote pro
 Pass --force to override this error and deploy anyway.`);
     return false;
   }
-  const
+  const locallyChangedWorkflows = await findLocallyChangedWorkflows(
+    ws,
+    localProject
+  );
+  const diffs = reportDiff(
+    localProject,
+    remoteProject,
+    locallyChangedWorkflows,
+    logger
+  );
   if (!diffs.length) {
     logger.success("Nothing to deploy");
     return;
   }
-
-
-
+  const skipVersionTest = remoteProject.workflows.find(
+    (wf) => wf.history.length === 0
+  );
+  if (skipVersionTest) {
+    logger.warn(
+      "Skipping compatibility check as no local version history detected"
+    );
+    logger.warn("Pushing these changes may overwrite changes made to the app");
+  } else {
+    const divergentWorkflows = hasRemoteDiverged(
+      localProject,
+      remoteProject,
+      locallyChangedWorkflows
+    );
+    if (divergentWorkflows) {
+      logger.warn(
+        `The following workflows have diverged: ${divergentWorkflows}`
+      );
+      if (!options8.force) {
+        logger.error(`Error: Projects have diverged!

-The remote project has been edited since the local project was branched. Changes may be lost.
+The remote project has been edited since the local project was branched. Changes may be lost.

-Pass --force to override this error and deploy anyway.`);
-
+Pass --force to override this error and deploy anyway.`);
+        return;
+      } else {
+        logger.warn(
+          "Remote project has diverged from local project! Pushing anyway as -f passed"
+        );
+      }
     } else {
-      logger.
-      "Remote project has not diverged from local project
+      logger.info(
+        "Remote project has not diverged from local project - it is safe to deploy \u{1F389}"
       );
     }
-  } else {
-    logger.info(
-      "Remote project has not diverged from local project - it is safe to deploy \u{1F389}"
-    );
   }
   logger.info("Merging changes into remote project");
   const merged = Project.merge(localProject, remoteProject, {
     mode: "replace",
-    force: true
+    force: true,
+    onlyUpdated: true
   });
   const state = merged.serialize("state", {
     format: "json"
@@ -2298,14 +2446,20 @@ Pass --force to override this error and deploy anyway.`);
       },
       merged.config
     );
+    updateForkedFrom(finalProject);
+    const configData = finalProject.generateConfig();
+    await writeFile6(
+      path10.resolve(options8.workspace, configData.path),
+      configData.content
+    );
     const finalOutputPath = getSerializePath(localProject, options8.workspace);
-
-
+    const fullFinalPath = await serialize(finalProject, finalOutputPath);
+    logger.debug("Updated local project at ", fullFinalPath);
+    logger.success("Updated project at", config2.endpoint);
   }
-  logger.success("Updated project at", config2.endpoint);
 }
-var reportDiff = (local, remote, logger) => {
-  const diffs = remote.diff(local);
+var reportDiff = (local, remote, locallyChangedWorkflows, logger) => {
+  const diffs = remote.diff(local, locallyChangedWorkflows);
   if (diffs.length === 0) {
     logger.info("No workflow changes detected");
     return diffs;
@@ -2392,30 +2546,30 @@ function pickFirst(...args) {
 var handler_default6 = deployHandler;

 // src/docgen/handler.ts
-import { writeFile as
+import { writeFile as writeFile7 } from "node:fs/promises";
 import { readFileSync, writeFileSync, mkdirSync, rmSync } from "node:fs";
-import
+import path11 from "node:path";
 import { describePackage } from "@openfn/describe-package";
 import { getNameAndVersion as getNameAndVersion4 } from "@openfn/runtime";
 var RETRY_DURATION = 500;
 var RETRY_COUNT = 20;
 var TIMEOUT_MS = 1e3 * 60;
 var actualDocGen = (specifier) => describePackage(specifier, {});
-var ensurePath = (filePath) => mkdirSync(
-var generatePlaceholder = (
-  writeFileSync(
+var ensurePath = (filePath) => mkdirSync(path11.dirname(filePath), { recursive: true });
+var generatePlaceholder = (path18) => {
+  writeFileSync(path18, `{ "loading": true, "timestamp": ${Date.now()}}`);
 };
 var finish = (logger, resultPath) => {
   logger.success("Done! Docs can be found at:\n");
-  logger.print(`  ${
+  logger.print(`  ${path11.resolve(resultPath)}`);
 };
-var generateDocs = async (specifier,
+var generateDocs = async (specifier, path18, docgen, logger) => {
   const result = await docgen(specifier);
-  await
-  finish(logger,
-  return
+  await writeFile7(path18, JSON.stringify(result, null, 2));
+  finish(logger, path18);
+  return path18;
 };
-var waitForDocs = async (docs,
+var waitForDocs = async (docs, path18, logger, retryDuration = RETRY_DURATION) => {
   try {
     if (docs.hasOwnProperty("loading")) {
       logger.info("Docs are being loaded by another process. Waiting.");
@@ -2427,19 +2581,19 @@ var waitForDocs = async (docs, path17, logger, retryDuration = RETRY_DURATION) =
           clearInterval(i);
           reject(new Error("Timed out waiting for docs to load"));
         }
-        const updated = JSON.parse(readFileSync(
+        const updated = JSON.parse(readFileSync(path18, "utf8"));
         if (!updated.hasOwnProperty("loading")) {
           logger.info("Docs found!");
           clearInterval(i);
-          resolve(
+          resolve(path18);
         }
         count++;
       }, retryDuration);
     });
   } else {
-      logger.info(`Docs already written to cache at ${
-      finish(logger,
-      return
+      logger.info(`Docs already written to cache at ${path18}`);
+      finish(logger, path18);
+      return path18;
   }
   } catch (e) {
     logger.error("Existing doc JSON corrupt. Aborting");
@@ -2456,28 +2610,28 @@ var docgenHandler = (options8, logger, docgen = actualDocGen, retryDuration = RE
     process.exit(9);
   }
   logger.success(`Generating docs for ${specifier}`);
-  const
-  ensurePath(
+  const path18 = `${repoDir}/docs/${specifier}.json`;
+  ensurePath(path18);
   const handleError2 = () => {
     logger.info("Removing placeholder");
-    rmSync(
+    rmSync(path18);
   };
   try {
-    const existing = readFileSync(
+    const existing = readFileSync(path18, "utf8");
     const json = JSON.parse(existing);
     if (json && json.timeout && Date.now() - json.timeout >= TIMEOUT_MS) {
       logger.info(`Expired placeholder found. Removing.`);
-      rmSync(
+      rmSync(path18);
       throw new Error("TIMEOUT");
     }
-    return waitForDocs(json,
+    return waitForDocs(json, path18, logger, retryDuration);
   } catch (e) {
     if (e.message !== "TIMEOUT") {
-      logger.info(`Docs JSON not found at ${
+      logger.info(`Docs JSON not found at ${path18}`);
     }
     logger.debug("Generating placeholder");
-    generatePlaceholder(
-    return generateDocs(specifier,
+    generatePlaceholder(path18);
+    return generateDocs(specifier, path18, docgen, logger).catch((e2) => {
       logger.error("Error generating documentation");
       logger.error(e2);
       handleError2();
@@ -2528,7 +2682,7 @@ var docsHandler = async (options8, logger) => {
     logger.success(`Showing docs for ${adaptorName} v${version}`);
   }
   logger.info("Generating/loading documentation...");
-  const
+  const path18 = await handler_default7(
     {
       specifier: `${name}@${version}`,
       repoDir
@@ -2537,8 +2691,8 @@ var docsHandler = async (options8, logger) => {
     createNullLogger()
   );
   let didError = false;
-  if (
-    const source = await readFile5(
+  if (path18) {
+    const source = await readFile5(path18, "utf8");
     const data = JSON.parse(source);
     let desc;
     if (operation) {
@@ -2576,13 +2730,13 @@ var handler_default8 = docsHandler;
 // src/metadata/cache.ts
 import { getNameAndVersion as getNameAndVersion6 } from "@openfn/runtime";
 import { createHash } from "node:crypto";
-import { mkdir as mkdir4, readFile as readFile6, writeFile as
-import
+import { mkdir as mkdir4, readFile as readFile6, writeFile as writeFile8, readdir, rm } from "node:fs/promises";
+import path12 from "node:path";
 var UNSUPPORTED_FILE_NAME = "unsupported.json";
 var getCachePath2 = (repoDir, key) => {
-  const base =
+  const base = path12.join(repoDir, "meta");
   if (key) {
-    return
+    return path12.join(base, key.endsWith(".json") ? key : `${key}.json`);
   }
   return base;
 };
@@ -2624,8 +2778,8 @@ var get2 = async (repoPath, key) => {
 };
 var set2 = async (repoPath, key, result) => {
   const p = getCachePath2(repoPath, key);
-  await mkdir4(
-  await
+  await mkdir4(path12.dirname(p), { recursive: true });
+  await writeFile8(p, JSON.stringify(result));
 };
 var getUnsupportedCachePath = (repoDir) => {
   return getCachePath2(repoDir, UNSUPPORTED_FILE_NAME);
@@ -2683,8 +2837,8 @@ var markAdaptorAsUnsupported = async (adaptorSpecifier, repoDir) => {
       majorMinor: parsed.majorMinor,
       timestamp: Date.now()
     };
-    await mkdir4(
-    await
+    await mkdir4(path12.dirname(cachePath), { recursive: true });
+    await writeFile8(cachePath, JSON.stringify(cache, null, 2));
   }
 };
@@ -2805,7 +2959,7 @@ var metadataHandler = async (options8, logger) => {
 var handler_default9 = metadataHandler;

 // src/pull/handler.ts
-import
+import path15 from "path";
 import fs5 from "node:fs/promises";
 import {
   getConfig as getConfig2,
@@ -2815,9 +2969,13 @@ import {
   syncRemoteSpec
 } from "@openfn/deploy";

+// src/projects/pull.ts
+import { Workspace as Workspace6 } from "@openfn/project";
+
 // src/projects/fetch.ts
-import
-import Project2, { Workspace as
+import path13 from "node:path";
+import Project2, { Workspace as Workspace4 } from "@openfn/project";
+import { writeFile as writeFile9 } from "node:fs/promises";
 var options2 = [
   alias,
   apiKey,
@@ -2831,7 +2989,8 @@ var options2 = [
   }),
   outputPath,
   env,
-  workspace
+  workspace,
+  format
 ];
 var command2 = {
   command: "fetch [project]",
@@ -2845,28 +3004,73 @@ var command2 = {
   handler: ensure("project-fetch", options2)
 };
 var printProjectName2 = (project) => `${project.qname} (${project.id})`;
-var
+var fetchV1 = async (options8, logger) => {
   const workspacePath = options8.workspace ?? process.cwd();
   logger.debug("Using workspace at", workspacePath);
-  const workspace2 = new
-  const
-
-
-
+  const workspace2 = new Workspace4(workspacePath, logger, false);
+  const localProject = workspace2.get(options8.project);
+  if (localProject) {
+    logger.debug(
+      `Resolved "${options8.project}" to local project ${printProjectName2(
+        localProject
+      )}`
+    );
+  } else {
+    logger.debug(
+      `Failed to resolve "${options8.project}" to local project. Will send request to app anyway.`
+    );
+  }
+  const config2 = loadAppAuthConfig(options8, logger);
+  const { data } = await fetchProject(
+    options8.endpoint ?? localProject?.openfn?.endpoint,
+    config2.apiKey,
+    localProject?.uuid ?? options8.project,
    logger
  );
+  const finalOutputPath = getSerializePath(
+    localProject,
+    options8.workspace,
+    options8.outputPath
+  );
+  logger.success(`Fetched project file to ${finalOutputPath}`);
+  await writeFile9(finalOutputPath, JSON.stringify(data, null, 2));
+  return data;
+};
+var handler2 = async (options8, logger) => {
+  if (options8.format === "state") {
+    return fetchV1(options8, logger);
+  }
+  return fetchV2(options8, logger);
+};
+var fetchV2 = async (options8, logger) => {
+  const workspacePath = options8.workspace ?? process.cwd();
+  logger.debug("Using workspace at", workspacePath);
+  const workspace2 = new Workspace4(workspacePath, logger, false);
+  const { outputPath: outputPath2 } = options8;
   const remoteProject = await fetchRemoteProject(workspace2, options8, logger);
-
+  if (!options8.alias && remoteProject.sandbox?.parentId) {
+    options8.alias = remoteProject.id;
+    remoteProject.cli.alias = options8.alias;
+    logger.debug("Defaulting alias to sandbox id", options8.alias);
+  }
+  if (!options8.force && options8.format !== "state") {
+    const localTargetProject = await resolveOutputProject(
+      workspace2,
+      options8,
+      logger
+    );
+    ensureTargetCompatible(options8, remoteProject, localTargetProject);
+  }
   const finalOutputPath = getSerializePath(
     remoteProject,
     workspacePath,
     outputPath2
   );
-  let
+  let format2 = options8.format;
   if (outputPath2) {
-    const ext =
+    const ext = path13.extname(outputPath2).substring(1);
     if (ext.length) {
-
+      format2 = ext;
     }
     if (options8.alias) {
       logger.warn(
@@ -2874,10 +3078,12 @@ var handler2 = async (options8, logger) => {
       );
     }
   }
-  await serialize(
-
-
+  const finalPathWithExt = await serialize(
+    remoteProject,
+    finalOutputPath,
+    format2
   );
+  logger.success(`Fetched project file to ${finalPathWithExt}`);
   return remoteProject;
 };
 async function resolveOutputProject(workspace2, options8, logger) {
@@ -2930,7 +3136,7 @@ async function fetchRemoteProject(workspace2, options8, logger) {
     logger.debug(
       `Resolved ${options8.project} to UUID ${projectUUID} from local project ${printProjectName2(
         localProject
-      )}
+      )}`
     );
   }
   const projectEndpoint = localProject?.openfn?.endpoint ?? config2.endpoint;
@@ -2984,40 +3190,33 @@ To ignore this error and override the local file, pass --force (-f)
     delete error.stack;
     throw error;
   }
-  const hasAnyHistory = remoteProject.workflows.find(
-    (w) => w.workflow.history?.length
-  );
-  const skipVersionCheck = options8.force || // The user forced the checkout
-  !hasAnyHistory;
-  if (!skipVersionCheck && !remoteProject.canMergeInto(localProject)) {
-    throw new Error("Error! An incompatible project exists at this location");
-  }
   }
 }

 // src/projects/checkout.ts
-import Project3, { Workspace as
-import
+import Project3, { Workspace as Workspace5 } from "@openfn/project";
+import path14 from "path";
 import fs4 from "fs";
-import { rimraf } from "rimraf";
-var options3 = [log, workspace];
+import { rimraf as rimraf2 } from "rimraf";
+var options3 = [log, workspace, clean2, force];
 var command3 = {
   command: "checkout <project>",
   describe: "Switch to a different OpenFn project in the same workspace",
   handler: ensure("project-checkout", options3),
   builder: (yargs) => build(options3, yargs).positional("project", {
-    describe: "The id, alias or UUID of the project to
+    describe: "The id, alias or UUID of the project to checkout",
     demandOption: true
   })
 };
 var handler3 = async (options8, logger) => {
   const projectIdentifier = options8.project;
   const workspacePath = options8.workspace ?? process.cwd();
-  const workspace2 = new
+  const workspace2 = new Workspace5(workspacePath, logger);
   const { project: _, ...config2 } = workspace2.getConfig();
+  const currentProject = workspace2.getActiveProject();
   let switchProject;
   if (/\.(yaml|json)$/.test(projectIdentifier)) {
-    const filePath = projectIdentifier.startsWith("/") ? projectIdentifier :
+    const filePath = projectIdentifier.startsWith("/") ? projectIdentifier : path14.join(workspacePath, projectIdentifier);
     logger.debug("Loading project from path ", filePath);
     switchProject = await Project3.from("path", filePath, config2);
   } else {
@@ -3028,14 +3227,51 @@ var handler3 = async (options8, logger) => {
       `Project with id ${projectIdentifier} not found in the workspace`
     );
   }
-
+  try {
+    const localProject = await Project3.from("fs", {
+      root: options8.workspace || "."
+    });
+    logger.success(`Loaded local project ${localProject.alias}`);
+    const changed = await findLocallyChangedWorkflows(
+      workspace2,
+      localProject,
+      "assume-ok"
+    );
+    if (changed.length && !options8.force) {
+      logger.break();
+      logger.warn(
+        "WARNING: detected changes on your currently checked-out project"
+      );
+      logger.warn(
+        `Changes may be lost by checking out ${localProject.alias} right now`
+      );
+      logger.warn(`Pass --force or -f to override this warning and continue`);
+      const e = new Error(
+        `The currently checked out project has diverged! Changes may be lost`
+      );
+      delete e.stack;
+      throw e;
+    }
+  } catch (e) {
+    if (e.message.match("ENOENT")) {
+      logger.debug("No openfn.yaml found locally: skipping divergence test");
+    } else {
+      throw e;
+    }
+  }
+  if (options8.clean) {
+    await rimraf2(workspace2.workflowsPath);
+  } else {
+    await tidyWorkflowDir(currentProject, switchProject);
+  }
+  updateForkedFrom(switchProject);
   const files = switchProject.serialize("fs");
   for (const f in files) {
     if (files[f]) {
-      fs4.mkdirSync(
+      fs4.mkdirSync(path14.join(workspacePath, path14.dirname(f)), {
         recursive: true
       });
-      fs4.writeFileSync(
+      fs4.writeFileSync(path14.join(workspacePath, f), files[f]);
     } else {
       logger.warn("WARNING! No content for file", f);
     }
@@ -3071,11 +3307,30 @@ var command4 = {
   handler: ensure("project-pull", options4)
 };
 async function handler4(options8, logger) {
+  ensureProjectId(options8, logger);
   await handler2(options8, logger);
   logger.success(`Downloaded latest project version`);
   await handler3(options8, logger);
   logger.success(`Checked out project locally`);
 }
+var ensureProjectId = (options8, logger) => {
+  if (!options8.project) {
+    logger?.debug(
+      "No project ID specified: looking up checked out project in Workspace"
+    );
+    const ws = new Workspace6(options8.workspace);
+    if (ws.activeProject) {
+      options8.project = ws.activeProject.uuid;
+      logger?.info(
+        `Project id not provided: will default to ${options8.project}`
+      );
+    } else {
+      throw new Error(
+        "Project not provided: specify a project UUID, id or alias"
+      );
+    }
+  }
+};
 var pull_default = handler4;

 // src/pull/handler.ts
@@ -3136,7 +3391,7 @@ async function pullHandler(options8, logger) {
     process.exitCode = 1;
     process.exit(1);
   }
-  const resolvedPath =
+  const resolvedPath = path15.resolve(config2.specPath);
   logger.debug("reading spec from", resolvedPath);
   const updatedSpec = await syncRemoteSpec(
     await res.text(),
@@ -3145,7 +3400,7 @@ async function pullHandler(options8, logger) {
     logger
   );
   await fs5.writeFile(
-
+    path15.resolve(config2.statePath),
     JSON.stringify(state, null, 2)
   );
   await fs5.writeFile(resolvedPath, updatedSpec);
@@ -3190,7 +3445,7 @@ __export(projects_exports, {
 });

 // src/projects/list.ts
-import { Workspace as
+import { Workspace as Workspace7 } from "@openfn/project";
 var options5 = [log, workspace];
 var command5 = {
   command: "list [project-path]",
@@ -3203,7 +3458,7 @@ var handler5 = async (options8, logger) => {
   logger.info("Searching for projects in workspace at:");
   logger.info("  ", options8.workspace);
   logger.break();
-  const workspace2 = new
+  const workspace2 = new Workspace7(options8.workspace);
   if (!workspace2.valid) {
     throw new Error("No OpenFn projects found");
   }
@@ -3221,7 +3476,7 @@ ${project.workflows.map((w) => "  - " + w.id).join("\n")}`;
 }

 // src/projects/version.ts
-import { Workspace as
+import { Workspace as Workspace8 } from "@openfn/project";
 var options6 = [workflow, workspace, workflowMappings];
 var command6 = {
   command: "version [workflow]",
@@ -3230,7 +3485,7 @@ var command6 = {
   builder: (yargs) => build(options6, yargs)
 };
 var handler6 = async (options8, logger) => {
-  const workspace2 = new
+  const workspace2 = new Workspace8(options8.workspace);
   if (!workspace2.valid) {
     logger.error("Command was run in an invalid openfn workspace");
     return;
@@ -3265,8 +3520,8 @@ ${final}`);
 };

 // src/projects/merge.ts
-import Project5, { Workspace as
-import
+import Project5, { Workspace as Workspace9 } from "@openfn/project";
+import path16 from "node:path";
 import fs6 from "node:fs/promises";
 var options7 = [
   removeUnmapped,
@@ -3301,14 +3556,14 @@ var command7 = {
 };
 var handler7 = async (options8, logger) => {
   const workspacePath = options8.workspace;
-  const workspace2 = new
+  const workspace2 = new Workspace9(workspacePath);
   if (!workspace2.valid) {
     logger.error("Command was run in an invalid openfn workspace");
     return;
   }
   let targetProject;
   if (options8.base) {
-    const basePath =
+    const basePath = path16.resolve(options8.base);
     logger.debug("Loading target project from path", basePath);
     targetProject = await Project5.from("path", basePath);
   } else {
@@ -3322,7 +3577,7 @@ var handler7 = async (options8, logger) => {
   const sourceProjectIdentifier = options8.project;
   let sourceProject;
   if (/\.(ya?ml|json)$/.test(sourceProjectIdentifier)) {
-    const filePath =
+    const filePath = path16.join(workspacePath, sourceProjectIdentifier);
     logger.debug("Loading source project from path ", filePath);
     sourceProject = await Project5.from("path", filePath);
   } else {
@@ -3385,7 +3640,7 @@ var handler7 = async (options8, logger) => {

 // src/util/print-versions.ts
 import { readFileSync as readFileSync2 } from "node:fs";
-import
+import path17 from "node:path";
 import url from "node:url";
 import { getNameAndVersion as getNameAndVersion7 } from "@openfn/runtime";
 import { mainSymbols } from "figures";
@@ -3397,7 +3652,7 @@ var { triangleRightSmall: t } = mainSymbols;
 var loadVersionFromPath = (adaptorPath) => {
   try {
     const pkg = JSON.parse(
-      readFileSync2(
+      readFileSync2(path17.resolve(adaptorPath, "package.json"), "utf8")
     );
     return pkg.version;
   } catch (e) {
@@ -3432,7 +3687,7 @@ var printVersions = async (logger, options8 = {}, includeComponents = false) =>
     ...[NODE, CLI2, RUNTIME2, COMPILER2, longestAdaptorName].map((s) => s.length)
   );
   const prefix = (str) => `  ${t} ${str.padEnd(longest + 4, " ")}`;
-  const dirname3 =
+  const dirname3 = path17.dirname(url.fileURLToPath(import.meta.url));
   const pkg = JSON.parse(readFileSync2(`${dirname3}/../../package.json`, "utf8"));
   const { version, dependencies } = pkg;
   const compilerVersion = dependencies["@openfn/compiler"];
package/package.json
CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "@openfn/cli",
-  "version": "1.24.1",
+  "version": "1.26.0",
   "description": "CLI devtools for the OpenFn toolchain",
   "engines": {
     "node": ">=18",
@@ -33,6 +33,7 @@
     "@types/ws": "^8.18.1",
     "@types/yargs": "^17.0.33",
     "ava": "5.3.1",
+    "lodash-es": "^4.17.21",
     "mock-fs": "^5.5.0",
    "tslib": "^2.8.1",
    "tsup": "^7.2.0",
@@ -49,13 +50,13 @@
     "undici": "7.12.0",
     "ws": "^8.18.3",
     "yargs": "^17.7.2",
-    "@openfn/describe-package": "0.1.5",
-    "@openfn/lexicon": "^1.4.0",
     "@openfn/compiler": "1.2.2",
-    "@openfn/logger": "1.1.1",
     "@openfn/deploy": "0.11.5",
-    "@openfn/
-    "@openfn/
+    "@openfn/logger": "1.1.1",
+    "@openfn/lexicon": "^1.4.1",
+    "@openfn/project": "^0.13.0",
+    "@openfn/runtime": "1.8.3",
+    "@openfn/describe-package": "0.1.5"
   },
   "files": [
     "dist",