@openfn/cli 1.25.0 → 1.26.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/index.js +8 -6
- package/dist/process/runner.js +251 -126
- package/package.json +5 -5
package/dist/index.js
CHANGED

```diff
@@ -1240,17 +1240,18 @@ var command2 = {
 var version_default = command2;
 
 // src/projects/merge.ts
-import Project3, { Workspace as …
+import Project3, { Workspace as Workspace5 } from "@openfn/project";
 
 // src/projects/checkout.ts
-import Project2, { Workspace as …
+import Project2, { Workspace as Workspace4 } from "@openfn/project";
 import { rimraf as rimraf2 } from "rimraf";
 
 // src/projects/util.ts
 import { rimraf } from "rimraf";
+import { versionsEqual } from "@openfn/project";
 
 // src/projects/checkout.ts
-var options11 = [log, workspace, clean];
+var options11 = [log, workspace, clean, force];
 var command3 = {
   command: "checkout <project>",
   describe: "Switch to a different OpenFn project in the same workspace",
@@ -1297,7 +1298,7 @@ var command4 = {
 var merge_default = command4;
 
 // src/projects/fetch.ts
-import Project4, { Workspace as …
+import Project4, { Workspace as Workspace6 } from "@openfn/project";
 var options13 = [
   alias,
   apiKey,
@@ -1328,7 +1329,7 @@ var command5 = {
 var fetch_default = command5;
 
 // src/projects/pull.ts
-import { Workspace as …
+import { Workspace as Workspace7 } from "@openfn/project";
 var options14 = [
   alias,
   env2,
@@ -1357,7 +1358,7 @@ var command6 = {
 };
 
 // src/projects/deploy.ts
-import Project5 from "@openfn/project";
+import Project5, { versionsEqual as versionsEqual2, Workspace as Workspace8 } from "@openfn/project";
 import c2 from "chalk";
 var options15 = [
   env2,
@@ -1373,6 +1374,7 @@ var options15 = [
 ];
 var command7 = {
   command: "deploy",
+  aliases: "push",
   describe: `Deploy the checked out project to a Lightning Instance`,
   builder: (yargs2) => build(options15, yargs2).positional("project", {
     describe: "The UUID, local id or local alias of the project to deploy to"
```
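Aside from identifier renumbering forced by the new imports, the substantive changes in this bundle are the new `force` flag on `checkout` and the `push` alias on `deploy`. In yargs, a command module's `aliases` property registers an alternative name for the same handler, so `openfn project push` should behave identically to `openfn project deploy`. A minimal sketch of the mechanism (the handler body is hypothetical):

```js
import yargs from "yargs";

yargs(process.argv.slice(2))
  .command({
    command: "deploy",
    aliases: "push", // registers "push" as a second name for this command
    describe: "Deploy the checked out project to a Lightning Instance",
    handler: (argv) => console.log("deploying", argv.project), // hypothetical
  })
  .parse();
```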
package/dist/process/runner.js
CHANGED

```diff
@@ -133,14 +133,14 @@ var callApollo = async (apolloBaseUrl, serviceName, payload, logger) => {
     });
   });
 };
-var loadPayload = async (logger, path17) => {
-  if (!path17) {
+var loadPayload = async (logger, path18) => {
+  if (!path18) {
     logger.warn("No JSON payload provided");
     logger.warn("Most apollo services require JSON to be uploaded");
     return {};
   }
-  if (path17.endsWith(".json")) {
-    const str = await readFile(path17, "utf8");
+  if (path18.endsWith(".json")) {
+    const str = await readFile(path18, "utf8");
     const json = JSON.parse(str);
     logger.debug("Loaded JSON payload");
     return json;
```
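The change here is purely the `path17` → `path18` rename caused by the new `path10` import in deploy.ts; `loadPayload`'s behaviour is unchanged: no path returns `{}` with warnings, and only `.json` files are parsed. A hedged usage sketch (the file name is hypothetical):

```js
// Returns {} (plus warnings) when no path is given; parses *.json files.
const empty = await loadPayload(logger);                    // -> {}
const json = await loadPayload(logger, "tmp/payload.json"); // -> parsed object
```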
```diff
@@ -276,13 +276,13 @@ var execute_default = async (plan, input, opts, logger) => {
 };
 function parseAdaptors(plan) {
   const extractInfo = (specifier) => {
-    const [module, path17] = specifier.split("=");
+    const [module, path18] = specifier.split("=");
     const { name, version } = getNameAndVersion(module);
     const info = {
       name
     };
-    if (path17) {
-      info.path = path17;
+    if (path18) {
+      info.path = path18;
     }
     if (version) {
       info.version = version;
```
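`extractInfo` splits each adaptor specifier on `=` to peel off an optional local path, then uses `getNameAndVersion` to separate name from version. Assuming the CLI's usual `name@version=path` specifier shape, the parse runs like this (adaptor name and path are illustrative):

```js
const specifier = "@openfn/language-common@2.0.0=/repo/common";
const [module, path] = specifier.split("=");
// module -> "@openfn/language-common@2.0.0", path -> "/repo/common"
// getNameAndVersion(module) then yields the name plus version "2.0.0"
```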
```diff
@@ -592,10 +592,10 @@ var stripVersionSpecifier = (specifier) => {
   return specifier;
 };
 var resolveSpecifierPath = async (pattern, repoDir, log2) => {
-  const [specifier, path17] = pattern.split("=");
-  if (path17) {
-    log2.debug(`Resolved ${specifier} to path: ${path17}`);
-    return path17;
+  const [specifier, path18] = pattern.split("=");
+  if (path18) {
+    log2.debug(`Resolved ${specifier} to path: ${path18}`);
+    return path18;
   }
   const repoPath = await getModulePath(specifier, repoDir, log2);
   if (repoPath) {
@@ -614,12 +614,12 @@ var loadTransformOptions = async (opts, log2) => {
     let exports;
     const [specifier] = adaptorInput.split("=");
     log2.debug(`Trying to preload types for ${specifier}`);
-    const path17 = await resolveSpecifierPath(adaptorInput, opts.repoDir, log2);
-    if (path17) {
+    const path18 = await resolveSpecifierPath(adaptorInput, opts.repoDir, log2);
+    if (path18) {
       try {
-        exports = await preloadAdaptorExports(path17, log2);
+        exports = await preloadAdaptorExports(path18, log2);
       } catch (e) {
-        log2.error(`Failed to load adaptor typedefs from path ${path17}`);
+        log2.error(`Failed to load adaptor typedefs from path ${path18}`);
         log2.error(e);
       }
     }
@@ -843,8 +843,8 @@ var map_adaptors_to_monorepo_default = mapAdaptorsToMonorepo;
 // src/util/resolve-path.ts
 import nodepath from "node:path";
 import os from "node:os";
-var resolve_path_default = (path17, root) => {
-  return path17.startsWith("~") ? path17.replace(`~`, os.homedir) : nodepath.resolve(root ?? "", path17);
+var resolve_path_default = (path18, root) => {
+  return path18.startsWith("~") ? path18.replace(`~`, os.homedir) : nodepath.resolve(root ?? "", path18);
 };
 
 // src/util/load-plan.ts
```
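One subtlety in `resolve_path_default`: it passes `os.homedir` itself (the function, not its result) to `String.prototype.replace`. Since `replace` accepts a function replacer and calls it, `~` still expands to the home directory. A self-contained sketch of the two branches:

```js
import os from "node:os";
import nodepath from "node:path";

// Same logic as resolve_path_default, with a neutral name.
const resolvePath = (p, root) =>
  p.startsWith("~")
    ? p.replace(`~`, os.homedir) // replacer function: called, return value substituted
    : nodepath.resolve(root ?? "", p);

resolvePath("~/repo");        // e.g. "/home/user/repo"
resolvePath("jobs", "/work"); // "/work/jobs"
```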
```diff
@@ -1148,8 +1148,8 @@ var loadXPlan = async (plan, options8, logger, defaultName = "") => {
 };
 
 // src/util/assert-path.ts
-var assert_path_default = (path17) => {
-  if (!path17) {
+var assert_path_default = (path18) => {
+  if (!path18) {
     console.error("ERROR: no path provided!");
     console.error("\nUsage:");
     console.error("  open path/to/job");
@@ -1212,7 +1212,8 @@ var assertStepStructure = (step, index) => {
     "state",
     "configuration",
     "linker",
-    "openfn"
+    "openfn",
+    "enabled"
   ];
   for (const key in step) {
     if (!allowedKeys.includes(key)) {
@@ -1781,8 +1782,10 @@ import {
 } from "@openfn/deploy";
 
 // src/projects/deploy.ts
-import Project from "@openfn/project";
+import Project, { versionsEqual as versionsEqual2, Workspace as Workspace3 } from "@openfn/project";
 import c2 from "chalk";
+import { writeFile as writeFile6 } from "node:fs/promises";
+import path10 from "node:path";
 
 // src/util/ensure-log-opts.ts
 var defaultLoggerOptions = {
@@ -2049,6 +2052,7 @@ var CLIError = class extends Error {
 
 // src/projects/util.ts
 import { rimraf } from "rimraf";
+import { versionsEqual } from "@openfn/project";
 var loadAppAuthConfig = (options8, logger) => {
   const { OPENFN_API_KEY, OPENFN_ENDPOINT } = process.env;
   const config2 = {
@@ -2096,10 +2100,10 @@ var serialize = async (project, outputPath2, formatOverride, dryRun2 = false) =>
   }
   return finalPath;
 };
-var getLightningUrl = (endpoint2, path17 = "", snapshots2) => {
+var getLightningUrl = (endpoint2, path18 = "", snapshots2) => {
   const params = new URLSearchParams();
   snapshots2?.forEach((snapshot) => params.append("snapshots[]", snapshot));
-  return new URL(`/api/provision/${path17}?${params.toString()}`, endpoint2);
+  return new URL(`/api/provision/${path18}?${params.toString()}`, endpoint2);
 };
 async function fetchProject(endpoint2, apiKey2, projectId, logger, snapshots2) {
   const url2 = getLightningUrl(endpoint2, projectId, snapshots2);
@@ -2143,9 +2147,16 @@ async function deployProject(endpoint2, apiKey2, state, logger) {
     body: JSON.stringify(state)
   });
   if (!response.ok) {
-    …
+    logger?.error(`Deploy failed with code `, response.status);
     logger?.error("Failed to deploy project:");
-    …
+    const contentType = response.headers.get("content-type") ?? "";
+    if (contentType.match("application/json ")) {
+      const body = await response.json();
+      logger?.error(JSON.stringify(body, null, 2));
+    } else {
+      const content = await response.text();
+      logger?.error(content);
+    }
     throw new CLIError(
       `Failed to deploy project ${state.name}: ${response.status}`
     );
```
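The new error path in `deployProject` inspects the response's `content-type` before deciding how to log the body. Note that the released code matches against `"application/json "` with a trailing space; `String.prototype.match` compiles that string to a regex, so a typical `application/json; charset=utf-8` header will not match and JSON bodies will usually be logged through the plain-text branch. A sketch of the presumable intent, without the trailing space (using a plain `Error` in place of the bundle's `CLIError`):

```js
// Hedged sketch of content-type-aware error logging, not the verbatim source.
if (!response.ok) {
  const contentType = response.headers.get("content-type") ?? "";
  if (contentType.match("application/json")) {
    logger?.error(JSON.stringify(await response.json(), null, 2));
  } else {
    logger?.error(await response.text());
  }
  throw new Error(`Failed to deploy project: ${response.status}`);
}
```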
```diff
@@ -2175,9 +2186,9 @@ async function tidyWorkflowDir(currentProject, incomingProject, dryRun2 = false)
   const currentFiles = currentProject.serialize("fs");
   const newFiles = incomingProject.serialize("fs");
   const toRemove = [];
-  for (const path17 in currentFiles) {
-    if (!newFiles[path17]) {
-      toRemove.push(path17);
+  for (const path18 in currentFiles) {
+    if (!newFiles[path18]) {
+      toRemove.push(path18);
     }
   }
   if (!dryRun2) {
@@ -2185,6 +2196,41 @@ async function tidyWorkflowDir(currentProject, incomingProject, dryRun2 = false)
   }
   return toRemove.sort();
 }
+var updateForkedFrom = (proj) => {
+  proj.cli.forked_from = proj.workflows.reduce((obj, wf) => {
+    if (wf.history.length) {
+      obj[wf.id] = wf.history.at(-1);
+    }
+    return obj;
+  }, {});
+  return proj;
+};
+var findLocallyChangedWorkflows = async (workspace2, project, ifNoForkedFrom = "assume-diverged") => {
+  const { forked_from } = workspace2.activeProject ?? {};
+  if (!forked_from || Object.keys(forked_from).length === 0) {
+    if (ifNoForkedFrom === "assume-ok") {
+      return [];
+    }
+    return project.workflows.map((w) => w.id);
+  }
+  const changedWorkflows = [];
+  for (const workflow2 of project.workflows) {
+    const currentHash = workflow2.getVersionHash();
+    const forkedHash = forked_from[workflow2.id];
+    if (forkedHash === void 0) {
+      changedWorkflows.push(workflow2.id);
+    } else if (!versionsEqual(currentHash, forkedHash)) {
+      changedWorkflows.push(workflow2.id);
+    }
+  }
+  const currentWorkflowIds = new Set(project.workflows.map((w) => w.id));
+  for (const workflowId in forked_from) {
+    if (!currentWorkflowIds.has(workflowId)) {
+      changedWorkflows.push(workflowId);
+    }
+  }
+  return changedWorkflows;
+};
 
 // src/util/command-builders.ts
 import c from "chalk";
```
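Together these two helpers give the CLI a fork-point record. `updateForkedFrom` snapshots the newest history entry of each workflow into `proj.cli.forked_from`, and `findLocallyChangedWorkflows` later reports every workflow whose current `getVersionHash()` no longer matches that snapshot, plus workflows with no recorded hash and recorded workflows that have since been deleted. The stored structure is just a map from workflow id to version hash; illustratively (ids and hashes hypothetical):

```js
const forked_from = {
  "sync-patients": "abc123", // wf.history.at(-1) at checkout/deploy time
  "send-alerts": "def456",
};
// A workflow counts as locally changed when
// versionsEqual(wf.getVersionHash(), forked_from[wf.id]) is false.
```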
```diff
@@ -2248,6 +2294,7 @@ var options = [
 var printProjectName = (project) => `${project.id} (${project.openfn?.uuid || "<no UUID>"})`;
 var command = {
   command: "deploy",
+  aliases: "push",
   describe: `Deploy the checked out project to a Lightning Instance`,
   builder: (yargs) => build(options, yargs).positional("project", {
     describe: "The UUID, local id or local alias of the project to deploy to"
@@ -2257,14 +2304,34 @@ var command = {
   ),
   handler: ensure("project-deploy", options)
 };
+var hasRemoteDiverged = (local, remote, workflows = []) => {
+  let diverged = null;
+  const refs = local.cli.forked_from ?? {};
+  const filteredWorkflows = workflows.length ? local.workflows.filter((w) => workflows.includes(w.id)) : local.workflows;
+  for (const wf of filteredWorkflows) {
+    if (wf.id in refs) {
+      const forkedVersion = refs[wf.id];
+      const remoteVersion = remote.getWorkflow(wf.id)?.history.at(-1);
+      if (!versionsEqual2(forkedVersion, remoteVersion)) {
+        diverged ??= [];
+        diverged.push(wf.id);
+      }
+    } else {
+    }
+  }
+  return diverged;
+};
 async function handler(options8, logger) {
   logger.warn(
     "WARNING: the project deploy command is in BETA and may not be stable. Use cautiously on production projects."
   );
   const config2 = loadAppAuthConfig(options8, logger);
   logger.info("Attempting to load checked-out project from workspace");
+  const ws = new Workspace3(options8.workspace || ".");
+  const { alias: alias2 } = ws.getActiveProject();
   const localProject = await Project.from("fs", {
-    root: options8.workspace || "."
+    root: options8.workspace || ".",
+    alias: alias2
   });
   logger.success(`Loaded local project ${printProjectName(localProject)}`);
   let remoteProject;
```
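`hasRemoteDiverged` compares each locally-changed workflow's fork-point hash against the tip of the remote workflow's history. It returns `null` when nothing diverged: the `??=` lazily creates the result array, and workflows with no fork record fall into the deliberately empty `else` branch and are ignored. That `null` return is what lets the caller use a plain truthiness test:

```js
// Sketch of the call-site contract (the workflow id is hypothetical):
const diverged = hasRemoteDiverged(localProject, remoteProject, ["sync-patients"]);
if (diverged) {
  // at least one workflow diverged; diverged is a non-empty array of ids
}
```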
```diff
@@ -2291,39 +2358,61 @@ Your local project (${localProject.uuid}) has a different UUID to the remote pro
 Pass --force to override this error and deploy anyway.`);
     return false;
   }
-  const diffs = reportDiff(localProject, remoteProject, logger);
+  const locallyChangedWorkflows = await findLocallyChangedWorkflows(
+    ws,
+    localProject
+  );
+  const diffs = reportDiff(
+    localProject,
+    remoteProject,
+    locallyChangedWorkflows,
+    logger
+  );
   if (!diffs.length) {
     logger.success("Nothing to deploy");
     return;
   }
-  const skipVersionTest = …
+  const skipVersionTest = remoteProject.workflows.find(
+    (wf) => wf.history.length === 0
+  );
   if (skipVersionTest) {
     logger.warn(
       "Skipping compatibility check as no local version history detected"
     );
-    logger.warn("Pushing these changes may …
-  } else …
-    …
-    …
+    logger.warn("Pushing these changes may overwrite changes made to the app");
+  } else {
+    const divergentWorkflows = hasRemoteDiverged(
+      localProject,
+      remoteProject,
+      locallyChangedWorkflows
+    );
+    if (divergentWorkflows) {
+      logger.warn(
+        `The following workflows have diverged: ${divergentWorkflows}`
+      );
+      if (!options8.force) {
+        logger.error(`Error: Projects have diverged!
 
-The remote project has been edited since the local project was branched. Changes may be lost.
+The remote project has been edited since the local project was branched. Changes may be lost.
 
-Pass --force to override this error and deploy anyway.`);
-        …
+Pass --force to override this error and deploy anyway.`);
+        return;
+      } else {
+        logger.warn(
+          "Remote project has diverged from local project! Pushing anyway as -f passed"
+        );
+      }
     } else {
-      logger.…
-      "Remote project has not diverged from local project …
+      logger.info(
+        "Remote project has not diverged from local project - it is safe to deploy \u{1F389}"
       );
     }
-  } else {
-    logger.info(
-      "Remote project has not diverged from local project - it is safe to deploy \u{1F389}"
-    );
   }
   logger.info("Merging changes into remote project");
   const merged = Project.merge(localProject, remoteProject, {
     mode: "replace",
-    force: true
+    force: true,
+    onlyUpdated: true
   });
   const state = merged.serialize("state", {
     format: "json"
@@ -2357,14 +2446,20 @@ Pass --force to override this error and deploy anyway.`);
     },
     merged.config
   );
+  updateForkedFrom(finalProject);
+  const configData = finalProject.generateConfig();
+  await writeFile6(
+    path10.resolve(options8.workspace, configData.path),
+    configData.content
+  );
   const finalOutputPath = getSerializePath(localProject, options8.workspace);
-  …
-  …
+  const fullFinalPath = await serialize(finalProject, finalOutputPath);
+  logger.debug("Updated local project at ", fullFinalPath);
+  logger.success("Updated project at", config2.endpoint);
   }
-  logger.success("Updated project at", config2.endpoint);
 }
-var reportDiff = (local, remote, logger) => {
-  const diffs = remote.diff(local);
+var reportDiff = (local, remote, locallyChangedWorkflows, logger) => {
+  const diffs = remote.diff(local, locallyChangedWorkflows);
   if (diffs.length === 0) {
     logger.info("No workflow changes detected");
     return diffs;
```
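The deploy flow now threads the locally-changed workflow list through `reportDiff` into `remote.diff`, and the merge gains `onlyUpdated: true` alongside `force`. Reduced to its skeleton (option and function names are from the diff; everything else is condensed and not standalone-runnable):

```js
const changed = await findLocallyChangedWorkflows(ws, localProject);
const diffs = reportDiff(localProject, remoteProject, changed, logger);
if (diffs.length) {
  const merged = Project.merge(localProject, remoteProject, {
    mode: "replace",
    force: true,
    onlyUpdated: true, // only locally-updated workflows replace remote ones
  });
}
```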
```diff
@@ -2451,30 +2546,30 @@ function pickFirst(...args) {
 var handler_default6 = deployHandler;
 
 // src/docgen/handler.ts
-import { writeFile as writeFile6 } from "node:fs/promises";
+import { writeFile as writeFile7 } from "node:fs/promises";
 import { readFileSync, writeFileSync, mkdirSync, rmSync } from "node:fs";
-import path10 from "node:path";
+import path11 from "node:path";
 import { describePackage } from "@openfn/describe-package";
 import { getNameAndVersion as getNameAndVersion4 } from "@openfn/runtime";
 var RETRY_DURATION = 500;
 var RETRY_COUNT = 20;
 var TIMEOUT_MS = 1e3 * 60;
 var actualDocGen = (specifier) => describePackage(specifier, {});
-var ensurePath = (filePath) => mkdirSync(path10.dirname(filePath), { recursive: true });
-var generatePlaceholder = (path17) => {
-  writeFileSync(path17, `{ "loading": true, "timestamp": ${Date.now()}}`);
+var ensurePath = (filePath) => mkdirSync(path11.dirname(filePath), { recursive: true });
+var generatePlaceholder = (path18) => {
+  writeFileSync(path18, `{ "loading": true, "timestamp": ${Date.now()}}`);
 };
 var finish = (logger, resultPath) => {
   logger.success("Done! Docs can be found at:\n");
-  logger.print(`  ${path10.resolve(resultPath)}`);
+  logger.print(`  ${path11.resolve(resultPath)}`);
 };
-var generateDocs = async (specifier, path17, docgen, logger) => {
+var generateDocs = async (specifier, path18, docgen, logger) => {
   const result = await docgen(specifier);
-  await writeFile6(path17, JSON.stringify(result, null, 2));
-  finish(logger, path17);
-  return path17;
+  await writeFile7(path18, JSON.stringify(result, null, 2));
+  finish(logger, path18);
+  return path18;
 };
-var waitForDocs = async (docs, path17, logger, retryDuration = RETRY_DURATION) => {
+var waitForDocs = async (docs, path18, logger, retryDuration = RETRY_DURATION) => {
   try {
     if (docs.hasOwnProperty("loading")) {
       logger.info("Docs are being loaded by another process. Waiting.");
@@ -2486,19 +2581,19 @@ var waitForDocs = async (docs, path17, logger, retryDuration = RETRY_DURATION) =
           clearInterval(i);
           reject(new Error("Timed out waiting for docs to load"));
         }
-        const updated = JSON.parse(readFileSync(path17, "utf8"));
+        const updated = JSON.parse(readFileSync(path18, "utf8"));
         if (!updated.hasOwnProperty("loading")) {
           logger.info("Docs found!");
           clearInterval(i);
-          resolve(path17);
+          resolve(path18);
         }
         count++;
       }, retryDuration);
     });
   } else {
-    logger.info(`Docs already written to cache at ${path17}`);
-    finish(logger, path17);
-    return path17;
+    logger.info(`Docs already written to cache at ${path18}`);
+    finish(logger, path18);
+    return path18;
   }
 } catch (e) {
   logger.error("Existing doc JSON corrupt. Aborting");
@@ -2515,28 +2610,28 @@ var docgenHandler = (options8, logger, docgen = actualDocGen, retryDuration = RE
     process.exit(9);
   }
   logger.success(`Generating docs for ${specifier}`);
-  const path17 = `${repoDir}/docs/${specifier}.json`;
-  ensurePath(path17);
+  const path18 = `${repoDir}/docs/${specifier}.json`;
+  ensurePath(path18);
   const handleError2 = () => {
     logger.info("Removing placeholder");
-    rmSync(path17);
+    rmSync(path18);
   };
   try {
-    const existing = readFileSync(path17, "utf8");
+    const existing = readFileSync(path18, "utf8");
     const json = JSON.parse(existing);
     if (json && json.timeout && Date.now() - json.timeout >= TIMEOUT_MS) {
       logger.info(`Expired placeholder found. Removing.`);
-      rmSync(path17);
+      rmSync(path18);
       throw new Error("TIMEOUT");
     }
-    return waitForDocs(json, path17, logger, retryDuration);
+    return waitForDocs(json, path18, logger, retryDuration);
   } catch (e) {
     if (e.message !== "TIMEOUT") {
-      logger.info(`Docs JSON not found at ${path17}`);
+      logger.info(`Docs JSON not found at ${path18}`);
     }
     logger.debug("Generating placeholder");
-    generatePlaceholder(path17);
-    return generateDocs(specifier, path17, docgen, logger).catch((e2) => {
+    generatePlaceholder(path18);
+    return generateDocs(specifier, path18, docgen, logger).catch((e2) => {
       logger.error("Error generating documentation");
      logger.error(e2);
      handleError2();
```
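The docgen handler's cross-process lock is unchanged apart from the rename: the first process writes a `{ "loading": true, ... }` placeholder into the docs cache, and competing processes poll the file until the `loading` key disappears (or the placeholder ages past `TIMEOUT_MS` and is removed). A minimal sketch of the protocol (the cache path is hypothetical):

```js
import { writeFileSync, readFileSync } from "node:fs";

const cachePath = "/tmp/repo/docs/adaptor.json"; // hypothetical
writeFileSync(cachePath, `{ "loading": true, "timestamp": ${Date.now()}}`);
// ...generate docs, then overwrite the placeholder with the real JSON.
// Meanwhile another process polls:
const docs = JSON.parse(readFileSync(cachePath, "utf8"));
const ready = !docs.hasOwnProperty("loading");
```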
```diff
@@ -2587,7 +2682,7 @@ var docsHandler = async (options8, logger) => {
     logger.success(`Showing docs for ${adaptorName} v${version}`);
   }
   logger.info("Generating/loading documentation...");
-  const path17 = await handler_default7(
+  const path18 = await handler_default7(
     {
       specifier: `${name}@${version}`,
       repoDir
@@ -2596,8 +2691,8 @@ var docsHandler = async (options8, logger) => {
     createNullLogger()
   );
   let didError = false;
-  if (path17) {
-    const source = await readFile5(path17, "utf8");
+  if (path18) {
+    const source = await readFile5(path18, "utf8");
     const data = JSON.parse(source);
     let desc;
     if (operation) {
@@ -2635,13 +2730,13 @@ var handler_default8 = docsHandler;
 // src/metadata/cache.ts
 import { getNameAndVersion as getNameAndVersion6 } from "@openfn/runtime";
 import { createHash } from "node:crypto";
-import { mkdir as mkdir4, readFile as readFile6, writeFile as writeFile7, readdir, rm } from "node:fs/promises";
-import path11 from "node:path";
+import { mkdir as mkdir4, readFile as readFile6, writeFile as writeFile8, readdir, rm } from "node:fs/promises";
+import path12 from "node:path";
 var UNSUPPORTED_FILE_NAME = "unsupported.json";
 var getCachePath2 = (repoDir, key) => {
-  const base = path11.join(repoDir, "meta");
+  const base = path12.join(repoDir, "meta");
   if (key) {
-    return path11.join(base, key.endsWith(".json") ? key : `${key}.json`);
+    return path12.join(base, key.endsWith(".json") ? key : `${key}.json`);
   }
   return base;
 };
```
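`getCachePath2` roots all metadata cache entries at `<repoDir>/meta`, appending `.json` to the key only when it isn't already there:

```js
// Expected resolutions (the repo directory is hypothetical):
getCachePath2("/repo", "abc123");           // "/repo/meta/abc123.json"
getCachePath2("/repo", "unsupported.json"); // "/repo/meta/unsupported.json"
getCachePath2("/repo");                     // "/repo/meta"
```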
```diff
@@ -2683,8 +2778,8 @@ var get2 = async (repoPath, key) => {
 };
 var set2 = async (repoPath, key, result) => {
   const p = getCachePath2(repoPath, key);
-  await mkdir4(path11.dirname(p), { recursive: true });
-  await writeFile7(p, JSON.stringify(result));
+  await mkdir4(path12.dirname(p), { recursive: true });
+  await writeFile8(p, JSON.stringify(result));
 };
 var getUnsupportedCachePath = (repoDir) => {
   return getCachePath2(repoDir, UNSUPPORTED_FILE_NAME);
@@ -2742,8 +2837,8 @@ var markAdaptorAsUnsupported = async (adaptorSpecifier, repoDir) => {
       majorMinor: parsed.majorMinor,
       timestamp: Date.now()
     };
-    await mkdir4(path11.dirname(cachePath), { recursive: true });
-    await writeFile7(cachePath, JSON.stringify(cache, null, 2));
+    await mkdir4(path12.dirname(cachePath), { recursive: true });
+    await writeFile8(cachePath, JSON.stringify(cache, null, 2));
   }
 };
 
@@ -2864,7 +2959,7 @@ var metadataHandler = async (options8, logger) => {
 var handler_default9 = metadataHandler;
 
 // src/pull/handler.ts
-import path14 from "path";
+import path15 from "path";
 import fs5 from "node:fs/promises";
 import {
   getConfig as getConfig2,
@@ -2875,12 +2970,12 @@ import {
 } from "@openfn/deploy";
 
 // src/projects/pull.ts
-import { Workspace as Workspace5 } from "@openfn/project";
+import { Workspace as Workspace6 } from "@openfn/project";
 
 // src/projects/fetch.ts
-import path12 from "node:path";
-import Project2, { Workspace as Workspace3 } from "@openfn/project";
-import { writeFile as writeFile8 } from "node:fs/promises";
+import path13 from "node:path";
+import Project2, { Workspace as Workspace4 } from "@openfn/project";
+import { writeFile as writeFile9 } from "node:fs/promises";
 var options2 = [
   alias,
   apiKey,
@@ -2912,7 +3007,7 @@ var printProjectName2 = (project) => `${project.qname} (${project.id})`;
 var fetchV1 = async (options8, logger) => {
   const workspacePath = options8.workspace ?? process.cwd();
   logger.debug("Using workspace at", workspacePath);
-  const workspace2 = new Workspace3(workspacePath, logger, false);
+  const workspace2 = new Workspace4(workspacePath, logger, false);
   const localProject = workspace2.get(options8.project);
   if (localProject) {
     logger.debug(
@@ -2938,7 +3033,7 @@ var fetchV1 = async (options8, logger) => {
     options8.outputPath
   );
   logger.success(`Fetched project file to ${finalOutputPath}`);
-  await writeFile8(finalOutputPath, JSON.stringify(data, null, 2));
+  await writeFile9(finalOutputPath, JSON.stringify(data, null, 2));
   return data;
 };
 var handler2 = async (options8, logger) => {
@@ -2950,9 +3045,14 @@ var handler2 = async (options8, logger) => {
 var fetchV2 = async (options8, logger) => {
   const workspacePath = options8.workspace ?? process.cwd();
   logger.debug("Using workspace at", workspacePath);
-  const workspace2 = new Workspace3(workspacePath, logger, false);
+  const workspace2 = new Workspace4(workspacePath, logger, false);
   const { outputPath: outputPath2 } = options8;
   const remoteProject = await fetchRemoteProject(workspace2, options8, logger);
+  if (!options8.alias && remoteProject.sandbox?.parentId) {
+    options8.alias = remoteProject.id;
+    remoteProject.cli.alias = options8.alias;
+    logger.debug("Defaulting alias to sandbox id", options8.alias);
+  }
   if (!options8.force && options8.format !== "state") {
     const localTargetProject = await resolveOutputProject(
       workspace2,
```
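The genuinely new logic in `fetchV2` is the alias defaulting: when no `--alias` is passed and the fetched project reports a `sandbox.parentId`, the project's own id becomes the alias and is written back onto `remoteProject.cli.alias`. In isolation:

```js
// Hedged restatement of the new rule (field names taken from the diff):
if (!options.alias && remoteProject.sandbox?.parentId) {
  options.alias = remoteProject.id;        // default alias to the sandbox id
  remoteProject.cli.alias = options.alias; // persist it on the project
}
```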
```diff
@@ -2968,7 +3068,7 @@ var fetchV2 = async (options8, logger) => {
   );
   let format2 = options8.format;
   if (outputPath2) {
-    const ext = path12.extname(outputPath2).substring(1);
+    const ext = path13.extname(outputPath2).substring(1);
     if (ext.length) {
       format2 = ext;
     }
@@ -3036,7 +3136,7 @@ async function fetchRemoteProject(workspace2, options8, logger) {
     logger.debug(
       `Resolved ${options8.project} to UUID ${projectUUID} from local project ${printProjectName2(
         localProject
-      )}
+      )}`
     );
   }
   const projectEndpoint = localProject?.openfn?.endpoint ?? config2.endpoint;
@@ -3090,23 +3190,15 @@ To ignore this error and override the local file, pass --force (-f)
       delete error.stack;
       throw error;
     }
-    const hasAnyHistory = remoteProject.workflows.find(
-      (w) => w.workflow.history?.length
-    );
-    const skipVersionCheck = options8.force || // The user forced the checkout
-    !hasAnyHistory;
-    if (!skipVersionCheck && !remoteProject.canMergeInto(localProject)) {
-      throw new Error("Error! An incompatible project exists at this location");
-    }
   }
 }
 
 // src/projects/checkout.ts
-import Project3, { Workspace as Workspace4 } from "@openfn/project";
-import path13 from "path";
+import Project3, { Workspace as Workspace5 } from "@openfn/project";
+import path14 from "path";
 import fs4 from "fs";
 import { rimraf as rimraf2 } from "rimraf";
-var options3 = [log, workspace, clean2];
+var options3 = [log, workspace, clean2, force];
 var command3 = {
   command: "checkout <project>",
   describe: "Switch to a different OpenFn project in the same workspace",
@@ -3119,12 +3211,12 @@ var command3 = {
 var handler3 = async (options8, logger) => {
   const projectIdentifier = options8.project;
   const workspacePath = options8.workspace ?? process.cwd();
-  const workspace2 = new Workspace4(workspacePath, logger);
+  const workspace2 = new Workspace5(workspacePath, logger);
   const { project: _, ...config2 } = workspace2.getConfig();
   const currentProject = workspace2.getActiveProject();
   let switchProject;
   if (/\.(yaml|json)$/.test(projectIdentifier)) {
-    const filePath = projectIdentifier.startsWith("/") ? projectIdentifier : path13.join(workspacePath, projectIdentifier);
+    const filePath = projectIdentifier.startsWith("/") ? projectIdentifier : path14.join(workspacePath, projectIdentifier);
     logger.debug("Loading project from path ", filePath);
     switchProject = await Project3.from("path", filePath, config2);
   } else {
@@ -3135,18 +3227,51 @@ var handler3 = async (options8, logger) => {
       `Project with id ${projectIdentifier} not found in the workspace`
     );
   }
+  try {
+    const localProject = await Project3.from("fs", {
+      root: options8.workspace || "."
+    });
+    logger.success(`Loaded local project ${localProject.alias}`);
+    const changed = await findLocallyChangedWorkflows(
+      workspace2,
+      localProject,
+      "assume-ok"
+    );
+    if (changed.length && !options8.force) {
+      logger.break();
+      logger.warn(
+        "WARNING: detected changes on your currently checked-out project"
+      );
+      logger.warn(
+        `Changes may be lost by checking out ${localProject.alias} right now`
+      );
+      logger.warn(`Pass --force or -f to override this warning and continue`);
+      const e = new Error(
+        `The currently checked out project has diverged! Changes may be lost`
+      );
+      delete e.stack;
+      throw e;
+    }
+  } catch (e) {
+    if (e.message.match("ENOENT")) {
+      logger.debug("No openfn.yaml found locally: skipping divergence test");
+    } else {
+      throw e;
+    }
+  }
   if (options8.clean) {
     await rimraf2(workspace2.workflowsPath);
   } else {
     await tidyWorkflowDir(currentProject, switchProject);
   }
+  updateForkedFrom(switchProject);
   const files = switchProject.serialize("fs");
   for (const f in files) {
     if (files[f]) {
-      fs4.mkdirSync(path13.join(workspacePath, path13.dirname(f)), {
+      fs4.mkdirSync(path14.join(workspacePath, path14.dirname(f)), {
         recursive: true
       });
-      fs4.writeFileSync(path13.join(workspacePath, f), files[f]);
+      fs4.writeFileSync(path14.join(workspacePath, f), files[f]);
     } else {
       logger.warn("WARNING! No content for file", f);
     }
```
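The new checkout guard boils down to one rule: if the active project has workflows that changed since their recorded fork point and `--force` (`-f`) was not passed, abort; a missing `openfn.yaml` (the `ENOENT` case) skips the test entirely. As a sketch:

```js
// Decision rule only; the error text is taken from the diff.
const changed = await findLocallyChangedWorkflows(workspace, localProject, "assume-ok");
if (changed.length && !options.force) {
  throw new Error("The currently checked out project has diverged! Changes may be lost");
}
```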
```diff
@@ -3193,7 +3318,7 @@ var ensureProjectId = (options8, logger) => {
   logger?.debug(
     "No project ID specified: looking up checked out project in Workspace"
   );
-  const ws = new Workspace5(options8.workspace);
+  const ws = new Workspace6(options8.workspace);
   if (ws.activeProject) {
     options8.project = ws.activeProject.uuid;
     logger?.info(
@@ -3266,7 +3391,7 @@ async function pullHandler(options8, logger) {
     process.exitCode = 1;
     process.exit(1);
   }
-  const resolvedPath = path14.resolve(config2.specPath);
+  const resolvedPath = path15.resolve(config2.specPath);
   logger.debug("reading spec from", resolvedPath);
   const updatedSpec = await syncRemoteSpec(
     await res.text(),
@@ -3275,7 +3400,7 @@ async function pullHandler(options8, logger) {
     logger
   );
   await fs5.writeFile(
-    path14.resolve(config2.statePath),
+    path15.resolve(config2.statePath),
     JSON.stringify(state, null, 2)
   );
   await fs5.writeFile(resolvedPath, updatedSpec);
@@ -3320,7 +3445,7 @@ __export(projects_exports, {
 });
 
 // src/projects/list.ts
-import { Workspace as Workspace6 } from "@openfn/project";
+import { Workspace as Workspace7 } from "@openfn/project";
 var options5 = [log, workspace];
 var command5 = {
   command: "list [project-path]",
@@ -3333,7 +3458,7 @@ var handler5 = async (options8, logger) => {
   logger.info("Searching for projects in workspace at:");
   logger.info("  ", options8.workspace);
   logger.break();
-  const workspace2 = new Workspace6(options8.workspace);
+  const workspace2 = new Workspace7(options8.workspace);
   if (!workspace2.valid) {
     throw new Error("No OpenFn projects found");
   }
@@ -3351,7 +3476,7 @@ ${project.workflows.map((w) => "  - " + w.id).join("\n")}`;
 }
 
 // src/projects/version.ts
-import { Workspace as Workspace7 } from "@openfn/project";
+import { Workspace as Workspace8 } from "@openfn/project";
 var options6 = [workflow, workspace, workflowMappings];
 var command6 = {
   command: "version [workflow]",
@@ -3360,7 +3485,7 @@ var command6 = {
   builder: (yargs) => build(options6, yargs)
 };
 var handler6 = async (options8, logger) => {
-  const workspace2 = new Workspace7(options8.workspace);
+  const workspace2 = new Workspace8(options8.workspace);
   if (!workspace2.valid) {
     logger.error("Command was run in an invalid openfn workspace");
     return;
@@ -3395,8 +3520,8 @@ ${final}`);
 };
 
 // src/projects/merge.ts
-import Project5, { Workspace as Workspace8 } from "@openfn/project";
-import path15 from "node:path";
+import Project5, { Workspace as Workspace9 } from "@openfn/project";
+import path16 from "node:path";
 import fs6 from "node:fs/promises";
 var options7 = [
   removeUnmapped,
@@ -3431,14 +3556,14 @@ var command7 = {
 };
 var handler7 = async (options8, logger) => {
   const workspacePath = options8.workspace;
-  const workspace2 = new Workspace8(workspacePath);
+  const workspace2 = new Workspace9(workspacePath);
   if (!workspace2.valid) {
     logger.error("Command was run in an invalid openfn workspace");
     return;
   }
   let targetProject;
   if (options8.base) {
-    const basePath = path15.resolve(options8.base);
+    const basePath = path16.resolve(options8.base);
     logger.debug("Loading target project from path", basePath);
     targetProject = await Project5.from("path", basePath);
   } else {
@@ -3452,7 +3577,7 @@ var handler7 = async (options8, logger) => {
   const sourceProjectIdentifier = options8.project;
   let sourceProject;
   if (/\.(ya?ml|json)$/.test(sourceProjectIdentifier)) {
-    const filePath = path15.join(workspacePath, sourceProjectIdentifier);
+    const filePath = path16.join(workspacePath, sourceProjectIdentifier);
     logger.debug("Loading source project from path ", filePath);
     sourceProject = await Project5.from("path", filePath);
   } else {
@@ -3515,7 +3640,7 @@ var handler7 = async (options8, logger) => {
 
 // src/util/print-versions.ts
 import { readFileSync as readFileSync2 } from "node:fs";
-import path16 from "node:path";
+import path17 from "node:path";
 import url from "node:url";
 import { getNameAndVersion as getNameAndVersion7 } from "@openfn/runtime";
 import { mainSymbols } from "figures";
@@ -3527,7 +3652,7 @@ var { triangleRightSmall: t } = mainSymbols;
 var loadVersionFromPath = (adaptorPath) => {
   try {
     const pkg = JSON.parse(
-      readFileSync2(path16.resolve(adaptorPath, "package.json"), "utf8")
+      readFileSync2(path17.resolve(adaptorPath, "package.json"), "utf8")
     );
     return pkg.version;
   } catch (e) {
@@ -3562,7 +3687,7 @@ var printVersions = async (logger, options8 = {}, includeComponents = false) =>
     ...[NODE, CLI2, RUNTIME2, COMPILER2, longestAdaptorName].map((s) => s.length)
   );
   const prefix = (str) => `  ${t} ${str.padEnd(longest + 4, " ")}`;
-  const dirname3 = path16.dirname(url.fileURLToPath(import.meta.url));
+  const dirname3 = path17.dirname(url.fileURLToPath(import.meta.url));
   const pkg = JSON.parse(readFileSync2(`${dirname3}/../../package.json`, "utf8"));
   const { version, dependencies } = pkg;
   const compilerVersion = dependencies["@openfn/compiler"];
```
package/package.json
CHANGED

```diff
@@ -1,6 +1,6 @@
 {
   "name": "@openfn/cli",
-  "version": "1.25.0",
+  "version": "1.26.0",
   "description": "CLI devtools for the OpenFn toolchain",
   "engines": {
     "node": ">=18",
@@ -50,13 +50,13 @@
     "undici": "7.12.0",
     "ws": "^8.18.3",
     "yargs": "^17.7.2",
-    "@openfn/describe-package": "0.1.5",
-    "@openfn/lexicon": "^1.4.0",
     "@openfn/compiler": "1.2.2",
     "@openfn/deploy": "0.11.5",
     "@openfn/logger": "1.1.1",
-    "@openfn/…
-    "@openfn/…
+    "@openfn/lexicon": "^1.4.1",
+    "@openfn/project": "^0.13.0",
+    "@openfn/runtime": "1.8.3",
+    "@openfn/describe-package": "0.1.5"
   },
   "files": [
     "dist",
```