@openfn/project 0.11.0 → 0.12.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +12 -0
- package/dist/index.d.ts +38 -2
- package/dist/index.js +121 -34
- package/package.json +2 -2
package/README.md
CHANGED
@@ -8,6 +8,18 @@ A single Project can be Checked Out to disk at a time, meaning its source workfl
 
 A Workspace is a set of related Projects , including a Project and its associated Sandboxes, or a Project deployed to apps in multiple web domains
 
+## Structure and Artifects
+
+openfn.yaml
+
+project file
+
+sort of a mix of project.yaml, state.json and config.json
+
+This is strictly a representation of a server-side project, it's like the last-sync-state. CLI-only or offline projects do not have one.
+
+It's also a portable representation of the project
+
 ### Serializing and Parsing
 
 The main idea of Projects is that a Project represents a set of OpenFn workflows defined in any format and present a standard JS-friendly interface to manipulate and reason about them.
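The README's point that a Project exposes one standard JS-friendly interface over any source format lines up with the typed entry points visible further down in this diff (`Project.from('fs' | 'state' | 'project', ...)`). A minimal sketch of that idea, assuming a checked-out project at `./my-project` and a downloaded app-state snapshot at `./state.json` (both paths are hypothetical):

```typescript
import { readFile } from 'node:fs/promises';
import Project from '@openfn/project';

// Load the same logical project from two different representations;
// both calls resolve to a Project instance with the same interface.
const fromDisk = await Project.from('fs', { root: './my-project' });

const state = JSON.parse(await readFile('./state.json', 'utf8'));
const fromState = await Project.from('state', state);

// Either instance can now be inspected the same way, e.g. listing
// workflow ids or mapping human-readable ids to server UUIDs.
console.log(fromDisk.workflows.map((wf) => wf.id));
console.log(fromState.getUUIDMap());
```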
package/dist/index.d.ts
CHANGED
@@ -1,8 +1,36 @@
 import * as l from '@openfn/lexicon';
-import l__default, { WorkspaceConfig, UUID } from '@openfn/lexicon';
+import l__default, { SandboxMeta, WorkspaceConfig, UUID } from '@openfn/lexicon';
 import { Logger } from '@openfn/logger';
 import { Provisioner } from '@openfn/lexicon/lightning';
 
+type DiffType = 'added' | 'changed' | 'removed';
+type WorkflowDiff = {
+    id: string;
+    type: DiffType;
+};
+/**
+ * Compare two projects and return a list of workflow changes showing how
+ * project B has diverged from project A.
+ *
+ * Workflows are identified by their ID and compared using version hashes.
+ *
+ * @param a - The baseline project (e.g., main branch)
+ * @param b - The comparison project (e.g., staging branch)
+ * @returns Array of workflow diffs indicating how B differs from A:
+ *   - 'added': workflow exists in B but not in A
+ *   - 'removed': workflow exists in A but not in B
+ *   - 'changed': workflow exists in both but has different version hashes
+ *
+ * @example
+ * ```typescript
+ * const main = await Project.from('fs', { root: '.' });
+ * const staging = await Project.from('state', stagingState);
+ * const diffs = diff(main, staging);
+ * // Shows how staging has diverged from main
+ * ```
+ */
+declare function diff(a: Project, b: Project): WorkflowDiff[];
+
 type WithMeta<T> = T & {
     openfn?: l.NodeMeta;
 };
@@ -30,6 +58,7 @@ declare class Workflow {
     getUUIDMap(): Record<string, string>;
     getVersionHash(): string;
     pushHistory(versionHash: string): void;
+    get history(): string[];
     canMergeInto(target: Workflow): boolean;
 }
 
@@ -60,10 +89,13 @@ type SerializedWorkflow = {
     openfn?: l.ProjectMeta;
 };
 
+declare const SANDBOX_MERGE = "sandbox";
+declare const REPLACE_MERGE = "replace";
 type MergeProjectOptions = {
     workflowMappings: Record<string, string>;
     removeUnmapped: boolean;
     force: boolean;
+    mode: typeof SANDBOX_MERGE | typeof REPLACE_MERGE;
 };
 
 declare class Workspace {
@@ -77,6 +109,8 @@ declare class Workspace {
     constructor(workspacePath: string, logger?: Logger, validate?: boolean);
     loadProject(): void;
     list(): Project[];
+    get projectsPath(): string;
+    get workflowsPath(): string;
     /** Get a project by its alias, id or UUID. Can also include a UUID */
     get(nameyThing: string): Project | null;
     getProjectPath(id: string): string | undefined;
@@ -118,6 +152,7 @@ declare class Project {
     config: l__default.WorkspaceConfig;
     collections: any;
     credentials: string[];
+    sandbox?: SandboxMeta;
     static from(type: 'project', data: any, options: never): Promise<Project>;
     static from(type: 'state', data: Provisioner.Project, meta?: Partial<l__default.ProjectMeta>, config?: fromAppStateConfig): Promise<Project>;
     static from(type: 'fs', options: FromFsConfig): Promise<Project>;
@@ -142,6 +177,7 @@ declare class Project {
     * Returns a map of ids:uuids for everything in the project
     */
    getUUIDMap(): UUIDMap;
+    diff(project: Project): WorkflowDiff[];
    canMergeInto(target: Project): boolean;
}
 
@@ -166,4 +202,4 @@ type GenerateProjectOptions = GenerateWorkflowOptions & {
 declare function generateWorkflow(def: string, options?: Partial<GenerateWorkflowOptions>): Workflow;
 declare function generateProject(name: string, workflowDefs: string[], options?: Partial<GenerateProjectOptions>): Project;
 
-export { Workspace, Project as default, generateProject, generateWorkflow, jsonToYaml, yamlToJson };
+export { DiffType, WorkflowDiff, Workspace, Project as default, diff, generateProject, generateWorkflow, jsonToYaml, yamlToJson };
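The headline addition in these typings is the `diff` export, also surfaced as an instance method on `Project`. A usage sketch following the bundled JSDoc example — `stagingState` stands in for a provisioner snapshot fetched elsewhere:

```typescript
import Project, { diff, WorkflowDiff } from '@openfn/project';
import type { Provisioner } from '@openfn/lexicon/lightning';

// Assumed to be fetched from the Lightning provisioner API.
declare const stagingState: Provisioner.Project;

const main = await Project.from('fs', { root: '.' });
const staging = await Project.from('state', stagingState);

// How has staging diverged from main?
const changes: WorkflowDiff[] = diff(main, staging);
for (const { id, type } of changes) {
  console.log(`${id}: ${type}`); // type is 'added' | 'removed' | 'changed'
}

// The new instance method delegates to the same function:
const sameChanges: WorkflowDiff[] = main.diff(staging);
```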
package/dist/index.js
CHANGED
@@ -236,6 +236,9 @@ var Workflow = class {
   pushHistory(versionHash) {
     this.workflow.history?.push(versionHash);
   }
+  get history() {
+    return this.workflow.history ?? [];
+  }
   // return true if the current workflow can be merged into the target workflow without losing any changes
   canMergeInto(target) {
     const thisHistory = this.workflow.history?.concat(this.getVersionHash()) ?? [];
@@ -305,7 +308,10 @@ function to_app_state_default(project, options = {}) {
   state.id = uuid;
   Object.assign(state, rest, project.options);
   state.project_credentials = project.credentials ?? [];
-  state.workflows = project.workflows.map(mapWorkflow)
+  state.workflows = project.workflows.map(mapWorkflow).reduce((obj, wf) => {
+    obj[slugify(wf.name ?? wf.id)] = wf;
+    return obj;
+  }, {});
   const shouldReturnYaml = options.format === "yaml" || !options.format && project.config.formats.project === "yaml";
   if (shouldReturnYaml) {
     return jsonToYaml(state);
@@ -320,9 +326,9 @@ var mapWorkflow = (workflow) => {
   const wfState = {
     ...originalOpenfnProps,
     id: workflow.openfn?.uuid ?? randomUUID(),
-    jobs:
-    triggers:
-    edges:
+    jobs: {},
+    triggers: {},
+    edges: {},
     lock_version: workflow.openfn?.lock_version ?? null
     // TODO needs testing
   };
@@ -346,7 +352,7 @@ var mapWorkflow = (workflow) => {
         type: s.type,
         ...renameKeys(s.openfn, { uuid: "id" })
       };
-      wfState.triggers.
+      wfState.triggers[node.type] = node;
     } else {
       node = omitBy(pick(s, ["name", "adaptor"]), isNil);
       const { uuid: uuid2, ...otherOpenFnProps } = s.openfn ?? {};
@@ -358,7 +364,7 @@ var mapWorkflow = (workflow) => {
         otherOpenFnProps.project_credential_id = s.configuration;
       }
       Object.assign(node, defaultJobProps, otherOpenFnProps);
-      wfState.jobs.
+      wfState.jobs[s.id ?? slugify(s.name)] = node;
     }
     Object.keys(s.next ?? {}).forEach((next) => {
       const rules = s.next[next];
@@ -388,10 +394,15 @@ var mapWorkflow = (workflow) => {
           e.condition_expression = rules.condition;
         }
       }
-      wfState.edges.
+      wfState.edges[`${s.id}->${next}`] = e;
    });
  });
-  wfState.edges =
+  wfState.edges = Object.keys(wfState.edges).sort(
+    (a, b) => `${wfState.edges[a].id}`.localeCompare("" + wfState.edges[b].id)
+  ).reduce((obj, key) => {
+    obj[key] = wfState.edges[key];
+    return obj;
+  }, {});
   return wfState;
 };
 
@@ -416,16 +427,19 @@ var buildConfig = (config = {}) => ({
     workflow: config.formats?.workflow ?? "yaml"
   }
 });
-var extractConfig = (source) => {
+var extractConfig = (source, format) => {
   const project = {
     ...source.openfn || {},
     id: source.id
   };
+  if (source.name) {
+    project.name = source.name;
+  }
   const workspace = {
     ...source.config
   };
   const content = { project, workspace };
-
+  format = format ?? workspace.formats.openfn;
   if (format === "yaml") {
     return {
       path: "openfn.yaml",
@@ -621,6 +635,11 @@ var to_project_default = (project, options = {}) => {
     },
     isNil4
   );
+  if (project.sandbox?.parentId) {
+    proj.sandbox = {
+      parentId: project.sandbox.parentId
+    };
+  }
   const format = options.format ?? proj.config?.formats.project;
   if (format === "json") {
     return proj;
@@ -654,6 +673,7 @@ var from_app_state_default = (state, meta = {}, config = {}) => {
     collections,
     inserted_at,
     updated_at,
+    parent_id,
     ...options
   } = stateJson;
   const proj = {
@@ -672,7 +692,12 @@ var from_app_state_default = (state, meta = {}, config = {}) => {
     inserted_at,
     updated_at
   };
-
+  if (parent_id) {
+    proj.sandbox = {
+      parentId: parent_id
+    };
+  }
+  proj.workflows = Object.values(stateJson.workflows).map(mapWorkflow2);
   return new Project(proj, config);
 };
 var mapEdge = (edge) => {
@@ -705,20 +730,22 @@ var mapWorkflow2 = (workflow) => {
   if (workflow.name) {
     mapped.id = slugify(workflow.name);
   }
-  workflow.triggers.forEach((trigger) => {
+  Object.values(workflow.triggers).forEach((trigger) => {
     const { type, ...otherProps } = trigger;
     if (!mapped.start) {
-      mapped.start =
+      mapped.start = type;
     }
-    const connectedEdges = edges.filter(
+    const connectedEdges = Object.values(edges).filter(
       (e) => e.source_trigger_id === trigger.id
     );
     mapped.steps.push({
-      id:
+      id: type,
       type,
       openfn: renameKeys(otherProps, { id: "uuid" }),
       next: connectedEdges.reduce((obj, edge) => {
-        const target = jobs.find(
+        const target = Object.values(jobs).find(
+          (j) => j.id === edge.target_job_id
+        );
         if (!target) {
           throw new Error(`Failed to find ${edge.target_job_id}`);
         }
@@ -727,8 +754,8 @@ var mapWorkflow2 = (workflow) => {
       }, {})
     });
   });
-  workflow.jobs.forEach((step) => {
-    const outboundEdges = edges.filter(
+  Object.values(workflow.jobs).forEach((step) => {
+    const outboundEdges = Object.values(edges).filter(
       (e) => e.source_job_id === step.id || e.source_trigger_id === step.id
     );
     const {
@@ -751,7 +778,9 @@ var mapWorkflow2 = (workflow) => {
     }
     if (outboundEdges.length) {
       s.next = outboundEdges.reduce((next, edge) => {
-        const target = jobs.find(
+        const target = Object.values(jobs).find(
+          (j) => j.id === edge.target_job_id
+        );
         next[slugify(target.name)] = mapEdge(edge);
         return next;
       }, {});
@@ -908,7 +937,7 @@ function baseMerge(target, source, sourceKeys, assigns = {}) {
   return assign(target, { ...pickedSource, ...assigns });
 }
 
-// src/merge/merge-
+// src/merge/merge-workflow.ts
 var clone2 = (obj) => JSON.parse(JSON.stringify(obj));
 function mergeWorkflows(source, target, mappings) {
   const targetNodes = {};
@@ -953,8 +982,17 @@ function mergeWorkflows(source, target, mappings) {
   return {
     ...target,
     ...newSource,
-
-
+    history: source.history ?? target.history,
+    openfn: {
+      ...target.openfn,
+      ...source.openfn,
+      // preserving the target uuid. we might need a proper helper function for this
+      uuid: target.openfn?.uuid
+    },
+    options: {
+      ...target.options,
+      ...source.options
+    }
   };
 }
 
@@ -1206,14 +1244,20 @@ function getDuplicates(arr) {
 }
 
 // src/merge/merge-project.ts
+var SANDBOX_MERGE = "sandbox";
 var UnsafeMergeError = class extends Error {
 };
+var defaultOptions = {
+  workflowMappings: {},
+  removeUnmapped: false,
+  force: true,
+  /**
+   * If mode is sandbox, basically only content will be merged and all metadata/settings/options/config is ignored
+   * If mode is replace, all properties on the source will override the target (including UUIDs, name)
+   */
+  mode: SANDBOX_MERGE
+};
 function merge(source, target, opts) {
-  const defaultOptions = {
-    workflowMappings: {},
-    removeUnmapped: false,
-    force: true
-  };
   const options = defaultsDeep(
     opts,
     defaultOptions
@@ -1274,13 +1318,47 @@ Pass --force to force the merge anyway`
       }
     }
   }
+  const assigns = options.mode === SANDBOX_MERGE ? {
+    workflows: finalWorkflows
+  } : {
+    workflows: finalWorkflows,
+    openfn: {
+      ...target.openfn,
+      ...source.openfn
+    },
+    options: {
+      ...target.options,
+      ...source.options
+    },
+    name: source.name ?? target.name,
+    description: source.description ?? target.description,
+    credentials: source.credentials ?? target.credentials,
+    collections: source.collections ?? target.collections
+  };
   return new Project(
-    baseMerge(target, source, ["collections"],
-      workflows: finalWorkflows
-    })
+    baseMerge(target, source, ["collections"], assigns)
   );
 }
 
+// src/util/project-diff.ts
+function diff(a, b) {
+  const diffs = [];
+  for (const workflowA of a.workflows) {
+    const workflowB = b.getWorkflow(workflowA.id);
+    if (!workflowB) {
+      diffs.push({ id: workflowA.id, type: "removed" });
+    } else if (workflowA.getVersionHash() !== workflowB.getVersionHash()) {
+      diffs.push({ id: workflowA.id, type: "changed" });
+    }
+  }
+  for (const workflowB of b.workflows) {
+    if (!a.getWorkflow(workflowB.id)) {
+      diffs.push({ id: workflowB.id, type: "added" });
+    }
+  }
+  return diffs;
+}
+
 // src/Project.ts
 var maybeCreateWorkflow = (wf) => wf instanceof Workflow_default ? wf : new Workflow_default(wf);
 var Project = class {
@@ -1308,6 +1386,7 @@ var Project = class {
   config;
   collections;
   credentials;
+  sandbox;
   static async from(type, data, ...rest) {
     switch (type) {
       case "project":
@@ -1332,10 +1411,6 @@ var Project = class {
   static merge(source, target, options) {
     return merge(source, target, options);
   }
-  // env is excluded because it's not really part of the project
-  // uh maybe
-  // maybe this second arg is config - like env, branch rules, serialisation rules
-  // stuff that's external to the actual project and managed by the repo
   // TODO maybe the constructor is (data, Workspace)
   constructor(data = {}, meta) {
     this.id = data.id ?? (data.name ? slugify(data.name) : humanId({ separator: "-", capitalize: false }));
@@ -1354,6 +1429,7 @@ var Project = class {
     this.workflows = data.workflows?.map(maybeCreateWorkflow) ?? [];
     this.collections = data.collections;
     this.credentials = data.credentials;
+    this.sandbox = data.sandbox;
   }
   /** Local alias for the project. Comes from the file name. Not shared with Lightning. */
   get alias() {
@@ -1413,6 +1489,10 @@ var Project = class {
     }
     return result;
   }
+  // Compare this project with another and return a list of workflow changes
+  diff(project) {
+    return diff(this, project);
+  }
   canMergeInto(target) {
     const potentialConflicts = {};
     for (const sourceWorkflow of this.workflows) {
@@ -1547,6 +1627,12 @@ var Workspace = class {
   list() {
     return this.projects;
   }
+  get projectsPath() {
+    return path4.join(this.root, this.config.dirs.projects);
+  }
+  get workflowsPath() {
+    return path4.join(this.root, this.config.dirs.workflows);
+  }
   /** Get a project by its alias, id or UUID. Can also include a UUID */
   get(nameyThing) {
     return match_project_default(nameyThing, this.projects);
@@ -1780,6 +1866,7 @@ var src_default = Project;
 export {
   Workspace,
   src_default as default,
+  diff,
   generateProject,
   generateWorkflow,
   jsonToYaml,
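The compiled output also clarifies the new `mode` merge option: in `'sandbox'` mode (the default) only workflow content is merged back, while `'replace'` mode lets source-level properties such as name, options, credentials and collections override the target. A hedged sketch via the existing static `Project.merge`, assuming `sandboxProject` and `parentProject` have already been loaded:

```typescript
import Project from '@openfn/project';

// Both assumed to be loaded elsewhere, e.g. via Project.from(...).
declare const sandboxProject: Project; // a sandbox with edited workflows
declare const parentProject: Project;  // the project it was branched from

// 'sandbox' mode (the default): merge workflow content only; the
// target's metadata, settings and UUIDs are left untouched.
const merged = Project.merge(sandboxProject, parentProject, {
  workflowMappings: {},
  removeUnmapped: false,
  force: true,
  mode: 'sandbox',
});

// 'replace' mode: source properties (name, options, credentials,
// collections) override the target as well.
const replaced = Project.merge(sandboxProject, parentProject, {
  workflowMappings: {},
  removeUnmapped: false,
  force: true,
  mode: 'replace',
});
```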
package/package.json
CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "@openfn/project",
-  "version": "0.11.0",
+  "version": "0.12.1",
   "description": "Read, serialize, replicate and sync OpenFn projects",
   "type": "module",
   "exports": {
@@ -34,7 +34,7 @@
     "lodash-es": "^4.17.21",
     "ohm-js": "^17.2.1",
     "yaml": "^2.2.2",
-    "@openfn/lexicon": "^1.
+    "@openfn/lexicon": "^1.4.0",
     "@openfn/logger": "1.1.1"
   },
   "files": [