@openfn/project 0.1.1 → 0.2.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/index.d.ts CHANGED
@@ -1,5 +1,45 @@
1
1
  import * as l from '@openfn/lexicon';
2
2
 
3
+ type OpenfnMeta = {
4
+ uuid?: string;
5
+ };
6
+ type WithMeta<T> = T & {
7
+ openfn?: OpenfnMeta;
8
+ };
9
+ declare class Workflow {
10
+ #private;
11
+ workflow: l.Workflow;
12
+ index: {
13
+ steps: {};
14
+ edges: {};
15
+ uuid: {};
16
+ id: {};
17
+ };
18
+ name: string;
19
+ id: string;
20
+ openfn: OpenfnMeta;
21
+ steps: WithMeta<l.Job | l.Trigger>[];
22
+ constructor(workflow: l.Workflow);
23
+ get steps(): (l.Job | l.Trigger)[];
24
+ set(id: string, props: Partial<l.Job | l.Edge>): this;
25
+ get(id: any): WithMeta<l.Step | l.Trigger | l.Edge>;
26
+ meta(id: any): OpenfnMeta;
27
+ getEdge(from: any, to: any): WithMeta<l.ConditionalStepEdge>;
28
+ getAllEdges(): Record<string, string[]>;
29
+ getStep(id: string): Workflow["steps"][number];
30
+ getRoot(): (l.Trigger & {
31
+ openfn?: OpenfnMeta;
32
+ }) | undefined;
33
+ getUUID(id: any): string;
34
+ toJSON(): JSONObject;
35
+ }
36
+
37
+ type MergeProjectOptions = Partial<{
38
+ workflowMappings: Record<string, string>;
39
+ removeUnmapped: boolean;
40
+ force: boolean;
41
+ }>;
42
+
3
43
  type FileFormats = 'yaml' | 'json';
4
44
  type RepoOptions = {
5
45
  /**default workflow root when serializing to fs (relative to openfn.yaml) */
@@ -15,28 +55,27 @@ declare class Project {
15
55
  name?: string;
16
56
  description?: string;
17
57
  history: string[];
18
- workflows: l.Workflow[];
58
+ workflows: Workflow[];
19
59
  options: any;
20
60
  meta: any;
21
61
  openfn?: l.ProjectConfig;
22
62
  repo?: Required<RepoOptions>;
63
+ collections: any;
23
64
  static from(type: 'state', data: any, options: Partial<l.ProjectConfig>): Project;
24
65
  static from(type: 'fs', options: {
25
66
  root: string;
26
67
  }): Project;
27
68
  static from(type: 'path', data: any): Project;
28
69
  static diff(a: Project, b: Project): void;
70
+ static merge(source: Project, target: Project, options: MergeProjectOptions): Project;
29
71
  constructor(data: l.Project, repoConfig?: RepoOptions);
30
72
  serialize(type?: 'json' | 'yaml' | 'fs' | 'state', options?: any): any;
31
73
  getVersionHash(): void;
32
- merge(project: Project, options: any): void;
33
- getWorkflow(id: string): l.Workflow | undefined;
74
+ getWorkflow(id: string): Workflow | undefined;
34
75
  getIdentifier(): string;
35
76
  compare(proj: Project): void;
36
77
  getUUID(workflow: string | Workflow, stepId: string, otherStep?: string): any;
37
78
  }
38
- declare class Workflow {
39
- }
40
79
 
41
80
  declare function yamlToJson(y: string): any;
42
81
  declare function jsonToYaml(json: string | JSONObject): string;
package/dist/index.js CHANGED
@@ -4,6 +4,129 @@ var __export = (target, all) => {
4
4
  __defProp(target, name, { get: all[name], enumerable: true });
5
5
  };
6
6
 
7
+ // src/Workflow.ts
8
+ var clone = (obj) => JSON.parse(JSON.stringify(obj));
9
+ var Workflow = class {
10
+ workflow;
11
+ // this is the raw workflow JSON representation
12
+ index;
13
+ name;
14
+ id;
15
+ openfn;
16
+ steps;
17
+ constructor(workflow) {
18
+ this.index = {
19
+ steps: {},
20
+ // steps by id
21
+ edges: {},
22
+ // edges by from-id id
23
+ uuid: {},
24
+ // id to uuid
25
+ id: {}
26
+ // uuid to ids
27
+ };
28
+ this.workflow = clone(workflow);
29
+ const { id, name, openfn, steps, ...options } = workflow;
30
+ this.id = id;
31
+ this.name = name;
32
+ this.openfn = openfn;
33
+ this.options = options;
34
+ this.#buildIndex();
35
+ }
36
+ get steps() {
37
+ return this.workflow.steps;
38
+ }
39
+ #buildIndex() {
40
+ for (const s of this.workflow.steps) {
41
+ this.index.steps[s.id] = s;
42
+ this.index.uuid[s.id] = s.openfn?.uuid;
43
+ if (s.openfn?.uuid) {
44
+ this.index.id[s.openfn.uuid] = s.id;
45
+ }
46
+ const edges = s.next ?? {};
47
+ for (const next in edges) {
48
+ const edgeId = `${s.id}-${next}`;
49
+ const edge = edges[next];
50
+ this.index.edges[edgeId] = edge;
51
+ this.index.uuid[edgeId] = edge.openfn?.uuid;
52
+ if (edge.openfn?.uuid) {
53
+ this.index.id[edge.openfn.uuid] = edgeId;
54
+ }
55
+ }
56
+ }
57
+ }
58
+ // Set properties on any step or edge by id
59
+ set(id, props) {
60
+ const item = this.index.edges[id] || this.index.steps[id];
61
+ if (!item) {
62
+ throw new Error(`step/edge with id "${id}" does not exist in workflow`);
63
+ }
64
+ Object.assign(item, props);
65
+ return this;
66
+ }
67
+ // Get properties on any step or edge by id
68
+ get(id) {
69
+ const item = this.index.edges[id] || this.index.steps[id];
70
+ if (!item) {
71
+ throw new Error(`step/edge with id "${id}" does not exist in workflow`);
72
+ }
73
+ return item;
74
+ }
75
+ // TODO needs unit tests and maybe setter
76
+ meta(id) {
77
+ const item = this.index.edges[id] || this.index.steps[id];
78
+ if (!item) {
79
+ throw new Error(`step/edge with id "${id}" does not exist in workflow`);
80
+ }
81
+ return item.openfn ?? {};
82
+ }
83
+ // Get an edge based on its source and target
84
+ getEdge(from, to) {
85
+ const edgeId = [from, to].join("-");
86
+ const edge = this.index.edges[edgeId];
87
+ if (!edge) {
88
+ throw new Error(`edge with id "${edgeId}" does not exist in workflow`);
89
+ }
90
+ return edge;
91
+ }
92
+ getAllEdges() {
93
+ const edges = {};
94
+ for (const step of this.steps) {
95
+ const next = typeof step.next === "string" ? { [step.next]: true } : step.next || {};
96
+ for (const toNode of Object.keys(next)) {
97
+ if (!Array.isArray(edges[step.id]))
98
+ edges[step.id] = [toNode];
99
+ else
100
+ edges[step.id].push(toNode);
101
+ }
102
+ }
103
+ return edges;
104
+ }
105
+ getStep(id) {
106
+ return this.index.steps[id];
107
+ }
108
+ getRoot() {
109
+ const edges = this.getAllEdges();
110
+ const all_children = [];
111
+ const all_parents = [];
112
+ for (const [parent, children] of Object.entries(edges)) {
113
+ all_children.push(...children);
114
+ all_parents.push(parent);
115
+ }
116
+ const root = all_parents.find((parent) => !all_children.includes(parent));
117
+ if (!root)
118
+ return;
119
+ return this.index.steps[root];
120
+ }
121
+ getUUID(id) {
122
+ return this.index.uuid[id];
123
+ }
124
+ toJSON() {
125
+ return this.workflow;
126
+ }
127
+ };
128
+ var Workflow_default = Workflow;
129
+
7
130
  // src/serialize/index.ts
8
131
  var serialize_exports = {};
9
132
  __export(serialize_exports, {
@@ -29,6 +152,16 @@ function to_json_default(project) {
29
152
  };
30
153
  }
31
154
 
155
+ // src/util/rename-keys.ts
156
+ function renameKeys(props, keyMap) {
157
+ return Object.fromEntries(
158
+ Object.entries(props).map(([key, value]) => [
159
+ keyMap[key] ? keyMap[key] : key,
160
+ value
161
+ ])
162
+ );
163
+ }
164
+
32
165
  // src/util/yaml.ts
33
166
  import yaml from "yaml";
34
167
  function yamlToJson(y) {
@@ -46,7 +179,7 @@ function jsonToYaml(json) {
46
179
  // src/serialize/to-app-state.ts
47
180
  import { randomUUID } from "node:crypto";
48
181
  function to_app_state_default(project, options = {}) {
49
- const { projectId: id, endpoint, env, ...rest } = project.openfn;
182
+ const { uuid: id, endpoint, env, ...rest } = project.openfn;
50
183
  const state = {
51
184
  id,
52
185
  name: project.name,
@@ -64,19 +197,22 @@ function to_app_state_default(project, options = {}) {
64
197
  return state;
65
198
  }
66
199
  var mapWorkflow = (workflow) => {
200
+ if (workflow instanceof Workflow_default) {
201
+ workflow = workflow.toJSON();
202
+ }
67
203
  const wfState = {
68
204
  name: workflow.name,
69
- ...workflow.openfn,
205
+ ...renameKeys(workflow.openfn, { uuid: "id" }),
70
206
  jobs: [],
71
207
  triggers: [],
72
208
  edges: []
73
209
  };
74
210
  const lookup = workflow.steps.reduce((obj, next) => {
75
- if (!next.openfn?.id) {
211
+ if (!next.openfn?.uuid) {
76
212
  next.openfn ??= {};
77
- next.openfn.id = randomUUID();
213
+ next.openfn.uuid = randomUUID();
78
214
  }
79
- obj[next.id] = next.openfn.id;
215
+ obj[next.id] = next.openfn.uuid;
80
216
  return obj;
81
217
  }, {});
82
218
  workflow.steps.forEach((s) => {
@@ -86,7 +222,7 @@ var mapWorkflow = (workflow) => {
86
222
  isTrigger = true;
87
223
  node = {
88
224
  type: s.type,
89
- ...s.openfn
225
+ ...renameKeys(s.openfn, { uuid: "id" })
90
226
  };
91
227
  wfState.triggers.push(node);
92
228
  } else {
@@ -94,14 +230,14 @@ var mapWorkflow = (workflow) => {
94
230
  name: s.name,
95
231
  body: s.expression,
96
232
  adaptor: s.adaptor,
97
- ...s.openfn
233
+ ...renameKeys(s.openfn, { uuid: "id" })
98
234
  };
99
235
  wfState.jobs.push(node);
100
236
  }
101
237
  Object.keys(s.next ?? {}).forEach((next) => {
102
238
  const rules = s.next[next];
103
239
  const e = {
104
- id: rules.openfn?.id ?? randomUUID(),
240
+ id: rules.openfn?.uuid ?? randomUUID(),
105
241
  target_job_id: lookup[next],
106
242
  enabled: !rules.disabled
107
243
  };
@@ -153,6 +289,8 @@ var extractWorkflow = (project, workflowId2) => {
153
289
  const wf = {
154
290
  id: workflow.id,
155
291
  name: workflow.name,
292
+ // Note: if no options are defined, options will serialize to an empty object
293
+ // Not crazy about this - maybe we should do something better? Or do we like the consistency?
156
294
  options: workflow.options,
157
295
  steps: workflow.steps.map((step) => {
158
296
  const { openfn, expression, ...mapped } = step;
@@ -233,7 +371,7 @@ var from_app_state_default = (state, config) => {
233
371
  };
234
372
  const repoConfig = {};
235
373
  proj.openfn = {
236
- projectId: id,
374
+ uuid: id,
237
375
  endpoint: config.endpoint,
238
376
  env: config.env,
239
377
  inserted_at,
@@ -257,7 +395,7 @@ var mapTriggerEdgeCondition = (edge) => {
257
395
  e.condition = edge.condition_expression;
258
396
  }
259
397
  e.openfn = {
260
- id: edge.id
398
+ uuid: edge.id
261
399
  };
262
400
  return e;
263
401
  };
@@ -267,7 +405,7 @@ var mapWorkflow2 = (workflow) => {
267
405
  id: slugify(workflow.name),
268
406
  name: workflow.name,
269
407
  steps: [],
270
- openfn: remoteProps
408
+ openfn: renameKeys(remoteProps, { id: "uuid" })
271
409
  };
272
410
  workflow.triggers.forEach((trigger) => {
273
411
  const { type, ...otherProps } = trigger;
@@ -277,7 +415,7 @@ var mapWorkflow2 = (workflow) => {
277
415
  mapped.steps.push({
278
416
  id: "trigger",
279
417
  type,
280
- openfn: otherProps,
418
+ openfn: renameKeys(otherProps, { id: "uuid" }),
281
419
  next: connectedEdges.reduce((obj, edge) => {
282
420
  const target = jobs.find((j) => j.id === edge.target_job_id);
283
421
  if (!target) {
@@ -298,7 +436,7 @@ var mapWorkflow2 = (workflow) => {
298
436
  name: name2,
299
437
  expression,
300
438
  adaptor,
301
- openfn: remoteProps2
439
+ openfn: renameKeys(remoteProps2, { id: "uuid" })
302
440
  };
303
441
  if (outboundEdges.length) {
304
442
  s.next = outboundEdges.reduce((next, edge) => {
@@ -382,7 +520,7 @@ var parseProject = async (options = {}) => {
382
520
  if (wf.id && Array.isArray(wf.steps)) {
383
521
  const wfState = (state && state.getWorkflow(wf.id)) ?? {};
384
522
  wf.openfn = {
385
- id: wfState.openfn?.id ?? null
523
+ uuid: wfState.openfn?.uuid ?? null
386
524
  // TODO do we need to transfer more stuff?
387
525
  };
388
526
  console.log("Loading workflow at ", filePath);
@@ -398,14 +536,14 @@ var parseProject = async (options = {}) => {
398
536
  }
399
537
  }
400
538
  const uuid = state?.getUUID(wf.id, step.id) ?? null;
401
- step.openfn = { id: uuid };
539
+ step.openfn = { uuid };
402
540
  for (const target in step.next || {}) {
403
541
  if (typeof step.next[target] === "boolean") {
404
542
  const bool = step.next[target];
405
543
  step.next[target] = { condition: bool };
406
544
  }
407
545
  const uuid2 = state?.getUUID(wf.id, step.id, target) ?? null;
408
- step.next[target].openfn = { id: uuid2 };
546
+ step.next[target].openfn = { uuid: uuid2 };
409
547
  }
410
548
  }
411
549
  workflows.push(wf);
@@ -427,7 +565,7 @@ var getUuidForStep = (project, workflow, stepId) => {
427
565
  }
428
566
  for (const step of wf.steps) {
429
567
  if (step.id === stepId) {
430
- return step.openfn?.id ?? null;
568
+ return step.openfn?.uuid ?? null;
431
569
  }
432
570
  }
433
571
  return null;
@@ -443,7 +581,7 @@ var getUuidForEdge = (project, workflow, from, to) => {
443
581
  if (step.id === from) {
444
582
  for (const edge in step.next) {
445
583
  if (edge === to) {
446
- return step.next[edge].openfn?.id ?? null;
584
+ return step.next[edge].openfn?.uuid ?? null;
447
585
  }
448
586
  }
449
587
  break;
@@ -452,6 +590,359 @@ var getUuidForEdge = (project, workflow, from, to) => {
452
590
  return null;
453
591
  };
454
592
 
593
+ // src/merge/merge-project.ts
594
+ import { defaultsDeep, isEmpty } from "lodash-es";
595
+
596
+ // src/util/base-merge.ts
597
+ import { pick, assign } from "lodash-es";
598
+ function baseMerge(target, source, sourceKeys, assigns = {}) {
599
+ const pickedSource = sourceKeys ? pick(source, sourceKeys) : source;
600
+ return assign(target, { ...pickedSource, ...assigns });
601
+ }
602
+
603
+ // src/merge/merge-node.ts
604
+ function mergeWorkflows(source, target, mappings) {
605
+ const targetNodes = {};
606
+ for (const tstep2 of target.steps)
607
+ targetNodes[tstep2.openfn.uuid || tstep2.id] = tstep2;
608
+ const steps = [];
609
+ for (const sstep of source.steps) {
610
+ let newNode = sstep;
611
+ if (typeof mappings.nodes[sstep.id] === "string") {
612
+ const preservedId = mappings.nodes[sstep.id];
613
+ const preservedEdgeIds = {};
614
+ for (const toNode of Object.keys(
615
+ typeof sstep.next === "string" ? { [sstep.next]: true } : sstep.next || {}
616
+ )) {
617
+ const key = sstep.id + "-" + toNode;
618
+ if (typeof mappings.edges[key] === "string") {
619
+ const preservedEdgeId = mappings.edges[key];
620
+ const toEdge = sstep.next?.[toNode] || {};
621
+ preservedEdgeIds[toNode] = sstep.next[toNode] = {
622
+ ...toEdge,
623
+ openfn: { ...toEdge?.openfn || {}, uuid: preservedEdgeId }
624
+ };
625
+ }
626
+ }
627
+ newNode = baseMerge(targetNodes[preservedId], sstep, [
628
+ "id",
629
+ "name",
630
+ "adaptor",
631
+ "expression",
632
+ "next",
633
+ "previous"
634
+ ]);
635
+ } else {
636
+ }
637
+ steps.push(newNode);
638
+ }
639
+ const newSource = { ...source, steps };
640
+ return {
641
+ ...target,
642
+ ...newSource,
643
+ openfn: { ...target.openfn }
644
+ // preserving the target uuid. we might need a proper helper function for this.
645
+ };
646
+ }
647
+
648
+ // src/merge/map-uuids.ts
649
+ var map_uuids_default = (source, target) => {
650
+ const targetEdges = target.getAllEdges();
651
+ const sourceEdges = source.getAllEdges();
652
+ const initialMapping = mapStepsById(source.steps, target.steps);
653
+ let nodeMapping = initialMapping.mapping;
654
+ let unmappedSource = initialMapping.pool.source;
655
+ let unmappedTarget = initialMapping.pool.target;
656
+ let idMap = initialMapping.idMap;
657
+ mapRootNodes(source, target, idMap, nodeMapping);
658
+ const getMappedId = (id) => idMap.get(id) || id;
659
+ let prevUnmapped = -1;
660
+ let remainingUnmapped = findRemainingUnmappedNodes(unmappedSource, idMap);
661
+ let lastIteration = false;
662
+ while (prevUnmapped !== remainingUnmapped.length || !lastIteration) {
663
+ lastIteration = prevUnmapped === remainingUnmapped.length;
664
+ for (const sourceStep of remainingUnmapped) {
665
+ const candidates = getUnmappedCandidates(unmappedTarget, idMap);
666
+ const mappingResult = findBestMatch(
667
+ sourceStep,
668
+ candidates,
669
+ sourceEdges,
670
+ targetEdges,
671
+ getMappedId,
672
+ lastIteration
673
+ // isLastIteration
674
+ );
675
+ if (mappingResult) {
676
+ nodeMapping[sourceStep.id] = getStepUuid(mappingResult);
677
+ idMap.set(sourceStep.id, mappingResult.id);
678
+ }
679
+ }
680
+ prevUnmapped = remainingUnmapped.length;
681
+ remainingUnmapped = findRemainingUnmappedNodes(unmappedSource, idMap);
682
+ }
683
+ const edgeMapping = mapEdges(
684
+ sourceEdges,
685
+ targetEdges,
686
+ idMap,
687
+ target.getUUID.bind(target)
688
+ );
689
+ return {
690
+ nodes: nodeMapping,
691
+ edges: edgeMapping
692
+ };
693
+ };
694
+ function mapRootNodes(source, target, idMap, nodeMapping) {
695
+ const sourceRoot = source.getRoot();
696
+ const targetRoot = target.getRoot();
697
+ if (sourceRoot && targetRoot) {
698
+ idMap.set(sourceRoot.id, targetRoot.id);
699
+ nodeMapping[sourceRoot.id] = getStepUuid(targetRoot);
700
+ }
701
+ }
702
+ function findRemainingUnmappedNodes(unmappedSource, idMap) {
703
+ return unmappedSource.filter((step) => step.id && !idMap.has(step.id));
704
+ }
705
+ function getUnmappedCandidates(unmappedTarget, idMap) {
706
+ const mappedIds = new Set(idMap.values());
707
+ return unmappedTarget.filter((step) => !mappedIds.has(step.id));
708
+ }
709
+ function findBestMatch(sourceStep, candidates, sourceEdges, targetEdges, getMappedId, isLastIteration) {
710
+ if (candidates.length === 0)
711
+ return null;
712
+ let bestCandidates = candidates;
713
+ let topResult = null;
714
+ let didStructuralFilter = false;
715
+ const parentResult = mapStepByParent(
716
+ sourceStep,
717
+ bestCandidates,
718
+ sourceEdges,
719
+ targetEdges,
720
+ getMappedId
721
+ );
722
+ if (parentResult.candidates.length > 0) {
723
+ bestCandidates = parentResult.candidates;
724
+ topResult = bestCandidates[0];
725
+ didStructuralFilter ||= parentResult.filtered;
726
+ }
727
+ if (bestCandidates.length === 1) {
728
+ return bestCandidates[0];
729
+ }
730
+ const childrenResult = mapStepByChildren(
731
+ sourceStep,
732
+ bestCandidates,
733
+ sourceEdges,
734
+ targetEdges,
735
+ getMappedId
736
+ );
737
+ if (childrenResult.candidates.length > 0) {
738
+ bestCandidates = childrenResult.candidates;
739
+ topResult = bestCandidates[0];
740
+ didStructuralFilter ||= childrenResult.filtered;
741
+ }
742
+ if (bestCandidates.length === 1) {
743
+ return bestCandidates[0];
744
+ }
745
+ const expressionCandidates = mapStepByExpression(sourceStep, bestCandidates);
746
+ if (expressionCandidates.length > 0) {
747
+ bestCandidates = expressionCandidates;
748
+ }
749
+ if (bestCandidates.length === 1) {
750
+ return bestCandidates[0];
751
+ }
752
+ if (isLastIteration && didStructuralFilter && topResult) {
753
+ return topResult;
754
+ }
755
+ return null;
756
+ }
757
+ function mapEdges(sourceEdges, targetEdges, idMap, getTargetUUID) {
758
+ const edgeMapping = {};
759
+ for (const [parentId, children] of Object.entries(sourceEdges)) {
760
+ for (const childId of children) {
761
+ const sourceEdgeKey = `${parentId}-${childId}`;
762
+ const mappedParentId = idMap.get(parentId) || parentId;
763
+ const mappedChildId = idMap.get(childId) || childId;
764
+ const targetEdgeId = getTargetUUID(`${mappedParentId}-${mappedChildId}`);
765
+ if (targetEdgeId) {
766
+ edgeMapping[sourceEdgeKey] = targetEdgeId;
767
+ }
768
+ }
769
+ }
770
+ return edgeMapping;
771
+ }
772
+ function getStepUuid(step) {
773
+ return step?.openfn?.uuid || step.id;
774
+ }
775
+ function mapStepsById(source, target) {
776
+ const targetIndex = {};
777
+ const mapping = {};
778
+ const idMap = /* @__PURE__ */ new Map();
779
+ for (const targetStep of target) {
780
+ targetIndex[targetStep.id] = targetStep;
781
+ }
782
+ const unmappedSourceIndices = [];
783
+ const unmappedTarget = [...target];
784
+ for (let i = 0; i < source.length; i++) {
785
+ const sourceStep = source[i];
786
+ const matchingTarget = targetIndex[sourceStep.id];
787
+ if (matchingTarget) {
788
+ mapping[sourceStep.id] = getStepUuid(matchingTarget);
789
+ idMap.set(sourceStep.id, matchingTarget.id);
790
+ const targetIndex2 = unmappedTarget.findIndex(
791
+ (t) => t.id === matchingTarget.id
792
+ );
793
+ if (targetIndex2 !== -1) {
794
+ unmappedTarget.splice(targetIndex2, 1);
795
+ }
796
+ } else {
797
+ unmappedSourceIndices.push(i);
798
+ }
799
+ }
800
+ return {
801
+ mapping,
802
+ idMap,
803
+ pool: {
804
+ source: source.filter((_, i) => unmappedSourceIndices.includes(i)),
805
+ target: unmappedTarget
806
+ }
807
+ };
808
+ }
809
+ function getParent(id, edges) {
810
+ return Object.entries(edges).filter(([, children]) => children.includes(id)).map(([parentId]) => parentId);
811
+ }
812
+ function findByExpression(expression, steps) {
813
+ return steps.filter(
814
+ (step) => step.expression && step.expression.trim() && step.expression === expression
815
+ );
816
+ }
817
+ function findByParent(parentIds, edges, steps) {
818
+ const matches = [];
819
+ for (const parentId of parentIds) {
820
+ const children = edges[parentId];
821
+ if (!children || children.length === 0)
822
+ continue;
823
+ const matchingSteps = steps.filter((step) => children.includes(step.id));
824
+ matches.push(...matchingSteps);
825
+ }
826
+ return matches;
827
+ }
828
+ function findByChildren(childIds, edges, steps) {
829
+ const childMatchCount = {};
830
+ for (const [parentId, children] of Object.entries(edges)) {
831
+ const matchCount = children.filter(
832
+ (childId) => childIds.includes(childId)
833
+ ).length;
834
+ if (matchCount > 0) {
835
+ childMatchCount[parentId] = matchCount;
836
+ }
837
+ }
838
+ const sortedParentIds = Object.entries(childMatchCount).sort(([, count1], [, count2]) => count2 - count1).map(([parentId]) => parentId);
839
+ const stepIndex = steps.reduce((index, step) => {
840
+ index[step.id] = step;
841
+ return index;
842
+ }, {});
843
+ return sortedParentIds.filter((parentId) => stepIndex[parentId]).map((parentId) => stepIndex[parentId]);
844
+ }
845
+ function mapStepByParent(sourceStep, candidates, sourceEdges, targetEdges, getMappedId) {
846
+ const sourceParents = getParent(sourceStep.id, sourceEdges);
847
+ if (sourceParents.length === 0) {
848
+ return { filtered: false, candidates };
849
+ }
850
+ const mappedParentIds = sourceParents.map(getMappedId);
851
+ const matchingCandidates = findByParent(
852
+ mappedParentIds,
853
+ targetEdges,
854
+ candidates
855
+ );
856
+ return {
857
+ filtered: true,
858
+ candidates: matchingCandidates
859
+ };
860
+ }
861
+ function mapStepByChildren(sourceStep, candidates, sourceEdges, targetEdges, getMappedId) {
862
+ const sourceChildren = sourceEdges[sourceStep.id];
863
+ if (!sourceChildren) {
864
+ return { filtered: false, candidates };
865
+ }
866
+ const mappedChildIds = sourceChildren.map(getMappedId);
867
+ const matchingCandidates = findByChildren(
868
+ mappedChildIds,
869
+ targetEdges,
870
+ candidates
871
+ );
872
+ return {
873
+ filtered: true,
874
+ candidates: matchingCandidates
875
+ };
876
+ }
877
+ function mapStepByExpression(sourceStep, candidates) {
878
+ const expression = sourceStep.expression;
879
+ return findByExpression(expression, candidates);
880
+ }
881
+
882
+ // src/util/get-duplicates.ts
883
+ function getDuplicates(arr) {
884
+ const hmap = {};
885
+ const duplicates = /* @__PURE__ */ new Set();
886
+ for (let i = 0; i < arr.length; i++) {
887
+ const item = arr[i];
888
+ if (hmap[item])
889
+ duplicates.add(item);
890
+ else
891
+ hmap[item] = true;
892
+ }
893
+ return Array.from(duplicates);
894
+ }
895
+
896
+ // src/merge/merge-project.ts
897
+ function merge(source, target, options) {
898
+ const defaultOptions = {
899
+ workflowMappings: {},
900
+ removeUnmapped: false
901
+ };
902
+ options = defaultsDeep(options, defaultOptions);
903
+ const dupTargetMappings = getDuplicates(
904
+ Object.values(options?.workflowMappings)
905
+ );
906
+ if (dupTargetMappings.length) {
907
+ throw new Error(
908
+ `The following target workflows have multiple source workflows merging into them: ${dupTargetMappings.join(
909
+ ", "
910
+ )}`
911
+ );
912
+ }
913
+ const finalWorkflows = [];
914
+ const usedTargetIds = /* @__PURE__ */ new Set();
915
+ const noMappings = isEmpty(options?.workflowMappings);
916
+ let sourceWorkflows = source.workflows.filter((w) => {
917
+ if (noMappings)
918
+ return true;
919
+ return !!options?.workflowMappings[w.id];
920
+ });
921
+ for (const sourceWorkflow of sourceWorkflows) {
922
+ const targetId = options.workflowMappings?.[sourceWorkflow.id] ?? sourceWorkflow.id;
923
+ const targetWorkflow = target.getWorkflow(targetId);
924
+ if (targetWorkflow) {
925
+ usedTargetIds.add(targetWorkflow.id);
926
+ const mappings = map_uuids_default(sourceWorkflow, targetWorkflow);
927
+ finalWorkflows.push(
928
+ mergeWorkflows(sourceWorkflow, targetWorkflow, mappings)
929
+ );
930
+ } else {
931
+ finalWorkflows.push(sourceWorkflow);
932
+ }
933
+ }
934
+ if (!options?.removeUnmapped) {
935
+ for (const targetWorkflow of target.workflows) {
936
+ if (!usedTargetIds.has(targetWorkflow.id)) {
937
+ finalWorkflows.push(targetWorkflow);
938
+ }
939
+ }
940
+ }
941
+ return new Project(
942
+ baseMerge(target, source, ["collections"], { workflows: finalWorkflows })
943
+ );
944
+ }
945
+
455
946
  // src/Project.ts
456
947
  var setConfigDefaults = (config = {}) => ({
457
948
  workflowRoot: config.workflowRoot ?? "workflows",
@@ -484,6 +975,12 @@ var Project = class {
484
975
  // these should be shared across projects
485
976
  // and saved to an openfn.yaml file
486
977
  repo;
978
+ // load a project from a state file (project.json)
979
+ // or from a path (the file system)
980
+ // TODO presumably we can detect a state file? Not a big deal?
981
+ // collections for the project
982
+ // TODO to be well typed
983
+ collections;
487
984
  static from(type, data, options) {
488
985
  if (type === "state") {
489
986
  return from_app_state_default(data, options);
@@ -496,18 +993,23 @@ var Project = class {
496
993
  // Diff two projects
497
994
  static diff(a, b) {
498
995
  }
996
+ // Merge a source project (staging) into the target project (main)
997
+ // Returns a new Project
998
+ // TODO: throw if histories have diverged
999
+ static merge(source, target, options) {
1000
+ return merge(source, target, options);
1001
+ }
499
1002
  // env is excluded because it's not really part of the project
500
1003
  // uh maybe
501
1004
  // maybe this second arg is config - like env, branch rules, serialisation rules
502
1005
  // stuff that's external to the actual project and managed by the repo
503
1006
  constructor(data, repoConfig = {}) {
504
1007
  this.repo = setConfigDefaults(repoConfig);
505
- this.id = data.id;
506
1008
  this.name = data.name;
507
1009
  this.description = data.description;
508
1010
  this.openfn = data.openfn;
509
1011
  this.options = data.options;
510
- this.workflows = data.workflows;
1012
+ this.workflows = data.workflows?.map((w) => new Workflow_default(w)) ?? [];
511
1013
  this.collections = data.collections;
512
1014
  this.credentials = data.credentials;
513
1015
  this.meta = data.meta;
@@ -523,11 +1025,6 @@ var Project = class {
523
1025
  // this builds a version string for the current state
524
1026
  getVersionHash() {
525
1027
  }
526
- // take a second project and merge its data into this one
527
- // Throws if there's a conflict, unless force is true
528
- // It's basically an overwrite
529
- merge(project, options) {
530
- }
531
1028
  // what else might we need?
532
1029
  // get workflow by name or id
533
1030
  // this is fuzzy, but is that wrong?
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@openfn/project",
3
- "version": "0.1.1",
3
+ "version": "0.2.0",
4
4
  "description": "Read, serialize, replicate and sync OpenFn projects",
5
5
  "type": "module",
6
6
  "exports": {
@@ -17,6 +17,7 @@
17
17
  "author": "Open Function Group <admin@openfn.org>",
18
18
  "license": "ISC",
19
19
  "devDependencies": {
20
+ "@types/lodash": "^4.17.20",
20
21
  "@types/mock-fs": "~4.13.4",
21
22
  "ava": "5.3.1",
22
23
  "mock-fs": "^5.5.0",
@@ -26,8 +27,9 @@
26
27
  "typescript": "^5.9.2"
27
28
  },
28
29
  "dependencies": {
29
- "glob": "^11.0.3",
30
- "yaml": "^2.8.1",
30
+ "glob": "^11.0.2",
31
+ "lodash-es": "^4.17.21",
32
+ "yaml": "^2.2.2",
31
33
  "@openfn/lexicon": "^1.2.3",
32
34
  "@openfn/logger": "1.0.6"
33
35
  },