@openfn/project 0.1.1 → 0.3.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/index.d.ts +78 -8
- package/dist/index.js +599 -39
- package/package.json +5 -3
package/dist/index.d.ts
CHANGED
|
@@ -1,6 +1,66 @@
|
|
|
1
1
|
import * as l from '@openfn/lexicon';
|
|
2
2
|
|
|
3
|
+
type OpenfnMeta = {
|
|
4
|
+
uuid?: string;
|
|
5
|
+
};
|
|
6
|
+
type WithMeta<T> = T & {
|
|
7
|
+
openfn?: OpenfnMeta;
|
|
8
|
+
};
|
|
9
|
+
declare class Workflow {
|
|
10
|
+
#private;
|
|
11
|
+
workflow: l.Workflow;
|
|
12
|
+
index: {
|
|
13
|
+
steps: {};
|
|
14
|
+
edges: {};
|
|
15
|
+
uuid: {};
|
|
16
|
+
id: {};
|
|
17
|
+
};
|
|
18
|
+
name: string;
|
|
19
|
+
id: string;
|
|
20
|
+
openfn: OpenfnMeta;
|
|
21
|
+
steps: WithMeta<l.Job | l.Trigger>[];
|
|
22
|
+
constructor(workflow: l.Workflow);
|
|
23
|
+
get steps(): (l.Job | l.Trigger)[];
|
|
24
|
+
set(id: string, props: Parital<l.Job, l.Edge>): this;
|
|
25
|
+
get(id: any): WithMeta<l.Step | l.Trigger | l.Edge>;
|
|
26
|
+
meta(id: any): OpenfnMeta;
|
|
27
|
+
getEdge(from: any, to: any): WithMeta<l.ConditionalStepEdge>;
|
|
28
|
+
getAllEdges(): Record<string, string[]>;
|
|
29
|
+
getStep(id: string): Workflow["steps"][number];
|
|
30
|
+
getRoot(): (l.Trigger & {
|
|
31
|
+
openfn?: OpenfnMeta;
|
|
32
|
+
}) | undefined;
|
|
33
|
+
getUUID(id: any): string;
|
|
34
|
+
toJSON(): JSON.Object;
|
|
35
|
+
}
|
|
36
|
+
|
|
37
|
+
type FromFsConfig = {
|
|
38
|
+
root: string;
|
|
39
|
+
};
|
|
40
|
+
|
|
41
|
+
type MergeProjectOptions = Partial<{
|
|
42
|
+
workflowMappings: Record<string, string>;
|
|
43
|
+
removeUnmapped: boolean;
|
|
44
|
+
force: boolean;
|
|
45
|
+
}>;
|
|
46
|
+
|
|
3
47
|
type FileFormats = 'yaml' | 'json';
|
|
48
|
+
interface OpenfnConfig {
|
|
49
|
+
name: string;
|
|
50
|
+
workflowRoot: string;
|
|
51
|
+
formats: {
|
|
52
|
+
openfn: FileFormats;
|
|
53
|
+
project: FileFormats;
|
|
54
|
+
workflow: FileFormats;
|
|
55
|
+
};
|
|
56
|
+
project: {
|
|
57
|
+
projectId: string;
|
|
58
|
+
endpoint: string;
|
|
59
|
+
env: string;
|
|
60
|
+
inserted_at: string;
|
|
61
|
+
updated_at: string;
|
|
62
|
+
};
|
|
63
|
+
}
|
|
4
64
|
type RepoOptions = {
|
|
5
65
|
/**default workflow root when serializing to fs (relative to openfn.yaml) */
|
|
6
66
|
workflowRoot?: string;
|
|
@@ -15,30 +75,40 @@ declare class Project {
|
|
|
15
75
|
name?: string;
|
|
16
76
|
description?: string;
|
|
17
77
|
history: string[];
|
|
18
|
-
workflows:
|
|
78
|
+
workflows: Workflow[];
|
|
19
79
|
options: any;
|
|
20
80
|
meta: any;
|
|
21
81
|
openfn?: l.ProjectConfig;
|
|
22
82
|
repo?: Required<RepoOptions>;
|
|
83
|
+
collections: any;
|
|
23
84
|
static from(type: 'state', data: any, options: Partial<l.ProjectConfig>): Project;
|
|
24
|
-
static from(type: 'fs', options:
|
|
25
|
-
root: string;
|
|
26
|
-
}): Project;
|
|
85
|
+
static from(type: 'fs', options: FromFsConfig): Project;
|
|
27
86
|
static from(type: 'path', data: any): Project;
|
|
28
87
|
static diff(a: Project, b: Project): void;
|
|
88
|
+
static merge(source: Project, target: Project, options: MergeProjectOptions): Project;
|
|
29
89
|
constructor(data: l.Project, repoConfig?: RepoOptions);
|
|
30
90
|
serialize(type?: 'json' | 'yaml' | 'fs' | 'state', options?: any): any;
|
|
31
91
|
getVersionHash(): void;
|
|
32
|
-
|
|
33
|
-
getWorkflow(id: string): l.Workflow | undefined;
|
|
92
|
+
getWorkflow(id: string): Workflow | undefined;
|
|
34
93
|
getIdentifier(): string;
|
|
35
94
|
compare(proj: Project): void;
|
|
36
95
|
getUUID(workflow: string | Workflow, stepId: string, otherStep?: string): any;
|
|
37
96
|
}
|
|
38
|
-
|
|
97
|
+
|
|
98
|
+
declare class Workspace {
|
|
99
|
+
private config?;
|
|
100
|
+
private projects;
|
|
101
|
+
private isValid;
|
|
102
|
+
constructor(workspacePath: string);
|
|
103
|
+
list(): Project[];
|
|
104
|
+
get(id: string): Project | undefined;
|
|
105
|
+
getActiveProject(): Project | undefined;
|
|
106
|
+
getConfig(): OpenfnConfig | undefined;
|
|
107
|
+
get activeProjectId(): string | undefined;
|
|
108
|
+
get valid(): boolean;
|
|
39
109
|
}
|
|
40
110
|
|
|
41
111
|
declare function yamlToJson(y: string): any;
|
|
42
112
|
declare function jsonToYaml(json: string | JSONObject): string;
|
|
43
113
|
|
|
44
|
-
export { Project as default, jsonToYaml, yamlToJson };
|
|
114
|
+
export { Workspace, Project as default, jsonToYaml, yamlToJson };
|
package/dist/index.js
CHANGED
|
@@ -4,6 +4,129 @@ var __export = (target, all) => {
|
|
|
4
4
|
__defProp(target, name, { get: all[name], enumerable: true });
|
|
5
5
|
};
|
|
6
6
|
|
|
7
|
+
// src/Workflow.ts
|
|
8
|
+
var clone = (obj) => JSON.parse(JSON.stringify(obj));
|
|
9
|
+
var Workflow = class {
|
|
10
|
+
workflow;
|
|
11
|
+
// this is the raw workflow JSON representation
|
|
12
|
+
index;
|
|
13
|
+
name;
|
|
14
|
+
id;
|
|
15
|
+
openfn;
|
|
16
|
+
steps;
|
|
17
|
+
constructor(workflow) {
|
|
18
|
+
this.index = {
|
|
19
|
+
steps: {},
|
|
20
|
+
// steps by id
|
|
21
|
+
edges: {},
|
|
22
|
+
// edges by from-id id
|
|
23
|
+
uuid: {},
|
|
24
|
+
// id to uuid
|
|
25
|
+
id: {}
|
|
26
|
+
// uuid to ids
|
|
27
|
+
};
|
|
28
|
+
this.workflow = clone(workflow);
|
|
29
|
+
const { id, name, openfn, steps, ...options } = workflow;
|
|
30
|
+
this.id = id;
|
|
31
|
+
this.name = name;
|
|
32
|
+
this.openfn = openfn;
|
|
33
|
+
this.options = options;
|
|
34
|
+
this.#buildIndex();
|
|
35
|
+
}
|
|
36
|
+
get steps() {
|
|
37
|
+
return this.workflow.steps;
|
|
38
|
+
}
|
|
39
|
+
#buildIndex() {
|
|
40
|
+
for (const s of this.workflow.steps) {
|
|
41
|
+
this.index.steps[s.id] = s;
|
|
42
|
+
this.index.uuid[s.id] = s.openfn?.uuid;
|
|
43
|
+
if (s.openfn?.uuid) {
|
|
44
|
+
this.index.id[s.openfn.uuid] = s.id;
|
|
45
|
+
}
|
|
46
|
+
const edges = s.next ?? {};
|
|
47
|
+
for (const next in edges) {
|
|
48
|
+
const edgeId = `${s.id}-${next}`;
|
|
49
|
+
const edge = edges[next];
|
|
50
|
+
this.index.edges[edgeId] = edge;
|
|
51
|
+
this.index.uuid[edgeId] = edge.openfn?.uuid;
|
|
52
|
+
if (edge.openfn?.uuid) {
|
|
53
|
+
this.index.id[edge.openfn.uuid] = edgeId;
|
|
54
|
+
}
|
|
55
|
+
}
|
|
56
|
+
}
|
|
57
|
+
}
|
|
58
|
+
// Set properties on any step or edge by id
|
|
59
|
+
set(id, props) {
|
|
60
|
+
const item = this.index.edges[id] || this.index.steps[id];
|
|
61
|
+
if (!item) {
|
|
62
|
+
throw new Error(`step/edge with id "${id}" does not exist in workflow`);
|
|
63
|
+
}
|
|
64
|
+
Object.assign(item, props);
|
|
65
|
+
return this;
|
|
66
|
+
}
|
|
67
|
+
// Get properties on any step or edge by id
|
|
68
|
+
get(id) {
|
|
69
|
+
const item = this.index.edges[id] || this.index.steps[id];
|
|
70
|
+
if (!item) {
|
|
71
|
+
throw new Error(`step/edge with id "${id}" does not exist in workflow`);
|
|
72
|
+
}
|
|
73
|
+
return item;
|
|
74
|
+
}
|
|
75
|
+
// TODO needs unit tests and maybe setter
|
|
76
|
+
meta(id) {
|
|
77
|
+
const item = this.index.edges[id] || this.index.steps[id];
|
|
78
|
+
if (!item) {
|
|
79
|
+
throw new Error(`step/edge with id "${id}" does not exist in workflow`);
|
|
80
|
+
}
|
|
81
|
+
return item.openfn ?? {};
|
|
82
|
+
}
|
|
83
|
+
// Get an edge based on its source and target
|
|
84
|
+
getEdge(from, to) {
|
|
85
|
+
const edgeId = [from, to].join("-");
|
|
86
|
+
const edge = this.index.edges[edgeId];
|
|
87
|
+
if (!edge) {
|
|
88
|
+
throw new Error(`edge with id "${edgeId}" does not exist in workflow`);
|
|
89
|
+
}
|
|
90
|
+
return edge;
|
|
91
|
+
}
|
|
92
|
+
getAllEdges() {
|
|
93
|
+
const edges = {};
|
|
94
|
+
for (const step of this.steps) {
|
|
95
|
+
const next = typeof step.next === "string" ? { [step.next]: true } : step.next || {};
|
|
96
|
+
for (const toNode of Object.keys(next)) {
|
|
97
|
+
if (!Array.isArray(edges[step.id]))
|
|
98
|
+
edges[step.id] = [toNode];
|
|
99
|
+
else
|
|
100
|
+
edges[step.id].push(toNode);
|
|
101
|
+
}
|
|
102
|
+
}
|
|
103
|
+
return edges;
|
|
104
|
+
}
|
|
105
|
+
getStep(id) {
|
|
106
|
+
return this.index.steps[id];
|
|
107
|
+
}
|
|
108
|
+
getRoot() {
|
|
109
|
+
const edges = this.getAllEdges();
|
|
110
|
+
const all_children = [];
|
|
111
|
+
const all_parents = [];
|
|
112
|
+
for (const [parent, children] of Object.entries(edges)) {
|
|
113
|
+
all_children.push(...children);
|
|
114
|
+
all_parents.push(parent);
|
|
115
|
+
}
|
|
116
|
+
const root = all_parents.find((parent) => !all_children.includes(parent));
|
|
117
|
+
if (!root)
|
|
118
|
+
return;
|
|
119
|
+
return this.index.steps[root];
|
|
120
|
+
}
|
|
121
|
+
getUUID(id) {
|
|
122
|
+
return this.index.uuid[id];
|
|
123
|
+
}
|
|
124
|
+
toJSON() {
|
|
125
|
+
return this.workflow;
|
|
126
|
+
}
|
|
127
|
+
};
|
|
128
|
+
var Workflow_default = Workflow;
|
|
129
|
+
|
|
7
130
|
// src/serialize/index.ts
|
|
8
131
|
var serialize_exports = {};
|
|
9
132
|
__export(serialize_exports, {
|
|
@@ -29,6 +152,16 @@ function to_json_default(project) {
|
|
|
29
152
|
};
|
|
30
153
|
}
|
|
31
154
|
|
|
155
|
+
// src/util/rename-keys.ts
|
|
156
|
+
function renameKeys(props, keyMap) {
|
|
157
|
+
return Object.fromEntries(
|
|
158
|
+
Object.entries(props).map(([key, value]) => [
|
|
159
|
+
keyMap[key] ? keyMap[key] : key,
|
|
160
|
+
value
|
|
161
|
+
])
|
|
162
|
+
);
|
|
163
|
+
}
|
|
164
|
+
|
|
32
165
|
// src/util/yaml.ts
|
|
33
166
|
import yaml from "yaml";
|
|
34
167
|
function yamlToJson(y) {
|
|
@@ -46,7 +179,7 @@ function jsonToYaml(json) {
|
|
|
46
179
|
// src/serialize/to-app-state.ts
|
|
47
180
|
import { randomUUID } from "node:crypto";
|
|
48
181
|
function to_app_state_default(project, options = {}) {
|
|
49
|
-
const {
|
|
182
|
+
const { uuid: id, endpoint, env, ...rest } = project.openfn;
|
|
50
183
|
const state = {
|
|
51
184
|
id,
|
|
52
185
|
name: project.name,
|
|
@@ -64,19 +197,22 @@ function to_app_state_default(project, options = {}) {
|
|
|
64
197
|
return state;
|
|
65
198
|
}
|
|
66
199
|
var mapWorkflow = (workflow) => {
|
|
200
|
+
if (workflow instanceof Workflow_default) {
|
|
201
|
+
workflow = workflow.toJSON();
|
|
202
|
+
}
|
|
67
203
|
const wfState = {
|
|
68
204
|
name: workflow.name,
|
|
69
|
-
...workflow.openfn,
|
|
205
|
+
...renameKeys(workflow.openfn, { uuid: "id" }),
|
|
70
206
|
jobs: [],
|
|
71
207
|
triggers: [],
|
|
72
208
|
edges: []
|
|
73
209
|
};
|
|
74
210
|
const lookup = workflow.steps.reduce((obj, next) => {
|
|
75
|
-
if (!next.openfn?.
|
|
211
|
+
if (!next.openfn?.uuid) {
|
|
76
212
|
next.openfn ??= {};
|
|
77
|
-
next.openfn.
|
|
213
|
+
next.openfn.uuid = randomUUID();
|
|
78
214
|
}
|
|
79
|
-
obj[next.id] = next.openfn.
|
|
215
|
+
obj[next.id] = next.openfn.uuid;
|
|
80
216
|
return obj;
|
|
81
217
|
}, {});
|
|
82
218
|
workflow.steps.forEach((s) => {
|
|
@@ -86,7 +222,7 @@ var mapWorkflow = (workflow) => {
|
|
|
86
222
|
isTrigger = true;
|
|
87
223
|
node = {
|
|
88
224
|
type: s.type,
|
|
89
|
-
...s.openfn
|
|
225
|
+
...renameKeys(s.openfn, { uuid: "id" })
|
|
90
226
|
};
|
|
91
227
|
wfState.triggers.push(node);
|
|
92
228
|
} else {
|
|
@@ -94,14 +230,14 @@ var mapWorkflow = (workflow) => {
|
|
|
94
230
|
name: s.name,
|
|
95
231
|
body: s.expression,
|
|
96
232
|
adaptor: s.adaptor,
|
|
97
|
-
...s.openfn
|
|
233
|
+
...renameKeys(s.openfn, { uuid: "id" })
|
|
98
234
|
};
|
|
99
235
|
wfState.jobs.push(node);
|
|
100
236
|
}
|
|
101
237
|
Object.keys(s.next ?? {}).forEach((next) => {
|
|
102
238
|
const rules = s.next[next];
|
|
103
239
|
const e = {
|
|
104
|
-
id: rules.openfn?.
|
|
240
|
+
id: rules.openfn?.uuid ?? randomUUID(),
|
|
105
241
|
target_job_id: lookup[next],
|
|
106
242
|
enabled: !rules.disabled
|
|
107
243
|
};
|
|
@@ -127,16 +263,16 @@ import nodepath from "path";
|
|
|
127
263
|
var stringify = (json) => JSON.stringify(json, null, 2);
|
|
128
264
|
function to_fs_default(project) {
|
|
129
265
|
const files = {};
|
|
130
|
-
const { path:
|
|
131
|
-
files[
|
|
266
|
+
const { path: path3, content } = extractRepoConfig(project);
|
|
267
|
+
files[path3] = content;
|
|
132
268
|
for (const wf of project.workflows) {
|
|
133
|
-
const { path:
|
|
134
|
-
files[
|
|
269
|
+
const { path: path4, content: content2 } = extractWorkflow(project, wf.id);
|
|
270
|
+
files[path4] = content2;
|
|
135
271
|
for (const s of wf.steps) {
|
|
136
272
|
const result = extractStep(project, wf.id, s.id);
|
|
137
273
|
if (result) {
|
|
138
|
-
const { path:
|
|
139
|
-
files[
|
|
274
|
+
const { path: path5, content: content3 } = result;
|
|
275
|
+
files[path5] = content3;
|
|
140
276
|
}
|
|
141
277
|
}
|
|
142
278
|
}
|
|
@@ -149,10 +285,12 @@ var extractWorkflow = (project, workflowId2) => {
|
|
|
149
285
|
throw new Error(`workflow not found: ${workflowId2}`);
|
|
150
286
|
}
|
|
151
287
|
const root = project.repo?.workflowRoot ?? "workflows/";
|
|
152
|
-
const
|
|
288
|
+
const path3 = nodepath.join(root, workflow.id, workflow.id);
|
|
153
289
|
const wf = {
|
|
154
290
|
id: workflow.id,
|
|
155
291
|
name: workflow.name,
|
|
292
|
+
// Note: if no options are defined, options will serialize to an empty object
|
|
293
|
+
// Not crazy about this - maybe we should do something better? Or do we like the consistency?
|
|
156
294
|
options: workflow.options,
|
|
157
295
|
steps: workflow.steps.map((step) => {
|
|
158
296
|
const { openfn, expression, ...mapped } = step;
|
|
@@ -162,7 +300,7 @@ var extractWorkflow = (project, workflowId2) => {
|
|
|
162
300
|
return mapped;
|
|
163
301
|
})
|
|
164
302
|
};
|
|
165
|
-
return handleOutput(wf,
|
|
303
|
+
return handleOutput(wf, path3, format);
|
|
166
304
|
};
|
|
167
305
|
var extractStep = (project, workflowId2, stepId) => {
|
|
168
306
|
const workflow = project.getWorkflow(workflowId2);
|
|
@@ -175,9 +313,9 @@ var extractStep = (project, workflowId2, stepId) => {
|
|
|
175
313
|
}
|
|
176
314
|
if (step.expression) {
|
|
177
315
|
const root = project.config?.workflowRoot ?? "workflows/";
|
|
178
|
-
const
|
|
316
|
+
const path3 = nodepath.join(root, `${workflow.id}/${step.id}.js`);
|
|
179
317
|
const content = step.expression;
|
|
180
|
-
return { path:
|
|
318
|
+
return { path: path3, content };
|
|
181
319
|
}
|
|
182
320
|
};
|
|
183
321
|
var extractRepoConfig = (project) => {
|
|
@@ -190,7 +328,7 @@ var extractRepoConfig = (project) => {
|
|
|
190
328
|
return handleOutput(config, "openfn", format);
|
|
191
329
|
};
|
|
192
330
|
var handleOutput = (data, filePath, format) => {
|
|
193
|
-
const
|
|
331
|
+
const path3 = `${filePath}.${format}`;
|
|
194
332
|
let content;
|
|
195
333
|
if (format === "json") {
|
|
196
334
|
content = stringify(data, null, 2);
|
|
@@ -199,7 +337,7 @@ var handleOutput = (data, filePath, format) => {
|
|
|
199
337
|
} else {
|
|
200
338
|
throw new Error(`Unrecognised format: ${format}`);
|
|
201
339
|
}
|
|
202
|
-
return { path:
|
|
340
|
+
return { path: path3, content };
|
|
203
341
|
};
|
|
204
342
|
|
|
205
343
|
// src/parse/from-app-state.ts
|
|
@@ -233,7 +371,7 @@ var from_app_state_default = (state, config) => {
|
|
|
233
371
|
};
|
|
234
372
|
const repoConfig = {};
|
|
235
373
|
proj.openfn = {
|
|
236
|
-
|
|
374
|
+
uuid: id,
|
|
237
375
|
endpoint: config.endpoint,
|
|
238
376
|
env: config.env,
|
|
239
377
|
inserted_at,
|
|
@@ -257,7 +395,7 @@ var mapTriggerEdgeCondition = (edge) => {
|
|
|
257
395
|
e.condition = edge.condition_expression;
|
|
258
396
|
}
|
|
259
397
|
e.openfn = {
|
|
260
|
-
|
|
398
|
+
uuid: edge.id
|
|
261
399
|
};
|
|
262
400
|
return e;
|
|
263
401
|
};
|
|
@@ -267,7 +405,7 @@ var mapWorkflow2 = (workflow) => {
|
|
|
267
405
|
id: slugify(workflow.name),
|
|
268
406
|
name: workflow.name,
|
|
269
407
|
steps: [],
|
|
270
|
-
openfn: remoteProps
|
|
408
|
+
openfn: renameKeys(remoteProps, { id: "uuid" })
|
|
271
409
|
};
|
|
272
410
|
workflow.triggers.forEach((trigger) => {
|
|
273
411
|
const { type, ...otherProps } = trigger;
|
|
@@ -277,7 +415,7 @@ var mapWorkflow2 = (workflow) => {
|
|
|
277
415
|
mapped.steps.push({
|
|
278
416
|
id: "trigger",
|
|
279
417
|
type,
|
|
280
|
-
openfn: otherProps,
|
|
418
|
+
openfn: renameKeys(otherProps, { id: "uuid" }),
|
|
281
419
|
next: connectedEdges.reduce((obj, edge) => {
|
|
282
420
|
const target = jobs.find((j) => j.id === edge.target_job_id);
|
|
283
421
|
if (!target) {
|
|
@@ -298,7 +436,7 @@ var mapWorkflow2 = (workflow) => {
|
|
|
298
436
|
name: name2,
|
|
299
437
|
expression,
|
|
300
438
|
adaptor,
|
|
301
|
-
openfn: remoteProps2
|
|
439
|
+
openfn: renameKeys(remoteProps2, { id: "uuid" })
|
|
302
440
|
};
|
|
303
441
|
if (outboundEdges.length) {
|
|
304
442
|
s.next = outboundEdges.reduce((next, edge) => {
|
|
@@ -382,7 +520,7 @@ var parseProject = async (options = {}) => {
|
|
|
382
520
|
if (wf.id && Array.isArray(wf.steps)) {
|
|
383
521
|
const wfState = (state && state.getWorkflow(wf.id)) ?? {};
|
|
384
522
|
wf.openfn = {
|
|
385
|
-
|
|
523
|
+
uuid: wfState.openfn?.uuid ?? null
|
|
386
524
|
// TODO do we need to transfer more stuff?
|
|
387
525
|
};
|
|
388
526
|
console.log("Loading workflow at ", filePath);
|
|
@@ -398,14 +536,14 @@ var parseProject = async (options = {}) => {
|
|
|
398
536
|
}
|
|
399
537
|
}
|
|
400
538
|
const uuid = state?.getUUID(wf.id, step.id) ?? null;
|
|
401
|
-
step.openfn = {
|
|
539
|
+
step.openfn = { uuid };
|
|
402
540
|
for (const target in step.next || {}) {
|
|
403
541
|
if (typeof step.next[target] === "boolean") {
|
|
404
542
|
const bool = step.next[target];
|
|
405
543
|
step.next[target] = { condition: bool };
|
|
406
544
|
}
|
|
407
545
|
const uuid2 = state?.getUUID(wf.id, step.id, target) ?? null;
|
|
408
|
-
step.next[target].openfn = {
|
|
546
|
+
step.next[target].openfn = { uuid: uuid2 };
|
|
409
547
|
}
|
|
410
548
|
}
|
|
411
549
|
workflows.push(wf);
|
|
@@ -427,7 +565,7 @@ var getUuidForStep = (project, workflow, stepId) => {
|
|
|
427
565
|
}
|
|
428
566
|
for (const step of wf.steps) {
|
|
429
567
|
if (step.id === stepId) {
|
|
430
|
-
return step.openfn?.
|
|
568
|
+
return step.openfn?.uuid ?? null;
|
|
431
569
|
}
|
|
432
570
|
}
|
|
433
571
|
return null;
|
|
@@ -443,7 +581,7 @@ var getUuidForEdge = (project, workflow, from, to) => {
|
|
|
443
581
|
if (step.id === from) {
|
|
444
582
|
for (const edge in step.next) {
|
|
445
583
|
if (edge === to) {
|
|
446
|
-
return step.next[edge].openfn?.
|
|
584
|
+
return step.next[edge].openfn?.uuid ?? null;
|
|
447
585
|
}
|
|
448
586
|
}
|
|
449
587
|
break;
|
|
@@ -452,6 +590,359 @@ var getUuidForEdge = (project, workflow, from, to) => {
|
|
|
452
590
|
return null;
|
|
453
591
|
};
|
|
454
592
|
|
|
593
|
+
// src/merge/merge-project.ts
|
|
594
|
+
import { defaultsDeep, isEmpty } from "lodash-es";
|
|
595
|
+
|
|
596
|
+
// src/util/base-merge.ts
|
|
597
|
+
import { pick, assign } from "lodash-es";
|
|
598
|
+
function baseMerge(target, source, sourceKeys, assigns = {}) {
|
|
599
|
+
const pickedSource = sourceKeys ? pick(source, sourceKeys) : source;
|
|
600
|
+
return assign(target, { ...pickedSource, ...assigns });
|
|
601
|
+
}
|
|
602
|
+
|
|
603
|
+
// src/merge/merge-node.ts
|
|
604
|
+
function mergeWorkflows(source, target, mappings) {
|
|
605
|
+
const targetNodes = {};
|
|
606
|
+
for (const tstep2 of target.steps)
|
|
607
|
+
targetNodes[tstep2.openfn.uuid || tstep2.id] = tstep2;
|
|
608
|
+
const steps = [];
|
|
609
|
+
for (const sstep of source.steps) {
|
|
610
|
+
let newNode = sstep;
|
|
611
|
+
if (typeof mappings.nodes[sstep.id] === "string") {
|
|
612
|
+
const preservedId = mappings.nodes[sstep.id];
|
|
613
|
+
const preservedEdgeIds = {};
|
|
614
|
+
for (const toNode of Object.keys(
|
|
615
|
+
typeof sstep.next === "string" ? { [tstep.next]: true } : sstep.next || {}
|
|
616
|
+
)) {
|
|
617
|
+
const key = sstep.id + "-" + toNode;
|
|
618
|
+
if (typeof mappings.edges[key] === "string") {
|
|
619
|
+
const preservedEdgeId = mappings.edges[key];
|
|
620
|
+
const toEdge = sstep.next?.[toNode] || {};
|
|
621
|
+
preservedEdgeIds[toNode] = sstep.next[toNode] = {
|
|
622
|
+
...toEdge,
|
|
623
|
+
openfn: { ...toEdge?.openfn || {}, uuid: preservedEdgeId }
|
|
624
|
+
};
|
|
625
|
+
}
|
|
626
|
+
}
|
|
627
|
+
newNode = baseMerge(targetNodes[preservedId], sstep, [
|
|
628
|
+
"id",
|
|
629
|
+
"name",
|
|
630
|
+
"adaptor",
|
|
631
|
+
"expression",
|
|
632
|
+
"next",
|
|
633
|
+
"previous"
|
|
634
|
+
]);
|
|
635
|
+
} else {
|
|
636
|
+
}
|
|
637
|
+
steps.push(newNode);
|
|
638
|
+
}
|
|
639
|
+
const newSource = { ...source, steps };
|
|
640
|
+
return {
|
|
641
|
+
...target,
|
|
642
|
+
...newSource,
|
|
643
|
+
openfn: { ...target.openfn }
|
|
644
|
+
// preserving the target uuid. we might need a proper helper function for this.
|
|
645
|
+
};
|
|
646
|
+
}
|
|
647
|
+
|
|
648
|
+
// src/merge/map-uuids.ts
|
|
649
|
+
var map_uuids_default = (source, target) => {
|
|
650
|
+
const targetEdges = target.getAllEdges();
|
|
651
|
+
const sourceEdges = source.getAllEdges();
|
|
652
|
+
const initialMapping = mapStepsById(source.steps, target.steps);
|
|
653
|
+
let nodeMapping = initialMapping.mapping;
|
|
654
|
+
let unmappedSource = initialMapping.pool.source;
|
|
655
|
+
let unmappedTarget = initialMapping.pool.target;
|
|
656
|
+
let idMap = initialMapping.idMap;
|
|
657
|
+
mapRootNodes(source, target, idMap, nodeMapping);
|
|
658
|
+
const getMappedId = (id) => idMap.get(id) || id;
|
|
659
|
+
let prevUnmapped = -1;
|
|
660
|
+
let remainingUnmapped = findRemainingUnmappedNodes(unmappedSource, idMap);
|
|
661
|
+
let lastIteration = false;
|
|
662
|
+
while (prevUnmapped !== remainingUnmapped.length || !lastIteration) {
|
|
663
|
+
lastIteration = prevUnmapped === remainingUnmapped.length;
|
|
664
|
+
for (const sourceStep of remainingUnmapped) {
|
|
665
|
+
const candidates = getUnmappedCandidates(unmappedTarget, idMap);
|
|
666
|
+
const mappingResult = findBestMatch(
|
|
667
|
+
sourceStep,
|
|
668
|
+
candidates,
|
|
669
|
+
sourceEdges,
|
|
670
|
+
targetEdges,
|
|
671
|
+
getMappedId,
|
|
672
|
+
lastIteration
|
|
673
|
+
// isLastIteration
|
|
674
|
+
);
|
|
675
|
+
if (mappingResult) {
|
|
676
|
+
nodeMapping[sourceStep.id] = getStepUuid(mappingResult);
|
|
677
|
+
idMap.set(sourceStep.id, mappingResult.id);
|
|
678
|
+
}
|
|
679
|
+
}
|
|
680
|
+
prevUnmapped = remainingUnmapped.length;
|
|
681
|
+
remainingUnmapped = findRemainingUnmappedNodes(unmappedSource, idMap);
|
|
682
|
+
}
|
|
683
|
+
const edgeMapping = mapEdges(
|
|
684
|
+
sourceEdges,
|
|
685
|
+
targetEdges,
|
|
686
|
+
idMap,
|
|
687
|
+
target.getUUID.bind(target)
|
|
688
|
+
);
|
|
689
|
+
return {
|
|
690
|
+
nodes: nodeMapping,
|
|
691
|
+
edges: edgeMapping
|
|
692
|
+
};
|
|
693
|
+
};
|
|
694
|
+
function mapRootNodes(source, target, idMap, nodeMapping) {
|
|
695
|
+
const sourceRoot = source.getRoot();
|
|
696
|
+
const targetRoot = target.getRoot();
|
|
697
|
+
if (sourceRoot && targetRoot) {
|
|
698
|
+
idMap.set(sourceRoot.id, targetRoot.id);
|
|
699
|
+
nodeMapping[sourceRoot.id] = getStepUuid(targetRoot);
|
|
700
|
+
}
|
|
701
|
+
}
|
|
702
|
+
function findRemainingUnmappedNodes(unmappedSource, idMap) {
|
|
703
|
+
return unmappedSource.filter((step) => step.id && !idMap.has(step.id));
|
|
704
|
+
}
|
|
705
|
+
function getUnmappedCandidates(unmappedTarget, idMap) {
|
|
706
|
+
const mappedIds = new Set(idMap.values());
|
|
707
|
+
return unmappedTarget.filter((step) => !mappedIds.has(step.id));
|
|
708
|
+
}
|
|
709
|
+
function findBestMatch(sourceStep, candidates, sourceEdges, targetEdges, getMappedId, isLastIteration) {
|
|
710
|
+
if (candidates.length === 0)
|
|
711
|
+
return null;
|
|
712
|
+
let bestCandidates = candidates;
|
|
713
|
+
let topResult = null;
|
|
714
|
+
let didStructuralFilter = false;
|
|
715
|
+
const parentResult = mapStepByParent(
|
|
716
|
+
sourceStep,
|
|
717
|
+
bestCandidates,
|
|
718
|
+
sourceEdges,
|
|
719
|
+
targetEdges,
|
|
720
|
+
getMappedId
|
|
721
|
+
);
|
|
722
|
+
if (parentResult.candidates.length > 0) {
|
|
723
|
+
bestCandidates = parentResult.candidates;
|
|
724
|
+
topResult = bestCandidates[0];
|
|
725
|
+
didStructuralFilter ||= parentResult.filtered;
|
|
726
|
+
}
|
|
727
|
+
if (bestCandidates.length === 1) {
|
|
728
|
+
return bestCandidates[0];
|
|
729
|
+
}
|
|
730
|
+
const childrenResult = mapStepByChildren(
|
|
731
|
+
sourceStep,
|
|
732
|
+
bestCandidates,
|
|
733
|
+
sourceEdges,
|
|
734
|
+
targetEdges,
|
|
735
|
+
getMappedId
|
|
736
|
+
);
|
|
737
|
+
if (childrenResult.candidates.length > 0) {
|
|
738
|
+
bestCandidates = childrenResult.candidates;
|
|
739
|
+
topResult = bestCandidates[0];
|
|
740
|
+
didStructuralFilter ||= childrenResult.filtered;
|
|
741
|
+
}
|
|
742
|
+
if (bestCandidates.length === 1) {
|
|
743
|
+
return bestCandidates[0];
|
|
744
|
+
}
|
|
745
|
+
const expressionCandidates = mapStepByExpression(sourceStep, bestCandidates);
|
|
746
|
+
if (expressionCandidates.length > 0) {
|
|
747
|
+
bestCandidates = expressionCandidates;
|
|
748
|
+
}
|
|
749
|
+
if (bestCandidates.length === 1) {
|
|
750
|
+
return bestCandidates[0];
|
|
751
|
+
}
|
|
752
|
+
if (isLastIteration && didStructuralFilter && topResult) {
|
|
753
|
+
return topResult;
|
|
754
|
+
}
|
|
755
|
+
return null;
|
|
756
|
+
}
|
|
757
|
+
function mapEdges(sourceEdges, targetEdges, idMap, getTargetUUID) {
|
|
758
|
+
const edgeMapping = {};
|
|
759
|
+
for (const [parentId, children] of Object.entries(sourceEdges)) {
|
|
760
|
+
for (const childId of children) {
|
|
761
|
+
const sourceEdgeKey = `${parentId}-${childId}`;
|
|
762
|
+
const mappedParentId = idMap.get(parentId) || parentId;
|
|
763
|
+
const mappedChildId = idMap.get(childId) || childId;
|
|
764
|
+
const targetEdgeId = getTargetUUID(`${mappedParentId}-${mappedChildId}`);
|
|
765
|
+
if (targetEdgeId) {
|
|
766
|
+
edgeMapping[sourceEdgeKey] = targetEdgeId;
|
|
767
|
+
}
|
|
768
|
+
}
|
|
769
|
+
}
|
|
770
|
+
return edgeMapping;
|
|
771
|
+
}
|
|
772
|
+
function getStepUuid(step) {
|
|
773
|
+
return step?.openfn?.uuid || step.id;
|
|
774
|
+
}
|
|
775
|
+
function mapStepsById(source, target) {
|
|
776
|
+
const targetIndex = {};
|
|
777
|
+
const mapping = {};
|
|
778
|
+
const idMap = /* @__PURE__ */ new Map();
|
|
779
|
+
for (const targetStep of target) {
|
|
780
|
+
targetIndex[targetStep.id] = targetStep;
|
|
781
|
+
}
|
|
782
|
+
const unmappedSourceIndices = [];
|
|
783
|
+
const unmappedTarget = [...target];
|
|
784
|
+
for (let i = 0; i < source.length; i++) {
|
|
785
|
+
const sourceStep = source[i];
|
|
786
|
+
const matchingTarget = targetIndex[sourceStep.id];
|
|
787
|
+
if (matchingTarget) {
|
|
788
|
+
mapping[sourceStep.id] = getStepUuid(matchingTarget);
|
|
789
|
+
idMap.set(sourceStep.id, matchingTarget.id);
|
|
790
|
+
const targetIndex2 = unmappedTarget.findIndex(
|
|
791
|
+
(t) => t.id === matchingTarget.id
|
|
792
|
+
);
|
|
793
|
+
if (targetIndex2 !== -1) {
|
|
794
|
+
unmappedTarget.splice(targetIndex2, 1);
|
|
795
|
+
}
|
|
796
|
+
} else {
|
|
797
|
+
unmappedSourceIndices.push(i);
|
|
798
|
+
}
|
|
799
|
+
}
|
|
800
|
+
return {
|
|
801
|
+
mapping,
|
|
802
|
+
idMap,
|
|
803
|
+
pool: {
|
|
804
|
+
source: source.filter((_, i) => unmappedSourceIndices.includes(i)),
|
|
805
|
+
target: unmappedTarget
|
|
806
|
+
}
|
|
807
|
+
};
|
|
808
|
+
}
|
|
809
|
+
function getParent(id, edges) {
|
|
810
|
+
return Object.entries(edges).filter(([, children]) => children.includes(id)).map(([parentId]) => parentId);
|
|
811
|
+
}
|
|
812
|
+
function findByExpression(expression, steps) {
|
|
813
|
+
return steps.filter(
|
|
814
|
+
(step) => step.expression && step.expression.trim() && step.expression === expression
|
|
815
|
+
);
|
|
816
|
+
}
|
|
817
|
+
function findByParent(parentIds, edges, steps) {
|
|
818
|
+
const matches = [];
|
|
819
|
+
for (const parentId of parentIds) {
|
|
820
|
+
const children = edges[parentId];
|
|
821
|
+
if (!children || children.length === 0)
|
|
822
|
+
continue;
|
|
823
|
+
const matchingSteps = steps.filter((step) => children.includes(step.id));
|
|
824
|
+
matches.push(...matchingSteps);
|
|
825
|
+
}
|
|
826
|
+
return matches;
|
|
827
|
+
}
|
|
828
|
+
function findByChildren(childIds, edges, steps) {
|
|
829
|
+
const childMatchCount = {};
|
|
830
|
+
for (const [parentId, children] of Object.entries(edges)) {
|
|
831
|
+
const matchCount = children.filter(
|
|
832
|
+
(childId) => childIds.includes(childId)
|
|
833
|
+
).length;
|
|
834
|
+
if (matchCount > 0) {
|
|
835
|
+
childMatchCount[parentId] = matchCount;
|
|
836
|
+
}
|
|
837
|
+
}
|
|
838
|
+
const sortedParentIds = Object.entries(childMatchCount).sort(([, count1], [, count2]) => count2 - count1).map(([parentId]) => parentId);
|
|
839
|
+
const stepIndex = steps.reduce((index, step) => {
|
|
840
|
+
index[step.id] = step;
|
|
841
|
+
return index;
|
|
842
|
+
}, {});
|
|
843
|
+
return sortedParentIds.filter((parentId) => stepIndex[parentId]).map((parentId) => stepIndex[parentId]);
|
|
844
|
+
}
|
|
845
|
+
function mapStepByParent(sourceStep, candidates, sourceEdges, targetEdges, getMappedId) {
|
|
846
|
+
const sourceParents = getParent(sourceStep.id, sourceEdges);
|
|
847
|
+
if (sourceParents.length === 0) {
|
|
848
|
+
return { filtered: false, candidates };
|
|
849
|
+
}
|
|
850
|
+
const mappedParentIds = sourceParents.map(getMappedId);
|
|
851
|
+
const matchingCandidates = findByParent(
|
|
852
|
+
mappedParentIds,
|
|
853
|
+
targetEdges,
|
|
854
|
+
candidates
|
|
855
|
+
);
|
|
856
|
+
return {
|
|
857
|
+
filtered: true,
|
|
858
|
+
candidates: matchingCandidates
|
|
859
|
+
};
|
|
860
|
+
}
|
|
861
|
+
function mapStepByChildren(sourceStep, candidates, sourceEdges, targetEdges, getMappedId) {
|
|
862
|
+
const sourceChildren = sourceEdges[sourceStep.id];
|
|
863
|
+
if (!sourceChildren) {
|
|
864
|
+
return { filtered: false, candidates };
|
|
865
|
+
}
|
|
866
|
+
const mappedChildIds = sourceChildren.map(getMappedId);
|
|
867
|
+
const matchingCandidates = findByChildren(
|
|
868
|
+
mappedChildIds,
|
|
869
|
+
targetEdges,
|
|
870
|
+
candidates
|
|
871
|
+
);
|
|
872
|
+
return {
|
|
873
|
+
filtered: true,
|
|
874
|
+
candidates: matchingCandidates
|
|
875
|
+
};
|
|
876
|
+
}
|
|
877
|
+
function mapStepByExpression(sourceStep, candidates) {
|
|
878
|
+
const expression = sourceStep.expression;
|
|
879
|
+
return findByExpression(expression, candidates);
|
|
880
|
+
}
|
|
881
|
+
|
|
882
|
+
// src/util/get-duplicates.ts
|
|
883
|
+
function getDuplicates(arr) {
|
|
884
|
+
const hmap = {};
|
|
885
|
+
const duplicates = /* @__PURE__ */ new Set();
|
|
886
|
+
for (let i = 0; i < arr.length; i++) {
|
|
887
|
+
const item = arr[i];
|
|
888
|
+
if (hmap[item])
|
|
889
|
+
duplicates.add(item);
|
|
890
|
+
else
|
|
891
|
+
hmap[item] = true;
|
|
892
|
+
}
|
|
893
|
+
return Array.from(duplicates);
|
|
894
|
+
}
|
|
895
|
+
|
|
896
|
+
// src/merge/merge-project.ts
// Merge a source project (e.g. staging) into a target project (e.g. main),
// returning the data for a new merged Project.
// `options.workflowMappings` maps source workflow ids to target workflow ids;
// `options.removeUnmapped` drops target workflows that nothing merged into.
function merge(source, target, options) {
  // Fill in missing options (note: lodash defaultsDeep mutates and returns
  // its first argument when one is provided).
  options = defaultsDeep(options, {
    workflowMappings: {},
    removeUnmapped: false
  });

  // Two source workflows must not merge into the same target workflow.
  const duplicateTargets = getDuplicates(
    Object.values(options?.workflowMappings)
  );
  if (duplicateTargets.length) {
    throw new Error(
      `The following target workflows have multiple source workflows merging into them: ${duplicateTargets.join(", ")}`
    );
  }

  const finalWorkflows = [];
  const usedTargetIds = /* @__PURE__ */ new Set();

  // When explicit mappings are supplied, only mapped source workflows
  // participate; otherwise every source workflow does.
  const noMappings = isEmpty(options?.workflowMappings);
  const sourceWorkflows = source.workflows.filter(
    (w) => noMappings || !!options?.workflowMappings[w.id]
  );

  for (const sourceWorkflow of sourceWorkflows) {
    // A workflow maps to an explicit target id, or to its own id by default.
    const targetId = options.workflowMappings?.[sourceWorkflow.id] ?? sourceWorkflow.id;
    const targetWorkflow = target.getWorkflow(targetId);
    if (targetWorkflow) {
      usedTargetIds.add(targetWorkflow.id);
      // Work out step/edge UUID correspondences, then merge the pair.
      const mappings = map_uuids_default(sourceWorkflow, targetWorkflow);
      finalWorkflows.push(
        mergeWorkflows(sourceWorkflow, targetWorkflow, mappings)
      );
    } else {
      // No counterpart in the target: carry the source workflow over as-is.
      finalWorkflows.push(sourceWorkflow);
    }
  }

  // Unless asked to remove them, keep target workflows nothing merged into.
  if (!options?.removeUnmapped) {
    for (const targetWorkflow of target.workflows) {
      if (!usedTargetIds.has(targetWorkflow.id)) {
        finalWorkflows.push(targetWorkflow);
      }
    }
  }

  return new Project(
    baseMerge(target, source, ["collections"], { workflows: finalWorkflows })
  );
}
|
|
945
|
+
|
|
455
946
|
// src/Project.ts
|
|
456
947
|
var setConfigDefaults = (config = {}) => ({
|
|
457
948
|
workflowRoot: config.workflowRoot ?? "workflows",
|
|
@@ -484,11 +975,16 @@ var Project = class {
|
|
|
484
975
|
// these should be shared across projects
|
|
485
976
|
// and saved to an openfn.yaml file
|
|
486
977
|
repo;
|
|
978
|
+
// load a project from a state file (project.json)
|
|
979
|
+
// or from a path (the file system)
|
|
980
|
+
// TODO presumably we can detect a state file? Not a big deal?
|
|
981
|
+
// collections for the project
|
|
982
|
+
// TODO to be well typed
|
|
983
|
+
collections;
|
|
487
984
|
static from(type, data, options) {
|
|
488
985
|
if (type === "state") {
|
|
489
986
|
return from_app_state_default(data, options);
|
|
490
|
-
}
|
|
491
|
-
if (type === "fs") {
|
|
987
|
+
} else if (type === "fs") {
|
|
492
988
|
return parseProject(data, options);
|
|
493
989
|
}
|
|
494
990
|
throw new Error(`Didn't recognize type ${type}`);
|
|
@@ -496,18 +992,23 @@ var Project = class {
|
|
|
496
992
|
// Diff two projects
|
|
497
993
|
static diff(a, b) {
|
|
498
994
|
}
|
|
995
|
+
// Merge a source project (staging) into the target project (main)
|
|
996
|
+
// Returns a new Project
|
|
997
|
+
// TODO: throw if histories have diverged
|
|
998
|
+
static merge(source, target, options) {
|
|
999
|
+
return merge(source, target, options);
|
|
1000
|
+
}
|
|
499
1001
|
// env is excluded because it's not really part of the project
|
|
500
1002
|
// uh maybe
|
|
501
1003
|
// maybe this second arg is config - like env, branch rules, serialisation rules
|
|
502
1004
|
// stuff that's external to the actual project and managed by the repo
|
|
503
1005
|
constructor(data, repoConfig = {}) {
|
|
504
1006
|
this.repo = setConfigDefaults(repoConfig);
|
|
505
|
-
this.id = data.id;
|
|
506
1007
|
this.name = data.name;
|
|
507
1008
|
this.description = data.description;
|
|
508
1009
|
this.openfn = data.openfn;
|
|
509
1010
|
this.options = data.options;
|
|
510
|
-
this.workflows = data.workflows;
|
|
1011
|
+
this.workflows = data.workflows?.map((w) => new Workflow_default(w)) ?? [];
|
|
511
1012
|
this.collections = data.collections;
|
|
512
1013
|
this.credentials = data.credentials;
|
|
513
1014
|
this.meta = data.meta;
|
|
@@ -523,11 +1024,6 @@ var Project = class {
|
|
|
523
1024
|
// this builds a version string for the current state
|
|
524
1025
|
getVersionHash() {
|
|
525
1026
|
}
|
|
526
|
-
// take a second project and merge its data into this one
|
|
527
|
-
// Throws if there's a conflict, unless force is true
|
|
528
|
-
// It's basically an overwrite
|
|
529
|
-
merge(project, options) {
|
|
530
|
-
}
|
|
531
1027
|
// what else might we need?
|
|
532
1028
|
// get workflow by name or id
|
|
533
1029
|
// this is fuzzy, but is that wrong?
|
|
@@ -553,9 +1049,73 @@ var Project = class {
|
|
|
553
1049
|
}
|
|
554
1050
|
};
|
|
555
1051
|
|
|
1052
|
+
// src/util/path-exists.ts
|
|
1053
|
+
import fs2 from "fs";
|
|
1054
|
+
// Return true iff `fpath` exists AND is of the requested kind
// ("file" or "directory"). Any stat failure (missing path, bad
// permissions) is treated as "does not exist" and yields false.
function pathExists(fpath, type) {
  try {
    const stat = fs2.statSync(fpath);
    if (type === "file") {
      return stat.isFile();
    }
    if (type === "directory") {
      return stat.isDirectory();
    }
    // Unknown type argument: never a match.
    return false;
  } catch {
    return false;
  }
}
|
|
1066
|
+
|
|
1067
|
+
// src/Workspace.ts
import path2 from "path";
import fs3 from "fs";
var PROJECTS_DIRECTORY = ".projects";
var OPENFN_YAML_FILE = "openfn.yaml";
var PROJECT_EXTENSIONS = [".yaml", ".yml"];
// A Workspace is a directory containing an openfn.yaml config file and,
// optionally, a .projects directory of yaml project state files.
var Workspace = class {
  config;
  projects = [];
  isValid = false;
  constructor(workspacePath) {
    const configPath = path2.join(workspacePath, OPENFN_YAML_FILE);
    const projectsDir = path2.join(workspacePath, PROJECTS_DIRECTORY);
    // The workspace is only valid when openfn.yaml is present.
    if (pathExists(configPath, "file")) {
      this.isValid = true;
      const raw = fs3.readFileSync(configPath, "utf-8");
      this.config = yamlToJson(raw);
    }
    // Load every yaml/yml state file found under .projects.
    if (this.isValid && pathExists(projectsDir, "directory")) {
      this.projects = fs3
        .readdirSync(projectsDir)
        .filter((fileName) => PROJECT_EXTENSIONS.includes(path2.extname(fileName)))
        .map((fileName) => {
          const raw = fs3.readFileSync(path2.join(projectsDir, fileName), "utf-8");
          return from_app_state_default(raw, { format: "yaml" });
        });
    }
  }
  // All projects loaded from the .projects directory.
  list() {
    return this.projects;
  }
  // Look up a project. NOTE(review): the parameter is called `id` but is
  // compared against project *names* — presumably projects are keyed by
  // name here; confirm against callers.
  get(id) {
    return this.projects.find((p) => p.name === id);
  }
  // The project named by the workspace config, if loaded.
  getActiveProject() {
    const activeName = this.config?.name;
    return this.projects.find((p) => p.name === activeName);
  }
  getConfig() {
    return this.config;
  }
  get activeProjectId() {
    return this.config?.name;
  }
  get valid() {
    return this.isValid;
  }
};
|
|
1114
|
+
|
|
556
1115
|
// src/index.ts
|
|
557
1116
|
var src_default = Project;
|
|
558
1117
|
export {
|
|
1118
|
+
Workspace,
|
|
559
1119
|
src_default as default,
|
|
560
1120
|
jsonToYaml,
|
|
561
1121
|
yamlToJson
|
package/package.json
CHANGED
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
{
|
|
2
2
|
"name": "@openfn/project",
|
|
3
|
-
"version": "0.
|
|
3
|
+
"version": "0.3.0",
|
|
4
4
|
"description": "Read, serialize, replicate and sync OpenFn projects",
|
|
5
5
|
"type": "module",
|
|
6
6
|
"exports": {
|
|
@@ -17,6 +17,7 @@
|
|
|
17
17
|
"author": "Open Function Group <admin@openfn.org>",
|
|
18
18
|
"license": "ISC",
|
|
19
19
|
"devDependencies": {
|
|
20
|
+
"@types/lodash": "^4.17.20",
|
|
20
21
|
"@types/mock-fs": "~4.13.4",
|
|
21
22
|
"ava": "5.3.1",
|
|
22
23
|
"mock-fs": "^5.5.0",
|
|
@@ -26,8 +27,9 @@
|
|
|
26
27
|
"typescript": "^5.9.2"
|
|
27
28
|
},
|
|
28
29
|
"dependencies": {
|
|
29
|
-
"glob": "^11.0.
|
|
30
|
-
"
|
|
30
|
+
"glob": "^11.0.2",
|
|
31
|
+
"lodash-es": "^4.17.21",
|
|
32
|
+
"yaml": "^2.2.2",
|
|
31
33
|
"@openfn/lexicon": "^1.2.3",
|
|
32
34
|
"@openfn/logger": "1.0.6"
|
|
33
35
|
},
|