@openfn/project 0.5.1 → 0.7.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/index.d.ts +49 -46
- package/dist/index.js +288 -104
- package/package.json +2 -2
package/dist/index.d.ts
CHANGED
@@ -1,4 +1,4 @@
-import * as l from '@openfn/lexicon';
+import * as l$1 from '@openfn/lexicon';
 
 type OpenfnMeta = {
     uuid?: string;
@@ -8,7 +8,7 @@ type WithMeta<T> = T & {
 };
 declare class Workflow {
     #private;
-    workflow: l.Workflow;
+    workflow: l$1.Workflow;
     index: {
         steps: {};
         edges: {};
@@ -18,22 +18,42 @@ declare class Workflow {
     name?: string;
     id: string;
     openfn: OpenfnMeta;
-    constructor(workflow: l.Workflow);
-    get steps(): WithMeta<l.Job | l.Trigger>[];
-    set(id: string, props: Parital<l.Job, l.Edge>): this;
-    get(id: any): WithMeta<l.Step | l.Trigger | l.Edge>;
+    constructor(workflow: l$1.Workflow);
+    get steps(): WithMeta<l$1.Job | l$1.Trigger>[];
+    set(id: string, props: Parital<l$1.Job, l$1.Edge>): this;
+    get(id: any): WithMeta<l$1.Step | l$1.Trigger | l$1.Edge>;
     meta(id: any): OpenfnMeta;
-    getEdge(from: any, to: any): WithMeta<l.ConditionalStepEdge>;
+    getEdge(from: any, to: any): WithMeta<l$1.ConditionalStepEdge>;
     getAllEdges(): Record<string, string[]>;
     getStep(id: string): Workflow["steps"][number];
-    getRoot(): (l.Trigger & {
+    getRoot(): (l$1.Trigger & {
         openfn?: OpenfnMeta;
     }) | undefined;
     getUUID(id: any): string;
     toJSON(): JSON.Object;
     getUUIDMap(): Record<string, string>;
+    getVersionHash(): string;
+    pushHistory(versionHash: string): void;
+    canMergeInto(target: Workflow): boolean;
 }
 
+type FileFormats$1 = 'yaml' | 'json';
+interface WorkspaceConfig {
+    dirs: {
+        workflows: string;
+        projects: string;
+    };
+    formats: {
+        openfn: FileFormats$1;
+        project: FileFormats$1;
+        workflow: FileFormats$1;
+    };
+}
+
+type FromPathConfig = {
+    config: WorkspaceConfig;
+};
+
 type FromFsConfig = {
     root: string;
 };
@@ -44,27 +64,23 @@ type MergeProjectOptions = Partial<{
     force: boolean;
 }>;
 
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-    endpoint: string;
-    env: string;
-    inserted_at: string;
-    updated_at: string;
-};
+declare class Workspace {
+    config?: WorkspaceConfig;
+    projectMeta: ProjectMeta;
+    private projects;
+    private projectPaths;
+    private isValid;
+    constructor(workspacePath: string);
+    loadProject(): void;
+    list(): Project[];
+    get(id: string): Project | undefined;
+    getProjectPath(id: string): string | undefined;
+    getActiveProject(): Project | undefined;
+    getConfig(): Partial<WorkspaceConfig>;
+    get activeProjectId(): any;
+    get valid(): boolean;
 }
+
 type RepoOptions = {
     /**default workflow root when serializing to fs (relative to openfn.yaml) */
     workflowRoot?: string;
@@ -83,19 +99,21 @@ declare class Project {
     options: any;
     meta: any;
     openfn?: l.ProjectConfig;
-
+    workspace?: Workspace;
+    config: WorkspaceConfig;
     collections: any;
     static from(type: 'state', data: any, options: Partial<l.ProjectConfig>): Project;
     static from(type: 'fs', options: FromFsConfig): Project;
     static from(type: 'path', data: string, options?: {
-        config?:
+        config?: FromPathConfig;
     }): Project;
     static diff(a: Project, b: Project): void;
     static merge(source: Project, target: Project, options: MergeProjectOptions): Project;
     constructor(data: l.Project, repoConfig?: RepoOptions);
+    setConfig(config: Partial<WorkspaceConfig>): void;
     serialize(type?: 'json' | 'yaml' | 'fs' | 'state', options?: any): any;
     getVersionHash(): void;
-    getWorkflow(
+    getWorkflow(idOrName: string): Workflow | undefined;
     getIdentifier(): string;
     compare(proj: Project): void;
     getUUID(workflow: string | Workflow, stepId: string, otherStep?: string): any;
@@ -108,21 +126,6 @@ declare class Project {
     }): {};
 }
 
-declare class Workspace {
-    private config?;
-    private projects;
-    private projectPaths;
-    private isValid;
-    constructor(workspacePath: string);
-    list(): Project[];
-    get(id: string): Project | undefined;
-    getProjectPath(id: string): string | undefined;
-    getActiveProject(): Project | undefined;
-    getConfig(): Partial<OpenfnConfig>;
-    get activeProjectId(): string | undefined;
-    get valid(): boolean;
-}
-
 declare function yamlToJson(y: string): any;
 declare function jsonToYaml(json: string | JSONObject): string;
 
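The Workflow declaration above gains three version-history methods (getVersionHash, pushHistory, canMergeInto) alongside the new WorkspaceConfig, FromPathConfig and Workspace types. A minimal usage sketch in TypeScript, assuming Workflow is exported from the package entry point and that the workflow object satisfies the lexicon Workflow type (the ids and fields below are illustrative, not taken from this diff):

import { Workflow } from '@openfn/project';

// Illustrative workflow data; the real shape is defined by @openfn/lexicon
const data = {
  id: 'patient-sync',
  name: 'Patient Sync',
  steps: [
    { id: 'fetch', name: 'fetch', adaptor: '@openfn/language-http', expression: 'fn(s => s)' },
  ],
};

const local = new Workflow(data);
const remote = new Workflow(data);

// Hash the workflow content and record it before local edits are made
local.pushHistory(local.getVersionHash());

// A merge is considered safe when the target's head hash already appears
// in this workflow's history (plus its current hash)
if (local.canMergeInto(remote)) {
  // safe to merge without losing changes
}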
package/dist/index.js
CHANGED
@@ -9,6 +9,74 @@ function slugify(text) {
   return text?.replace(/\W/g, " ").trim().replace(/\s+/g, "-").toLowerCase();
 }
 
+// src/util/version.ts
+import crypto from "node:crypto";
+var SHORT_HASH_LENGTH = 12;
+function isDefined(v) {
+  return v !== void 0 && v !== null;
+}
+var generateHash = (workflow, source = "cli") => {
+  const parts = [];
+  const wfKeys = ["name", "credentials"].sort();
+  const stepKeys = [
+    "name",
+    "adaptors",
+    "adaptor",
+    // there's ao adaptor & adaptors key in steps somehow.
+    "expression",
+    "configuration",
+    // assumes a string credential id
+    "expression"
+    // TODO need to model trigger types in this, which I think are currently ignored
+  ].sort();
+  const edgeKeys = [
+    "condition",
+    "label",
+    "disabled"
+    // This feels more like an option - should be excluded?
+  ].sort();
+  wfKeys.forEach((key) => {
+    if (isDefined(workflow[key])) {
+      parts.push(key, serializeValue(workflow[key]));
+    }
+  });
+  const steps = (workflow.steps || []).slice().sort((a, b) => {
+    const aName = a.name ?? "";
+    const bName = b.name ?? "";
+    return aName.localeCompare(bName);
+  });
+  for (const step of steps) {
+    stepKeys.forEach((key) => {
+      if (isDefined(step[key])) {
+        parts.push(key, serializeValue(step[key]));
+      }
+    });
+    if (step.next && Array.isArray(step.next)) {
+      const edges = step.next.slice().sort((a, b) => {
+        const aLabel = a.label || "";
+        const bLabel = b.label || "";
+        return aLabel.localeCompare(bLabel);
+      });
+      for (const edge of edges) {
+        edgeKeys.forEach((key) => {
+          if (isDefined(edge[key])) {
+            parts.push(key, serializeValue(edge[key]));
+          }
+        });
+      }
+    }
+  }
+  const str = parts.join("");
+  const hash = crypto.createHash("sha256").update(str).digest("hex");
+  return `${source}:${hash.substring(0, SHORT_HASH_LENGTH)}`;
+};
+function serializeValue(val) {
+  if (typeof val === "object") {
+    return JSON.stringify(val);
+  }
+  return String(val);
+}
+
 // src/Workflow.ts
 var clone = (obj) => JSON.parse(JSON.stringify(obj));
 var Workflow = class {
@@ -30,6 +98,7 @@ var Workflow = class {
       // uuid to ids
     };
     this.workflow = clone(workflow);
+    this.workflow.history = workflow.history?.length ? workflow.history : [];
     const { id, name, openfn, steps, ...options } = workflow;
     if (!(id || name)) {
      throw new Error("A Workflow MUST have a name or id");
@@ -138,6 +207,23 @@ var Workflow = class {
   getUUIDMap() {
     return this.index.uuid;
   }
+  getVersionHash() {
+    return generateHash(this);
+  }
+  pushHistory(versionHash) {
+    this.workflow.history?.push(versionHash);
+  }
+  // return true if the current workflow can be merged into the target workflow without losing any changes
+  canMergeInto(target) {
+    const thisHistory = this.workflow.history?.concat(this.getVersionHash());
+    const targetHistory = target.workflow.history?.concat(
+      target.getVersionHash()
+    );
+    const targetHead = targetHistory[targetHistory.length - 1];
+    if (thisHistory.indexOf(targetHead) > -1)
+      return true;
+    return false;
+  }
 };
 var Workflow_default = Workflow;
 
@@ -156,7 +242,7 @@ function to_json_default(project) {
     // Do we just serialize all public fields?
     name: project.name,
     description: project.description,
-
+    config: project.config,
     meta: project.meta,
     workflows: project.workflows,
     collections: project.collections,
@@ -204,7 +290,7 @@ function to_app_state_default(project, options = {}) {
     ...project.options,
     workflows: project.workflows.map(mapWorkflow)
   };
-  const shouldReturnYaml = options.format === "yaml" || !options.format && project.
+  const shouldReturnYaml = options.format === "yaml" || !options.format && project.config.formats.project === "yaml";
   if (shouldReturnYaml) {
     return jsonToYaml(state);
   }
@@ -255,7 +341,9 @@ var mapWorkflow = (workflow) => {
       const e = {
         id: rules.openfn?.uuid ?? randomUUID(),
         target_job_id: lookup[next],
-        enabled: !rules.disabled
+        enabled: !rules.disabled,
+        source_trigger_id: null
+        // lightning complains if this isn't set, even if its falsy :(
       };
       if (isTrigger) {
         e.source_trigger_id = node.id;
@@ -276,32 +364,129 @@ var mapWorkflow = (workflow) => {
 
 // src/serialize/to-fs.ts
 import nodepath from "path";
+
+// src/util/config.ts
+import { readFileSync } from "node:fs";
+import path from "node:path";
+import { pickBy, isNil } from "lodash-es";
+var buildConfig = (config = {}) => ({
+  ...config,
+  dirs: {
+    projects: ".projects",
+    // TODO change to projects
+    workflows: "workflows"
+  },
+  formats: {
+    openfn: config.formats?.openfn ?? "yaml",
+    project: config.formats?.project ?? "yaml",
+    workflow: config.formats?.workflow ?? "yaml"
+  }
+});
+var extractConfig = (source) => {
+  const project = {
+    ...source.openfn || {}
+  };
+  const workspace = {
+    ...source.config
+  };
+  const content = { project, workspace };
+  const format = workspace.formats.openfn;
+  if (format === "yaml") {
+    return {
+      path: "openfn.yaml",
+      content: jsonToYaml(content)
+    };
+  }
+  return {
+    path: "openfn.json",
+    content: JSON.stringify(content, null, 2)
+  };
+};
+var loadWorkspaceFile = (contents, format = "yaml") => {
+  let project, workspace;
+  let json = contents;
+  if (format === "yaml") {
+    json = yamlToJson(contents) ?? {};
+  } else if (typeof contents === "string") {
+    json = JSON.parse(contents);
+  }
+  const legacy = !json.workspace && !json.projects;
+  if (legacy) {
+    project = json.project ?? {};
+    if (json.name) {
+      project.name = json.name;
+    }
+    const {
+      formats,
+      dirs,
+      project: _,
+      name,
+      ...rest
+    } = json;
+    workspace = pickBy(
+      {
+        ...rest,
+        formats,
+        dirs
+      },
+      (value) => !isNil(value)
+    );
+  } else {
+    project = json.project ?? {};
+    workspace = json.workspace ?? {};
+  }
+  return { project, workspace };
+};
+var findWorkspaceFile = (dir = ".") => {
+  console.log({ dir });
+  let content, type;
+  try {
+    type = "yaml";
+    console.log(path.resolve(path.join(dir, "openfn.yaml")));
+    content = readFileSync(path.resolve(path.join(dir, "openfn.yaml")), "utf8");
+    console.log({ content });
+  } catch (e) {
+    try {
+      type = "json";
+      const file = readFileSync(path.join(dir, "openfn.json"), "utf8");
+      if (file) {
+        content = JSON.parse(file);
+      }
    } catch (e2) {
      console.log(e2);
      throw e2;
    }
+  }
+  return { content, type };
+};
+
+// src/serialize/to-fs.ts
 var stringify = (json) => JSON.stringify(json, null, 2);
 function to_fs_default(project) {
   const files = {};
-  const { path:
-  files[
+  const { path: path5, content } = extractConfig(project);
+  files[path5] = content;
   for (const wf of project.workflows) {
-    const { path:
-    files[
+    const { path: path6, content: content2 } = extractWorkflow(project, wf.id);
+    files[path6] = content2;
     for (const s of wf.steps) {
       const result = extractStep(project, wf.id, s.id);
       if (result) {
-        const { path:
-        files[
+        const { path: path7, content: content3 } = result;
+        files[path7] = content3;
       }
     }
   }
   return files;
 }
 var extractWorkflow = (project, workflowId2) => {
-  const format = project.
+  const format = project.config.formats.workflow;
   const workflow = project.getWorkflow(workflowId2);
   if (!workflow) {
     throw new Error(`workflow not found: ${workflowId2}`);
   }
-  const root = project.
-  const
+  const root = project.config.dirs.workflow ?? project.config.workflowRoot ?? "workflows/";
+  const path5 = nodepath.join(root, workflow.id, workflow.id);
   const wf = {
     id: workflow.id,
     name: workflow.name,
@@ -316,7 +501,7 @@ var extractWorkflow = (project, workflowId2) => {
       return mapped;
     })
   };
-  return handleOutput(wf,
+  return handleOutput(wf, path5, format);
 };
 var extractStep = (project, workflowId2, stepId) => {
   const workflow = project.getWorkflow(workflowId2);
@@ -329,31 +514,22 @@ var extractStep = (project, workflowId2, stepId) => {
   }
   if (step.expression) {
     const root = project.config?.workflowRoot ?? "workflows/";
-    const
+    const path5 = nodepath.join(root, `${workflow.id}/${step.id}.js`);
     const content = step.expression;
-    return { path:
+    return { path: path5, content };
   }
 };
-var extractRepoConfig = (project) => {
-  const format = project.repo.formats.openfn;
-  const config = {
-    name: project.name,
-    ...project.repo,
-    project: project.openfn ?? {}
-  };
-  return handleOutput(config, "openfn", format);
-};
 var handleOutput = (data, filePath, format) => {
-  const
+  const path5 = `${filePath}.${format}`;
   let content;
   if (format === "json") {
-    content = stringify(data
+    content = stringify(data);
   } else if (format === "yaml") {
     content = jsonToYaml(data);
   } else {
     throw new Error(`Unrecognised format: ${format}`);
   }
-  return { path:
+  return { path: path5, content };
 };
 
 // src/parse/from-app-state.ts
@@ -388,6 +564,7 @@ var from_app_state_default = (state, config) => {
   };
   proj.openfn = {
     uuid: id,
+    name,
     endpoint: config.endpoint,
     env: config.env,
     inserted_at,
@@ -397,7 +574,7 @@ var from_app_state_default = (state, config) => {
     fetched_at: config.fetchedAt
   };
   proj.workflows = state.workflows.map(mapWorkflow2);
-  return new Project(proj, config?.
+  return new Project(proj, config?.config);
 };
 var mapTriggerEdgeCondition = (edge) => {
   const e = {
@@ -471,13 +648,12 @@ var mapWorkflow2 = (workflow) => {
 // src/parse/from-path.ts
 import { extname } from "node:path";
 import { readFile } from "node:fs/promises";
-var from_path_default = async (
-  const ext = extname(
-  const source = await readFile(
+var from_path_default = async (path5, options = {}) => {
+  const ext = extname(path5).toLowerCase();
+  const source = await readFile(path5, "utf8");
   const config = {
     format: null,
-
-    // TMP
+    config: options.config
   };
   let state;
   if (ext === ".json") {
@@ -494,7 +670,7 @@ var from_path_default = async (path4, options = {}) => {
 
 // src/parse/from-fs.ts
 import fs from "node:fs/promises";
-import
+import path2 from "node:path";
 import { glob } from "glob";
 
 // src/util/get-identifier.ts
@@ -513,34 +689,17 @@ var get_identifier_default = (config = {}) => {
 // src/parse/from-fs.ts
 var parseProject = async (options = {}) => {
   const { root } = options;
-  const
-
-
-    const file = await fs.readFile(
-      path.resolve(path.join(root, "openfn.yaml")),
-      "utf8"
-    );
-    config = yamlToJson(file);
-  } catch (e) {
-    try {
-      const file = await fs.readFile(
-        path.join(root || ".", "openfn.json"),
-        "utf8"
-      );
-      config = JSON.parse(file);
-    } catch (e2) {
-      console.log(e2);
-      throw e2;
-    }
-  }
+  const { type, content } = findWorkspaceFile(root);
+  const context = loadWorkspaceFile(content, type);
+  const config = buildConfig(context.workspace);
   let state;
   const identifier = get_identifier_default({
-    endpoint:
-    env:
+    endpoint: context.project?.endpoint,
+    env: context.project?.env
   });
   try {
     const format = config.formats?.project ?? config.formats?.projects ?? "yaml";
-    const statePath =
+    const statePath = path2.join(
       root,
       config.dirs?.projects ?? ".projects",
       `${identifier}.${format}`
@@ -550,9 +709,11 @@ var parseProject = async (options = {}) => {
   } catch (e) {
     console.warn(`Failed to find state file for ${identifier}`);
   }
-  const
-
-
+  const proj = {
+    openfn: context.project,
+    config,
+    workflows: []
+  };
   const workflowDir = config.workflowRoot ?? config.dirs?.workflows ?? "workflows";
   const fileType = config.formats?.workflow ?? "yaml";
   const pattern = `${root}/${workflowDir}/*/*.${fileType}`;
@@ -572,8 +733,8 @@ var parseProject = async (options = {}) => {
       };
       for (const step of wf.steps) {
         if (step.expression && step.expression.endsWith(".js")) {
-          const dir =
-          const exprPath =
+          const dir = path2.dirname(filePath);
+          const exprPath = path2.join(dir, step.expression);
           try {
             console.debug(`Loaded expression from ${exprPath}`);
             step.expression = await fs.readFile(exprPath, "utf-8");
@@ -592,15 +753,14 @@ var parseProject = async (options = {}) => {
           step.next[target].openfn = { uuid: uuid2 };
         }
       }
-      workflows.push(wf);
+      proj.workflows.push(wf);
     }
   } catch (e) {
     console.log(e);
     continue;
   }
 }
-  proj.
-  return new Project(proj, repo);
+  return new Project(proj, context.workspace);
 };
 
 // src/util/uuid.ts
@@ -945,7 +1105,8 @@ function getDuplicates(arr) {
 function merge(source, target, options) {
   const defaultOptions = {
     workflowMappings: {},
-    removeUnmapped: false
+    removeUnmapped: false,
+    force: true
   };
   options = defaultsDeep(options, defaultOptions);
   const dupTargetMappings = getDuplicates(
@@ -966,6 +1127,23 @@ function merge(source, target, options) {
       return true;
     return !!options?.workflowMappings[w.id];
   });
+  const potentialConflicts = {};
+  for (const sourceWorkflow of sourceWorkflows) {
+    const targetId = options.workflowMappings?.[sourceWorkflow.id] ?? sourceWorkflow.id;
+    const targetWorkflow = target.getWorkflow(targetId);
+    if (targetWorkflow && !sourceWorkflow.canMergeInto(targetWorkflow)) {
+      potentialConflicts[sourceWorkflow.name] = targetWorkflow?.name;
+    }
+  }
+  if (Object.keys(potentialConflicts).length && !options?.force) {
+    throw new Error(
+      `The below workflows can't be merged directly without losing data
+${Object.entries(
+        potentialConflicts
+      ).map(([from, to]) => `${from} \u2192 ${to}`).join("\n")}
+Pass --force to force the merge anyway`
+    );
+  }
   for (const sourceWorkflow of sourceWorkflows) {
     const targetId = options.workflowMappings?.[sourceWorkflow.id] ?? sourceWorkflow.id;
     const targetWorkflow = target.getWorkflow(targetId);
@@ -993,16 +1171,6 @@ function merge(source, target, options) {
 
 // src/Project.ts
 var maybeCreateWorkflow = (wf) => wf instanceof Workflow_default ? wf : new Workflow_default(wf);
-var setConfigDefaults = (config = {}) => ({
-  ...config,
-  workflowRoot: config.workflowRoot ?? "workflows",
-  formats: {
-    // TODO change these maybe
-    openfn: config.formats?.openfn ?? "yaml",
-    project: config.formats?.project ?? "yaml",
-    workflow: config.formats?.workflow ?? "yaml"
-  }
-});
 var Project = class {
   // what schema version is this?
   // And how are we tracking this?
@@ -1021,17 +1189,15 @@ var Project = class {
   meta;
   // this contains meta about the connected openfn project
   openfn;
-
-
-  // and saved to an openfn.yaml file
-  repo;
+  workspace;
+  config;
   // load a project from a state file (project.json)
   // or from a path (the file system)
   // TODO presumably we can detect a state file? Not a big deal?
   // collections for the project
   // TODO to be well typed
   collections;
-  static from(type, data, options) {
+  static from(type, data, options = {}) {
     if (type === "state") {
       return from_app_state_default(data, options);
     } else if (type === "fs") {
@@ -1054,8 +1220,9 @@ var Project = class {
   // uh maybe
   // maybe this second arg is config - like env, branch rules, serialisation rules
   // stuff that's external to the actual project and managed by the repo
+  // TODO maybe the constructor is (data, Workspace)
   constructor(data, repoConfig = {}) {
-    this.
+    this.setConfig(repoConfig);
     this.name = data.name;
     this.description = data.description;
     this.openfn = data.openfn;
@@ -1065,6 +1232,9 @@ var Project = class {
     this.credentials = data.credentials;
     this.meta = data.meta;
   }
+  setConfig(config) {
+    this.config = buildConfig(config);
+  }
   serialize(type = "json", options) {
     if (type in serialize_exports) {
       return serialize_exports[type](this, options);
@@ -1079,8 +1249,8 @@ var Project = class {
   // what else might we need?
   // get workflow by name or id
   // this is fuzzy, but is that wrong?
-  getWorkflow(
-    return this.workflows.find((wf) => wf.id ==
+  getWorkflow(idOrName) {
+    return this.workflows.find((wf) => wf.id == idOrName) || this.workflows.find((wf) => wf.name === idOrName);
   }
   // it's the name of the project.yaml file
   // qualified name? Remote name? App name?
@@ -1115,6 +1285,10 @@ var Project = class {
 };
 var Project_default = Project;
 
+// src/Workspace.ts
+import path3 from "node:path";
+import fs3 from "node:fs";
+
 // src/util/path-exists.ts
 import fs2 from "fs";
 function pathExists(fpath, type) {
@@ -1131,33 +1305,33 @@ function pathExists(fpath, type) {
 }
 
 // src/Workspace.ts
-import path2 from "path";
-import fs3 from "fs";
-var PROJECTS_DIRECTORY = ".projects";
-var OPENFN_YAML_FILE = "openfn.yaml";
 var PROJECT_EXTENSIONS = [".yaml", ".yml"];
 var Workspace = class {
   config;
+  projectMeta;
   projects = [];
   projectPaths = /* @__PURE__ */ new Map();
   isValid = false;
   constructor(workspacePath) {
-
-
+    let context;
+    try {
+      const { type, content } = findWorkspaceFile(workspacePath);
+      console.log(content);
+      context = loadWorkspaceFile(content, type);
       this.isValid = true;
-
-
+    } catch (e) {
+      console.log(e);
+      return;
     }
-
-
-
-    );
+    this.config = buildConfig(context.workspace);
+    this.projectMeta = context.project;
+    const projectsPath = path3.join(workspacePath, this.config.dirs.projects);
     if (this.isValid && pathExists(projectsPath, "directory")) {
       const stateFiles = fs3.readdirSync(projectsPath).filter(
-        (fileName) => PROJECT_EXTENSIONS.includes(
+        (fileName) => PROJECT_EXTENSIONS.includes(path3.extname(fileName)) && path3.parse(fileName).name !== "openfn"
       );
       this.projects = stateFiles.map((file) => {
-        const stateFilePath =
+        const stateFilePath = path3.join(projectsPath, file);
         const data = fs3.readFileSync(stateFilePath, "utf-8");
         const project = from_app_state_default(data, { format: "yaml" });
         this.projectPaths.set(project.name, stateFilePath);
@@ -1165,9 +1339,17 @@ var Workspace = class {
       }).filter((s) => s);
     }
   }
+  // TODO
+  // This will load a project within this workspace
+  // uses Project.from
+  // Rather than doing new Workspace + Project.from(),
+  // you can do it in a single call
+  loadProject() {
+  }
   list() {
     return this.projects;
   }
+  // TODO clear up name/id confusion
   get(id) {
     return this.projects.find((p) => p.name === id);
   }
@@ -1175,13 +1357,15 @@ var Workspace = class {
     return this.projectPaths.get(id);
   }
   getActiveProject() {
-    return this.projects.find((p) => p.name === this.
+    return this.projects.find((p) => p.name === this.projectMeta?.name);
   }
+  // TODO this needs to return default values
+  // We should always rely on the workspace to load these values
   getConfig() {
     return this.config;
   }
   get activeProjectId() {
-    return this.
+    return this.projectMeta?.name;
  }
   get valid() {
     return this.isValid;
@@ -1190,8 +1374,8 @@ var Workspace = class {
 
 // src/gen/generator.ts
 import { randomUUID as randomUUID2 } from "node:crypto";
-import
-import { readFileSync } from "node:fs";
+import path4 from "node:path";
+import { readFileSync as readFileSync2 } from "node:fs";
 import { grammar } from "ohm-js";
 var parser;
 var initOperations = (options = {}) => {
@@ -1287,8 +1471,8 @@ var initOperations = (options = {}) => {
   return operations;
 };
 var createParser = () => {
-  const grammarPath =
-  const contents =
+  const grammarPath = path4.resolve(import.meta.dirname, "workflow.ohm");
+  const contents = readFileSync2(grammarPath, "utf-8");
   const parser2 = grammar(contents);
   return {
     parse(str, options) {
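The merge changes above (the new force default and the canMergeInto check) mean Project.merge now detects workflows whose version histories have diverged and will refuse to merge them when force is false. A sketch of the guard in TypeScript, assuming Project is exported from the package entry point; the state payloads and endpoint are illustrative:

import { Project } from '@openfn/project';

// Illustrative Lightning app-state payloads (shape defined by the Lightning export)
declare const localState: any;
declare const remoteState: any;

const opts = { endpoint: 'https://app.openfn.org', env: 'prod' };
const local = Project.from('state', localState, opts);
const remote = Project.from('state', remoteState, opts);

try {
  // force now defaults to true; pass force: false to fail fast on divergent histories
  Project.merge(local, remote, { force: false });
} catch (e) {
  // The error lists each "source → target" workflow pair whose target head hash
  // is missing from the source history, and suggests forcing the merge
  console.error((e as Error).message);
}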
package/package.json
CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "@openfn/project",
-  "version": "0.5.1",
+  "version": "0.7.0",
   "description": "Read, serialize, replicate and sync OpenFn projects",
   "type": "module",
   "exports": {
@@ -32,7 +32,7 @@
     "lodash-es": "^4.17.21",
     "ohm-js": "^17.2.1",
     "yaml": "^2.2.2",
-    "@openfn/lexicon": "^1.2.
+    "@openfn/lexicon": "^1.2.5",
     "@openfn/logger": "1.0.6"
   },
   "files": [