@positronic/core 0.0.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.swcrc +31 -0
- package/dist/src/adapters/types.js +16 -0
- package/dist/src/clients/types.js +1 -0
- package/dist/src/dsl/constants.js +15 -0
- package/dist/src/dsl/extensions.js +19 -0
- package/dist/src/dsl/json-patch.js +30 -0
- package/dist/src/dsl/types.js +1 -0
- package/dist/src/dsl/workflow-runner.js +93 -0
- package/dist/src/dsl/workflow.js +308 -0
- package/dist/src/file-stores/local-file-store.js +12 -0
- package/dist/src/file-stores/types.js +1 -0
- package/dist/src/index.js +10 -0
- package/dist/src/utils/temp-files.js +27 -0
- package/dist/types/adapters/types.d.ts +10 -0
- package/dist/types/adapters/types.d.ts.map +1 -0
- package/dist/types/clients/types.d.ts +10 -0
- package/dist/types/clients/types.d.ts.map +1 -0
- package/dist/types/dsl/constants.d.ts +16 -0
- package/dist/types/dsl/constants.d.ts.map +1 -0
- package/dist/types/dsl/extensions.d.ts +18 -0
- package/dist/types/dsl/extensions.d.ts.map +1 -0
- package/dist/types/dsl/json-patch.d.ts +11 -0
- package/dist/types/dsl/json-patch.d.ts.map +1 -0
- package/dist/types/dsl/types.d.ts +14 -0
- package/dist/types/dsl/types.d.ts.map +1 -0
- package/dist/types/dsl/workflow-runner.d.ts +28 -0
- package/dist/types/dsl/workflow-runner.d.ts.map +1 -0
- package/dist/types/dsl/workflow.d.ts +118 -0
- package/dist/types/dsl/workflow.d.ts.map +1 -0
- package/dist/types/file-stores/local-file-store.d.ts +7 -0
- package/dist/types/file-stores/local-file-store.d.ts.map +1 -0
- package/dist/types/file-stores/types.d.ts +4 -0
- package/dist/types/file-stores/types.d.ts.map +1 -0
- package/dist/types/index.d.ts +12 -0
- package/dist/types/index.d.ts.map +1 -0
- package/dist/types/utils/temp-files.d.ts +12 -0
- package/dist/types/utils/temp-files.d.ts.map +1 -0
- package/package.json +21 -0
- package/src/adapters/types.ts +24 -0
- package/src/clients/types.ts +14 -0
- package/src/dsl/constants.ts +16 -0
- package/src/dsl/extensions.ts +58 -0
- package/src/dsl/json-patch.ts +27 -0
- package/src/dsl/types.ts +13 -0
- package/src/dsl/workflow-runner.test.ts +203 -0
- package/src/dsl/workflow-runner.ts +146 -0
- package/src/dsl/workflow.test.ts +1435 -0
- package/src/dsl/workflow.ts +554 -0
- package/src/file-stores/local-file-store.ts +11 -0
- package/src/file-stores/types.ts +3 -0
- package/src/index.ts +22 -0
- package/src/utils/temp-files.ts +46 -0
- package/tsconfig.json +10 -0
package/.swcrc
ADDED
@@ -0,0 +1,31 @@
+{
+  "jsc": {
+    "parser": {
+      "syntax": "typescript"
+    },
+    "target": "es2022",
+    "experimental": {
+      "plugins": [
+        [
+          "@swc/plugin-transform-imports",
+          {
+            "^(\\.{1,2}\\/.*?)$": {
+              "skipDefaultConversion": true,
+              "transform": "{{matches.[1]}}.js"
+            }
+          }
+        ]
+      ]
+    }
+  },
+  "module": {
+    "type": "es6"
+  },
+  "exclude": [
+    ".*\\.test\\.ts$",
+    "node_modules",
+    "dist",
+    "coverage",
+    "jest.config.*"
+  ]
+}
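
The @swc/plugin-transform-imports rule above rewrites relative specifiers so the emitted ES modules carry explicit .js extensions, which is why every compiled file under dist/ imports paths like "./constants.js". A sketch of the effect (the source line is hypothetical; only the compiled output ships in this package):

// Source (TypeScript, extensionless relative import):
import { STATUS } from './constants';

// After compilation with the transform above, the emitted ESM reads:
// import { STATUS } from "./constants.js";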

package/dist/src/adapters/types.js
ADDED
@@ -0,0 +1,16 @@
+import { WORKFLOW_EVENTS } from "../dsl/constants.js";
+export class Adapter {
+    async dispatch(event) {
+        if (event.type === WORKFLOW_EVENTS.START && this.started) {
+            await this.started(event);
+        } else if (event.type === WORKFLOW_EVENTS.STEP_COMPLETE && this.updated) {
+            await this.updated(event);
+        } else if (event.type === WORKFLOW_EVENTS.COMPLETE && this.completed) {
+            await this.completed(event);
+        } else if (event.type === WORKFLOW_EVENTS.ERROR && this.error) {
+            await this.error(event);
+        } else if (event.type === WORKFLOW_EVENTS.RESTART && this.restarted) {
+            await this.restarted(event);
+        }
+    }
+}
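
The compiled Adapter above routes each workflow event to an optional lifecycle hook. A minimal sketch of a concrete adapter, assuming the package is consumed as @positronic/core; the console logging is illustrative only, and the event parameters are left loosely typed (the published types use WorkflowEvent):

import { Adapter } from '@positronic/core';

// Logs workflow lifecycle events; hook names match the dispatch logic above.
class ConsoleAdapter extends Adapter {
  async started(event: any) {
    console.log(`workflow started: ${event.workflowTitle}`);
  }
  async updated(event: any) {
    console.log(`step complete: ${event.stepTitle}`);
  }
  async completed(event: any) {
    console.log(`workflow complete: ${event.workflowTitle}`);
  }
  async error(event: any) {
    console.error(`workflow error: ${event.error?.message}`);
  }
}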

package/dist/src/clients/types.js
ADDED
@@ -0,0 +1 @@
+export { };

package/dist/src/dsl/constants.js
ADDED
@@ -0,0 +1,15 @@
+export const WORKFLOW_EVENTS = {
+    START: 'workflow:start',
+    RESTART: 'workflow:restart',
+    STEP_START: 'step:start',
+    STEP_COMPLETE: 'step:complete',
+    STEP_STATUS: 'step:status',
+    ERROR: 'workflow:error',
+    COMPLETE: 'workflow:complete'
+};
+export const STATUS = {
+    PENDING: 'pending',
+    RUNNING: 'running',
+    COMPLETE: 'complete',
+    ERROR: 'error'
+};

package/dist/src/dsl/extensions.js
ADDED
@@ -0,0 +1,19 @@
+import { Workflow } from "./workflow.js";
+export function createExtension(key, extension) {
+    return {
+        install () {
+            Object.defineProperty(Workflow.prototype, key, {
+                get () {
+                    const boundMethods = {};
+                    for (const [methodKey, fn] of Object.entries(extension)){
+                        boundMethods[methodKey] = fn.bind(this);
+                    }
+                    return boundMethods;
+                }
+            });
+        },
+        augment () {
+            return {};
+        }
+    };
+}
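
createExtension installs a getter on Workflow.prototype whose methods are bound to the workflow instance when accessed. A hypothetical usage sketch; the `files` key, the readFile method, and its config shape are invented for illustration:

import { createExtension } from '@positronic/core';

// Each extension method runs with `this` bound to the workflow, so it can
// delegate to the core DSL (here, `step`).
const fileMethods = {
  readFile(this: any, title: string, config: { path: string; key: string }) {
    return this.step(title, async ({ state, fileStore }: any) => ({
      ...state,
      [config.key]: await fileStore.readFile(config.path),
    }));
  },
};

const filesExtension = createExtension('files', fileMethods);
filesExtension.install(); // adds a `files` getter to every Workflow instance

// After install (and a matching type augmentation via filesExtension.augment()),
// a workflow could be written as:
// workflow('Example').files.readFile('Load config', { path: 'config.json', key: 'config' });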

package/dist/src/dsl/json-patch.js
ADDED
@@ -0,0 +1,30 @@
+import pkg from 'fast-json-patch';
+const { compare, applyPatch } = pkg;
+/**
+ * Creates a JSON Patch that describes the changes needed to transform prevState into nextState.
+ */ export function createPatch(prevState, nextState) {
+    // Filter out non-standard operations and ensure type safety
+    return compare(prevState, nextState).filter((op)=>[
+            'add',
+            'remove',
+            'replace',
+            'move',
+            'copy',
+            'test'
+        ].includes(op.op));
+}
+/**
+ * Applies one or more JSON Patches to a state object and returns the resulting state.
+ * If multiple patches are provided, they are applied in sequence.
+ */ export function applyPatches(state, patches) {
+    const patchArray = Array.isArray(patches[0]) ? patches : [
+        patches
+    ];
+    // Apply patches in sequence, creating a new state object each time
+    return patchArray.reduce((currentState, patch)=>{
+        const { newDocument } = applyPatch(currentState, patch, true, false);
+        return newDocument;
+    }, {
+        ...state
+    });
+}
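
A quick sketch of how the two helpers compose: the patch computed from two states replays onto the older one to reproduce the newer one. The example states are arbitrary and purely illustrative:

import { createPatch, applyPatches } from '@positronic/core';

const before = { count: 1, items: ['a'] };
const after = { count: 2, items: ['a', 'b'] };

// Describe the change as standard JSON Patch (RFC 6902) operations...
const patch = createPatch(before, after);

// ...and replay it; applyPatches accepts a single patch or an array of patches.
const rebuilt = applyPatches(before, patch);
// rebuilt is deep-equal to `after`, and the original object is not mutated.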

package/dist/src/dsl/types.js
ADDED
@@ -0,0 +1 @@
+export { };

package/dist/src/dsl/workflow-runner.js
ADDED
@@ -0,0 +1,93 @@
+import { WORKFLOW_EVENTS } from "./constants.js";
+import { applyPatches } from "./json-patch.js";
+export class WorkflowRunner {
+    options;
+    constructor(options){
+        this.options = options;
+    }
+    async run(workflow, { initialState = {}, options, initialCompletedSteps, workflowRunId, endAfter } = {}) {
+        const { adapters, logger: { log }, verbose, fileStore, client } = this.options;
+        let currentState = initialState ?? {};
+        let stepNumber = 1;
+        // Apply any patches from completed steps
+        // to the initial state so that the workflow
+        // starts with a state that reflects all of the completed steps.
+        // Need to do this when a workflow is restarted with completed steps.
+        initialCompletedSteps?.forEach((step)=>{
+            if (step.patch) {
+                currentState = applyPatches(currentState, [
+                    step.patch
+                ]);
+                stepNumber++;
+            }
+        });
+        const workflowRun = workflowRunId && initialCompletedSteps ? workflow.run({
+            initialState,
+            initialCompletedSteps,
+            workflowRunId,
+            options,
+            client,
+            fileStore
+        }) : workflow.run({
+            initialState,
+            options,
+            client,
+            fileStore
+        });
+        for await (const event of workflowRun){
+            // Dispatch event to all adapters
+            await Promise.all(adapters.map((adapter)=>adapter.dispatch(event)));
+            // Update current state when steps complete
+            if (event.type === WORKFLOW_EVENTS.STEP_COMPLETE) {
+                if (event.patch) {
+                    currentState = applyPatches(currentState, [
+                        event.patch
+                    ]);
+                }
+                // Check if we should stop after this step
+                if (endAfter && stepNumber >= endAfter) {
+                    // Log final state if verbose
+                    if (verbose) {
+                        log(`\nWorkflow stopped after step ${endAfter} as requested: \n\n ${JSON.stringify(this.truncateDeep(structuredClone(currentState)), null, 2)}`);
+                    }
+                    return;
+                }
+                stepNumber++;
+            }
+            // Log final state on workflow completion/error if verbose
+            if ((event.type === WORKFLOW_EVENTS.COMPLETE || event.type === WORKFLOW_EVENTS.ERROR) && verbose) {
+                log(`\nWorkflow completed: \n\n ${JSON.stringify(this.truncateDeep(structuredClone(currentState)), null, 2)}`);
+            }
+        }
+    }
+    truncateDeep(obj, maxLength = 100) {
+        if (obj === null || obj === undefined) return obj;
+        if (typeof obj === 'string') {
+            return obj.length > maxLength ? obj.slice(0, maxLength) + '...' : obj;
+        }
+        if (Array.isArray(obj)) {
+            if (obj.length === 0) return obj;
+            let truncatedArray = [];
+            let currentLength = 2; // Account for [] brackets
+            for(let i = 0; i < obj.length; i++){
+                const processedItem = this.truncateDeep(obj[i], maxLength);
+                const itemStr = JSON.stringify(processedItem);
+                if (currentLength + itemStr.length + (i > 0 ? 1 : 0) > maxLength) {
+                    truncatedArray.push(`... (${obj.length})`);
+                    break;
+                }
+                truncatedArray.push(processedItem);
+                currentLength += itemStr.length + (i > 0 ? 1 : 0); // Add 1 for comma
+            }
+            return truncatedArray;
+        }
+        if (typeof obj === 'object') {
+            const truncated = {};
+            for (const [key, value] of Object.entries(obj)){
+                truncated[key] = this.truncateDeep(value, maxLength);
+            }
+            return truncated;
+        }
+        return obj;
+    }
+}
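
A minimal sketch of driving a workflow with the runner, matching the options destructured in run() above. The placeholder declarations stand in for application pieces defined elsewhere; names like myWorkflow and promptClient are illustrative:

import { WorkflowRunner, LocalFileStore } from '@positronic/core';

// Placeholders for pieces an application would define (see the other sketches).
declare const consoleAdapter: { dispatch(event: any): Promise<void> };
declare const promptClient: { execute(prompt: string, responseModel: any): Promise<any> };
declare const myWorkflow: any;

const runner = new WorkflowRunner({
  adapters: [consoleAdapter],           // each event is dispatched to every adapter
  logger: { log: console.log },         // used for the verbose state dumps
  verbose: true,
  client: promptClient,                 // PromptClient used by prompt() steps
  fileStore: new LocalFileStore('./data'),
});

await runner.run(myWorkflow, {
  initialState: { count: 0 },
  endAfter: 2,                          // optionally stop once step 2 completes
});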

package/dist/src/dsl/workflow.js
ADDED
@@ -0,0 +1,308 @@
+import { v4 as uuidv4 } from 'uuid';
+import { STATUS } from "./constants.js";
+import { WORKFLOW_EVENTS } from "./constants.js";
+import { createPatch } from "./json-patch.js";
+import { applyPatches } from "./json-patch.js";
+export class Workflow {
+    title;
+    description;
+    blocks;
+    type;
+    constructor(title, description){
+        this.title = title;
+        this.description = description;
+        this.blocks = [];
+        this.type = 'workflow';
+    }
+    step(title, action) {
+        const stepBlock = {
+            type: 'step',
+            title,
+            action
+        };
+        this.blocks.push(stepBlock);
+        return this.nextWorkflow();
+    }
+    workflow(title, innerWorkflow, action, initialState) {
+        const nestedBlock = {
+            type: 'workflow',
+            title,
+            innerWorkflow,
+            initialState: initialState || (()=>({})),
+            action: (outerState, innerState)=>action({
+                    state: outerState,
+                    workflowState: innerState
+                })
+        };
+        this.blocks.push(nestedBlock);
+        return this.nextWorkflow();
+    }
+    // TResponseKey:
+    // The response key must be a string literal, so if defining a response model
+    // a consumer of this workflow must use "as const" to ensure the key is a string literal;
+    // this type makes sure that they will get a ts error if they don't.
+    prompt(title, config, reduce) {
+        const promptBlock = {
+            type: 'step',
+            title,
+            action: async ({ state, client: runClient, options })=>{
+                const { template, responseModel, client: stepClient } = config;
+                const client = stepClient ?? runClient;
+                const promptString = template(state);
+                const response = await client.execute(promptString, responseModel);
+                const stateWithResponse = {
+                    ...state,
+                    [config.responseModel.name]: response
+                };
+                return reduce ? reduce({
+                    state,
+                    response,
+                    options,
+                    prompt: promptString
+                }) : stateWithResponse;
+            }
+        };
+        this.blocks.push(promptBlock);
+        return this.nextWorkflow();
+    }
+    // Implementation signature
+    async *run(params) {
+        const { title, description, blocks } = this;
+        const stream = new WorkflowEventStream({
+            title,
+            description,
+            blocks,
+            ...params
+        });
+        yield* stream.next();
+    }
+    withBlocks(blocks) {
+        this.blocks = blocks;
+        return this;
+    }
+    nextWorkflow() {
+        return new Workflow(this.title, this.description).withBlocks(this.blocks);
+    }
+}
+class Step {
+    block;
+    id;
+    patch;
+    status;
+    constructor(block, id){
+        this.block = block;
+        this.status = STATUS.PENDING;
+        this.id = id || uuidv4();
+    }
+    withPatch(patch) {
+        this.patch = patch;
+        return this;
+    }
+    withStatus(status) {
+        this.status = status;
+        return this;
+    }
+    get serialized() {
+        return {
+            id: this.id,
+            title: this.block.title,
+            status: this.status,
+            patch: typeof this.patch === 'string' ? JSON.parse(this.patch) : this.patch
+        };
+    }
+}
+class WorkflowEventStream {
+    currentState;
+    steps;
+    currentStepIndex = 0;
+    initialState;
+    workflowRunId;
+    title;
+    description;
+    fileStore;
+    client;
+    options;
+    constructor(params){
+        const { initialState = {}, initialCompletedSteps, blocks, title, description, workflowRunId: providedWorkflowRunId, options = {}, fileStore, client } = params;
+        this.initialState = initialState;
+        this.title = title;
+        this.description = description;
+        this.fileStore = fileStore;
+        this.client = client;
+        this.options = options;
+        // Initialize steps array with UUIDs and pending status
+        this.steps = blocks.map((block, index)=>{
+            const completedStep = initialCompletedSteps?.[index];
+            if (completedStep) {
+                return new Step(block, completedStep.id).withStatus(completedStep.status).withPatch(completedStep.patch);
+            }
+            return new Step(block);
+        });
+        this.currentState = clone(this.initialState);
+        for (const step of this.steps){
+            if (step.serialized.status === STATUS.COMPLETE && step.serialized.patch) {
+                this.currentState = applyPatches(this.currentState, [
+                    step.serialized.patch
+                ]);
+            }
+        }
+        this.workflowRunId = providedWorkflowRunId ?? uuidv4();
+    }
+    async *next() {
+        const { steps, title: workflowTitle, description: workflowDescription, currentState, options, workflowRunId } = this;
+        try {
+            const hasCompletedSteps = steps.some((step)=>step.serialized.status !== STATUS.PENDING);
+            yield {
+                type: hasCompletedSteps ? WORKFLOW_EVENTS.RESTART : WORKFLOW_EVENTS.START,
+                status: STATUS.RUNNING,
+                workflowTitle,
+                workflowDescription,
+                initialState: currentState,
+                options,
+                workflowRunId
+            };
+            // Emit initial step status after workflow starts
+            yield {
+                type: WORKFLOW_EVENTS.STEP_STATUS,
+                steps: steps.map((step)=>step.serialized),
+                options,
+                workflowRunId
+            };
+            // Process each step
+            while(this.currentStepIndex < steps.length){
+                const step = steps[this.currentStepIndex];
+                // Skip completed steps
+                if (step.serialized.status === STATUS.COMPLETE) {
+                    this.currentStepIndex++;
+                    continue;
+                }
+                // Step start event
+                yield {
+                    type: WORKFLOW_EVENTS.STEP_START,
+                    status: STATUS.RUNNING,
+                    stepTitle: step.block.title,
+                    stepId: step.id,
+                    options,
+                    workflowRunId
+                };
+                // Execute step and yield the STEP_COMPLETE event and
+                // all events from inner workflows if any
+                yield* this.executeStep(step);
+                // Step Status Event
+                yield {
+                    type: WORKFLOW_EVENTS.STEP_STATUS,
+                    steps: steps.map((step)=>step.serialized),
+                    options,
+                    workflowRunId
+                };
+                this.currentStepIndex++;
+            }
+            yield {
+                type: WORKFLOW_EVENTS.COMPLETE,
+                status: STATUS.COMPLETE,
+                workflowTitle,
+                workflowDescription,
+                workflowRunId,
+                options
+            };
+        } catch (err) {
+            const error = err;
+            const currentStep = steps[this.currentStepIndex];
+            currentStep?.withStatus(STATUS.ERROR);
+            yield {
+                type: WORKFLOW_EVENTS.ERROR,
+                status: STATUS.ERROR,
+                workflowTitle,
+                workflowDescription,
+                workflowRunId,
+                error: {
+                    name: error.name,
+                    message: error.message,
+                    stack: error.stack
+                },
+                options
+            };
+            // Step Status Event
+            yield {
+                type: WORKFLOW_EVENTS.STEP_STATUS,
+                steps: steps.map((step)=>step.serialized),
+                options,
+                workflowRunId
+            };
+            throw error;
+        }
+    }
+    async *executeStep(step) {
+        const block = step.block;
+        if (block.type === 'workflow') {
+            const initialState = typeof block.initialState === 'function' ? block.initialState(this.currentState) : block.initialState;
+            // Run inner workflow and yield all its events
+            let patches = [];
+            const innerRun = block.innerWorkflow.run({
+                fileStore: this.fileStore,
+                client: this.client,
+                initialState,
+                options: this.options ?? {}
+            });
+            for await (const event of innerRun){
+                yield event; // Forward all inner workflow events
+                if (event.type === WORKFLOW_EVENTS.STEP_COMPLETE) {
+                    patches.push(event.patch);
+                }
+            }
+            // Apply collected patches to get final inner state
+            const innerState = applyPatches(initialState, patches);
+            // Get previous state before action
+            const prevState = this.currentState;
+            // Update state with inner workflow results
+            this.currentState = await block.action(this.currentState, innerState);
+            yield* this.completeStep(step, prevState);
+        } else {
+            // Get previous state before action
+            const prevState = this.currentState;
+            // Execute regular step
+            this.currentState = await block.action({
+                state: this.currentState,
+                options: this.options ?? {},
+                client: this.client,
+                fileStore: this.fileStore
+            });
+            yield* this.completeStep(step, prevState);
+        }
+    }
+    *completeStep(step, prevState) {
+        step.withStatus(STATUS.COMPLETE);
+        // Create patch for the state change
+        const patch = createPatch(prevState, this.currentState);
+        step.withPatch(patch);
+        yield {
+            type: WORKFLOW_EVENTS.STEP_COMPLETE,
+            status: STATUS.RUNNING,
+            stepTitle: step.block.title,
+            stepId: step.id,
+            patch,
+            options: this.options ?? {},
+            workflowRunId: this.workflowRunId
+        };
+    }
+}
+let workflowNamesAreUnique = true;
+export function disableWorkflowNameUniqueness() {
+    workflowNamesAreUnique = false;
+}
+export function enableWorkflowNameUniqueness() {
+    workflowNamesAreUnique = true;
+}
+const workflowNames = new Set();
+export function workflow(workflowConfig) {
+    const title = typeof workflowConfig === 'string' ? workflowConfig : workflowConfig.title;
+    const description = typeof workflowConfig === 'string' ? undefined : workflowConfig.description;
+    if (workflowNamesAreUnique && workflowNames.has(title)) {
+        throw new Error(`Workflow with name "${title}" already exists. Workflow names must be unique.`);
+    }
+    if (workflowNamesAreUnique) {
+        workflowNames.add(title);
+    }
+    return new Workflow(title, description);
+}
+const clone = (value)=>structuredClone(value);
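
A sketch of the builder API implemented above: each step() or prompt() call returns a new Workflow carrying the accumulated blocks, and prompt() stores the LLM response under the response model's name. The zod schema, file name, and step logic below are illustrative:

import { z } from 'zod';
import { workflow } from '@positronic/core';

const summarize = workflow({ title: 'Summarize Notes', description: 'Example pipeline' })
  // A plain step: return the next state; the engine records it as a JSON Patch.
  .step('Load text', async ({ state, fileStore }: any) => ({
    ...state,
    text: await fileStore.readFile('notes.md'),
  }))
  // A prompt step: the response is produced by the PromptClient and merged
  // into state under responseModel.name (here, `summary`).
  .prompt('Summarize text', {
    template: (state: any) => `Summarize the following notes:\n\n${state.text}`,
    responseModel: {
      name: 'summary' as const,
      schema: z.object({ summary: z.string() }),
    },
  });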

package/dist/src/file-stores/local-file-store.js
ADDED
@@ -0,0 +1,12 @@
+import { join } from 'path';
+import * as fs from 'fs/promises';
+export class LocalFileStore {
+    baseDir;
+    constructor(baseDir){
+        this.baseDir = baseDir;
+    }
+    async readFile(path) {
+        const filePath = join(this.baseDir, path);
+        return fs.readFile(filePath, 'utf-8');
+    }
+}
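
LocalFileStore resolves every path against a base directory and reads it as UTF-8. A short usage sketch; the directory and file names are illustrative:

import { LocalFileStore } from '@positronic/core';

const store = new LocalFileStore('/srv/positronic-data');
const prompt = await store.readFile('prompts/system.md'); // reads /srv/positronic-data/prompts/system.md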

package/dist/src/file-stores/types.js
ADDED
@@ -0,0 +1 @@
+export { };

package/dist/src/index.js
ADDED
@@ -0,0 +1,10 @@
+export { Workflow } from "./dsl/workflow.js";
+export { workflow } from "./dsl/workflow.js";
+export { WorkflowRunner } from "./dsl/workflow-runner.js";
+export { createExtension } from "./dsl/extensions.js";
+export { STATUS } from "./dsl/constants.js";
+export { WORKFLOW_EVENTS } from "./dsl/constants.js";
+export { Adapter } from "./adapters/types.js";
+export { createPatch } from "./dsl/json-patch.js";
+export { applyPatches } from "./dsl/json-patch.js";
+export { LocalFileStore } from "./file-stores/local-file-store.js";

package/dist/src/utils/temp-files.js
ADDED
@@ -0,0 +1,27 @@
+import * as path from 'path';
+import * as os from 'os';
+import * as fs from 'fs/promises';
+import { exec } from 'child_process';
+import { promisify } from 'util';
+const execAsync = promisify(exec);
+/**
+ * Writes content to a temporary file and opens it in cursor
+ */ export async function writeAndOpenTemp(content, prefix, extension = 'txt') {
+    const tempDir = await fs.mkdtemp(path.join(os.tmpdir(), 'positronic-'));
+    const filename = `${prefix}-${Date.now()}.${extension}`;
+    const filepath = path.join(tempDir, filename);
+    await fs.writeFile(filepath, content, 'utf8');
+    // Open in cursor using the cursor command
+    await execAsync(`cursor ${filepath}`);
+    return filepath;
+}
+/**
+ * Writes both prompt and response to temp files and opens them in cursor
+ */ export async function writePromptAndResponse(prompt, response, prefix = 'debug') {
+    const promptPath = await writeAndOpenTemp(prompt, `${prefix}-prompt`, 'md');
+    const responsePath = await writeAndOpenTemp(typeof response === 'string' ? response : JSON.stringify(response, null, 2), `${prefix}-response`, 'tsx');
+    return {
+        promptPath,
+        responsePath
+    };
+}
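
These helpers shell out to the `cursor` editor command, so they only work where that CLI is on the PATH, and they are not re-exported from the package index. A usage sketch assuming in-repo access to the module:

// Import path assumes direct access to the source; not part of the public API above.
import { writePromptAndResponse } from './utils/temp-files.js';

const { promptPath, responsePath } = await writePromptAndResponse(
  'Explain JSON Patch in one paragraph.',  // written to a temp .md file and opened
  { summary: 'RFC 6902 operations' },      // non-strings are pretty-printed as JSON
  'patch-debug'                            // file name prefix
);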

package/dist/types/adapters/types.d.ts
ADDED
@@ -0,0 +1,10 @@
+import type { WorkflowEvent } from '../dsl/workflow';
+export declare abstract class Adapter<Options extends object = any> {
+    started?(event: WorkflowEvent<Options>): Promise<void>;
+    updated?(event: WorkflowEvent<Options>): Promise<void>;
+    completed?(event: WorkflowEvent<Options>): Promise<void>;
+    error?(event: WorkflowEvent<Options>): Promise<void>;
+    restarted?(event: WorkflowEvent<Options>): Promise<void>;
+    dispatch(event: WorkflowEvent<Options>): Promise<void>;
+}
+//# sourceMappingURL=types.d.ts.map

package/dist/types/adapters/types.d.ts.map
ADDED
@@ -0,0 +1 @@
+{"version":3,"file":"types.d.ts","sourceRoot":"","sources":["../../../src/adapters/types.ts"],"names":[],"mappings":"AACA,OAAO,KAAK,EAAE,aAAa,EAAE,MAAM,iBAAiB,CAAC;AAErD,8BAAsB,OAAO,CAAC,OAAO,SAAS,MAAM,GAAG,GAAG;IAClD,OAAO,CAAC,CAAC,KAAK,EAAE,aAAa,CAAC,OAAO,CAAC,GAAG,OAAO,CAAC,IAAI,CAAC;IACtD,OAAO,CAAC,CAAC,KAAK,EAAE,aAAa,CAAC,OAAO,CAAC,GAAG,OAAO,CAAC,IAAI,CAAC;IACtD,SAAS,CAAC,CAAC,KAAK,EAAE,aAAa,CAAC,OAAO,CAAC,GAAG,OAAO,CAAC,IAAI,CAAC;IACxD,KAAK,CAAC,CAAC,KAAK,EAAE,aAAa,CAAC,OAAO,CAAC,GAAG,OAAO,CAAC,IAAI,CAAC;IACpD,SAAS,CAAC,CAAC,KAAK,EAAE,aAAa,CAAC,OAAO,CAAC,GAAG,OAAO,CAAC,IAAI,CAAC;IAExD,QAAQ,CAAC,KAAK,EAAE,aAAa,CAAC,OAAO,CAAC;CAa7C"}

package/dist/types/clients/types.d.ts
ADDED
@@ -0,0 +1,10 @@
+import { z } from 'zod';
+export type ResponseModel<T extends z.AnyZodObject> = {
+    schema: T;
+    name: string;
+    description?: string;
+};
+export interface PromptClient {
+    execute<T extends z.AnyZodObject>(prompt: string, responseModel: ResponseModel<T>): Promise<z.infer<T>>;
+}
+//# sourceMappingURL=types.d.ts.map
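
PromptClient is the single integration point an LLM backend has to satisfy: take a prompt plus a ResponseModel and return data matching the zod schema. A minimal sketch; the callModel transport is hypothetical and stands in for whatever SDK a real client would use:

import { z } from 'zod';

// Hypothetical transport to some LLM backend.
declare function callModel(prompt: string): Promise<unknown>;

// Structurally compatible with the PromptClient interface above.
const client = {
  async execute<T extends z.AnyZodObject>(
    prompt: string,
    responseModel: { schema: T; name: string; description?: string }
  ): Promise<z.infer<T>> {
    const raw = await callModel(prompt);
    // Validate the model's reply against the step's schema before returning it.
    return responseModel.schema.parse(raw) as z.infer<T>;
  },
};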

package/dist/types/clients/types.d.ts.map
ADDED
@@ -0,0 +1 @@
+{"version":3,"file":"types.d.ts","sourceRoot":"","sources":["../../../src/clients/types.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,CAAC,EAAE,MAAM,KAAK,CAAC;AAExB,MAAM,MAAM,aAAa,CAAC,CAAC,SAAS,CAAC,CAAC,YAAY,IAAI;IAClD,MAAM,EAAE,CAAC,CAAC;IACV,IAAI,EAAE,MAAM,CAAC;IACb,WAAW,CAAC,EAAE,MAAM,CAAC;CACxB,CAAA;AAED,MAAM,WAAW,YAAY;IAC3B,OAAO,CAAC,CAAC,SAAS,CAAC,CAAC,YAAY,EAC9B,MAAM,EAAE,MAAM,EACd,aAAa,EAAE,aAAa,CAAC,CAAC,CAAC,GAC9B,OAAO,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC;CACxB"}

package/dist/types/dsl/constants.d.ts
ADDED
@@ -0,0 +1,16 @@
+export declare const WORKFLOW_EVENTS: {
+    readonly START: "workflow:start";
+    readonly RESTART: "workflow:restart";
+    readonly STEP_START: "step:start";
+    readonly STEP_COMPLETE: "step:complete";
+    readonly STEP_STATUS: "step:status";
+    readonly ERROR: "workflow:error";
+    readonly COMPLETE: "workflow:complete";
+};
+export declare const STATUS: {
+    readonly PENDING: "pending";
+    readonly RUNNING: "running";
+    readonly COMPLETE: "complete";
+    readonly ERROR: "error";
+};
+//# sourceMappingURL=constants.d.ts.map

package/dist/types/dsl/constants.d.ts.map
ADDED
@@ -0,0 +1 @@
+{"version":3,"file":"constants.d.ts","sourceRoot":"","sources":["../../../src/dsl/constants.ts"],"names":[],"mappings":"AAAA,eAAO,MAAM,eAAe;;;;;;;;CAQlB,CAAC;AAEX,eAAO,MAAM,MAAM;;;;;CAKT,CAAC"}

package/dist/types/dsl/extensions.d.ts
ADDED
@@ -0,0 +1,18 @@
+import { Workflow } from "./workflow";
+import type { State } from "./types";
+type Expand<T> = T extends infer O ? {
+    [K in keyof O]: O[K];
+} : never;
+type ExtensionMethods<TExtension extends Record<string, any>, TOptions extends object, TState extends State> = TExtension extends ((...args: any[]) => any) ? TExtension extends ((this: any, title: string, config: infer TConfig) => Workflow<any, infer TReturnState>) ? (title: string, config: TConfig extends ((ctx: any) => any) ? {
+    [P in keyof TConfig]: TConfig[P] extends Function ? ((ctx: TState) => any) : TConfig[P];
+} : TConfig) => Workflow<TOptions, Expand<TState & TReturnState>> : never : {
+    [K in keyof TExtension]: TExtension[K] extends (this: any, title: string, config: infer TConfig) => Workflow<any, infer TReturnState> ? (title: string, config: TConfig extends ((ctx: any) => any) ? {
+        [P in keyof TConfig]: TConfig[P] extends Function ? ((ctx: TState) => any) : TConfig[P];
+    } : TConfig) => Workflow<TOptions, Expand<TState & TReturnState>> : never;
+};
+export declare function createExtension<TExtensionKey extends string, TExtension extends Record<string, any>>(key: TExtensionKey, extension: TExtension): {
+    install(): void;
+    augment<TOptions extends object, TState extends State>(): ExtensionMethods<TExtension, TOptions, TState>;
+};
+export {};
+//# sourceMappingURL=extensions.d.ts.map

package/dist/types/dsl/extensions.d.ts.map
ADDED
@@ -0,0 +1 @@
+{"version":3,"file":"extensions.d.ts","sourceRoot":"","sources":["../../../src/dsl/extensions.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,QAAQ,EAAE,MAAM,YAAY,CAAC;AACtC,OAAO,KAAK,EAAE,KAAK,EAAE,MAAM,SAAS,CAAC;AAErC,KAAK,MAAM,CAAC,CAAC,IAAI,CAAC,SAAS,MAAM,CAAC,GAAG;KAAG,CAAC,IAAI,MAAM,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC;CAAE,GAAG,KAAK,CAAC;AAEtE,KAAK,gBAAgB,CACnB,UAAU,SAAS,MAAM,CAAC,MAAM,EAAE,GAAG,CAAC,EACtC,QAAQ,SAAS,MAAM,EACvB,MAAM,SAAS,KAAK,IAClB,UAAU,SAAS,CAAC,CAAC,GAAG,IAAI,EAAE,GAAG,EAAE,KAAK,GAAG,CAAC,GAC5C,UAAU,SAAS,CAAC,CAClB,IAAI,EAAE,GAAG,EACT,KAAK,EAAE,MAAM,EACb,MAAM,EAAE,MAAM,OAAO,KAClB,QAAQ,CAAC,GAAG,EAAE,MAAM,YAAY,CAAC,CAAC,GACrC,CACE,KAAK,EAAE,MAAM,EACb,MAAM,EAAE,OAAO,SAAS,CAAC,CAAC,GAAG,EAAE,GAAG,KAAK,GAAG,CAAC,GACvC;KAAG,CAAC,IAAI,MAAM,OAAO,GAAG,OAAO,CAAC,CAAC,CAAC,SAAS,QAAQ,GAAG,CAAC,CAAC,GAAG,EAAE,MAAM,KAAK,GAAG,CAAC,GAAG,OAAO,CAAC,CAAC,CAAC;CAAE,GAC3F,OAAO,KACR,QAAQ,CAAC,QAAQ,EAAE,MAAM,CAAC,MAAM,GAAG,YAAY,CAAC,CAAC,GACtD,KAAK,GACP;KACG,CAAC,IAAI,MAAM,UAAU,GAAG,UAAU,CAAC,CAAC,CAAC,SAAS,CAC7C,IAAI,EAAE,GAAG,EACT,KAAK,EAAE,MAAM,EACb,MAAM,EAAE,MAAM,OAAO,KAClB,QAAQ,CAAC,GAAG,EAAE,MAAM,YAAY,CAAC,GAClC,CACE,KAAK,EAAE,MAAM,EACb,MAAM,EAAE,OAAO,SAAS,CAAC,CAAC,GAAG,EAAE,GAAG,KAAK,GAAG,CAAC,GACvC;SAAG,CAAC,IAAI,MAAM,OAAO,GAAG,OAAO,CAAC,CAAC,CAAC,SAAS,QAAQ,GAAG,CAAC,CAAC,GAAG,EAAE,MAAM,KAAK,GAAG,CAAC,GAAG,OAAO,CAAC,CAAC,CAAC;KAAE,GAC3F,OAAO,KACR,QAAQ,CAAC,QAAQ,EAAE,MAAM,CAAC,MAAM,GAAG,YAAY,CAAC,CAAC,GACtD,KAAK;CACV,CAAC;AAEN,wBAAgB,eAAe,CAC7B,aAAa,SAAS,MAAM,EAC5B,UAAU,SAAS,MAAM,CAAC,MAAM,EAAE,GAAG,CAAC,EACtC,GAAG,EAAE,aAAa,EAAE,SAAS,EAAE,UAAU;;YAa/B,QAAQ,SAAS,MAAM,EAAE,MAAM,SAAS,KAAK,KAAK,gBAAgB,CAAC,UAAU,EAAE,QAAQ,EAAE,MAAM,CAAC;EAI3G"}

package/dist/types/dsl/json-patch.d.ts
ADDED
@@ -0,0 +1,11 @@
+import { JsonPatch, State } from './types';
+/**
+ * Creates a JSON Patch that describes the changes needed to transform prevState into nextState.
+ */
+export declare function createPatch(prevState: State, nextState: State): JsonPatch;
+/**
+ * Applies one or more JSON Patches to a state object and returns the resulting state.
+ * If multiple patches are provided, they are applied in sequence.
+ */
+export declare function applyPatches(state: State, patches: JsonPatch | JsonPatch[]): State;
+//# sourceMappingURL=json-patch.d.ts.map