@usehelical/workflows 0.0.1-alpha.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/README.md ADDED
@@ -0,0 +1,123 @@
1
+ # Helical Workflows
2
+
3
+ Simple, typesafe durable workflows without bundler magic
4
+
5
+ > [!WARNING]
6
+ > This is a work in progress
7
+
8
+ # Goals
9
+
10
+ - simple & typesafe api
11
+ - minimal deployment complexity (just postgres)
12
+ - low latency (making it suitable for user-facing ai workflows)
13
+
14
+ # Getting started
15
+
16
+ ## Writing workflows
17
+
18
+ ```ts
19
+ import * as wf from '@usehelical/workflows/workflows';
20
+
21
+ async function indexDocument(url: string) {
22
+ // idempotent retryable durable steps
23
+ const document = await wf.runStep(async () => fetchDocument(url), { maxRetries: 5 });
24
+ const parsedDocument = await wf.runStep(async () => parseDocument(document));
25
+ await wf.runStep(async () => upsertIntoDb(parsedDocument));
26
+ }
27
+
28
+ const indexDocumentWorkflow = defineWorkflow(indexDocument);
29
+
30
+ const indexingQueue = defineQueue();
31
+
32
+ async function processDocumentBatch(urls: string[]) {
33
+ const handles = [];
34
+
35
+ // enqueue all documents for indexing
36
+ for (const url of urls) {
37
+ const runHandle = await wf.enqueueWorkflow(indexingQueue, indexDocument, [url]);
38
+ handles.push(runHandle);
39
+ }
40
+
41
+ let successfulCount = 0;
42
+ let failedCount = 0;
43
+
44
+ // wait for all documents to be indexed
45
+ for (const handle of handles) {
46
+ try {
47
+ await handle.result();
48
+ successfulCount++;
49
+ } catch (error) {
50
+ failedCount++;
51
+ }
52
+ }
53
+
54
+ return { successfulCount, failedCount };
55
+ }
56
+
57
+ const processDocumentBatchWorkflow = defineWorkflow(processDocumentBatch);
58
+ ```
59
+
60
+ ## Running a workflow
61
+
62
+ ```ts
63
+ // create instance and register workflows and queues
64
+ const { runWorkflow } = createInstance({
65
+ workflows: { indexDocumentWorkflow, processDocumentBatchWorkflow },
66
+ queues: { indexingQueue },
67
+ options: { connectionString: 'dummy' },
68
+ });
69
+
70
+ const urls = ['a', 'b', 'c'];
71
+
72
+ const run = await runWorkflow(processDocumentBatchWorkflow, urls);
73
+
74
+ console.log(await run.result());
75
+ ```
76
+
77
+ # Development roadmap
78
+
79
+ ## Messages
80
+
81
+ Messages are a way of communicating with a workflow from the outside. This can be useful for human-in-the-loop workflows or for other signals. Messages are persisted in the database and atomically consumed adhering to once-and-only-once semantics.
82
+
83
+ ```ts
84
+ const approvalMessage = defineMessage<ApprovalMessage>('approval');
85
+
86
+ async function myWorkflowFunction() {
87
+ await wf.runStep(() => stageForApproval());
88
+ await wf.receiveMessage(approvalMessage);
89
+ await wf.runStep(() => publish());
90
+ }
91
+ ```
92
+
93
+ ## State
94
+
95
+ Workflow state is a way for workflows to expose state to the outside. The benefit of this state approach in comparison to other request handler approaches like in Temporal is that the workflow doesn’t have to be executing in order for the state to be retrievable. This makes it possible to retrieve state even if a workflow is already cancelled or finished.
96
+
97
+ ```ts
98
+ async function myWorkflowFunction() {
99
+ await wf.setState({ progress: 50 });
100
+ }
101
+ ```
102
+
103
+ ## Schedules
104
+
105
+ Allow a workflow to be scheduled…
106
+
107
+ ## Streams
108
+
109
+ Streams allow for publishing streaming data from workflows to the outside; the primary use case for this is realtime streaming of LLM responses. I want to take a different approach to other workflow systems here and treat streams as side effects inside of workflows and not as durable outputs. Since LLM streaming responses can be quite quick, persisting the token stream into the database is costly. My hypothesis is that stream persistence is not needed, since resuming LLM streaming responses requires [special handling](https://platform.claude.com/docs/en/build-with-claude/streaming#error-recovery) and in most cases it makes more sense to just regenerate the response. Usually non-idempotent side effects in workflows are prohibited to achieve durability guarantees. But in this case the correctness of the workflow doesn't depend on the stream — the stream is just a user experience optimisation.
110
+
111
+ ## Workflow suspension
112
+
113
+ When implementing very long running workflows (days, months…) the workflow would otherwise stay in memory the whole time, idling while waiting — for instance, for a message or for a sleep() event. Workflow suspension would allow workflows to be suspended while waiting and then replayed and continued when the event arrives. This comes at the cost of latency (since the workflow has to be replayed to reach its original state again after it has been suspended), which is why I want to make this an optional feature.
114
+
115
+ ## Workflow version management
116
+
117
+ A tricky topic. In production systems it is possible that workflows with old versions are still Pending or Queued. I don’t know yet what the best way to handle this is, but I’m leaning towards workflow versions that can be manually specified using a versionId or could be auto-generated by creating a hash from the workflow code.
118
+
119
+ ## Chaos tests
120
+
121
+ ## OTEL Integration
122
+
123
+ ## CLI & Observability UI
@@ -0,0 +1,42 @@
1
+ import { W as WorkflowStatus, a as WorkflowEntry, Q as QueueEntry, M as MessageDefinition, S as StateDefinition } from './state-CmpqDrnz.mjs';
2
+
3
+ interface Run<TReturn = unknown> {
4
+ id: string;
5
+ status: () => Promise<WorkflowStatus>;
6
+ result: () => Promise<TReturn>;
7
+ }
8
+
9
+ type RunWorkflowOptions = {
10
+ timeout?: number;
11
+ deadline?: number;
12
+ };
13
+
14
+ type QueueWorkflowOptions = {
15
+ timeout?: number;
16
+ deadline?: number;
17
+ priority?: number;
18
+ partitionKey?: string;
19
+ deduplicationId?: string;
20
+ };
21
+
22
+ type CreateInstanceOptions = {
23
+ instanceId?: string;
24
+ connectionString: string;
25
+ };
26
+ type CreateInstanceParams = {
27
+ workflows: Record<string, WorkflowEntry>;
28
+ queues?: Record<string, QueueEntry>;
29
+ options: CreateInstanceOptions;
30
+ };
31
+ declare function createInstance(props: CreateInstanceParams): {
32
+ runWorkflow: <TArgs extends unknown[], TReturn>(wf: WorkflowEntry<TArgs, TReturn> | string, args?: TArgs, options?: RunWorkflowOptions) => Promise<Run<TReturn>>;
33
+ cancelRun: (runId: string) => Promise<void>;
34
+ resumeRun: (runId: string) => Promise<Run<void>>;
35
+ getRun: (runId: string) => Promise<Run<unknown>>;
36
+ queueWorkflow: <TArgs extends unknown[], TReturn>(queue: QueueEntry | string, wf: WorkflowEntry<TArgs, TReturn> | string, args?: TArgs, options?: QueueWorkflowOptions) => Promise<Run<TReturn>>;
37
+ sendMessage: <T>(target: Run | string, name: MessageDefinition<T>, data: T) => Promise<void>;
38
+ getState: <T>(target: Run | string, key: StateDefinition<T> | string) => Promise<T>;
39
+ };
40
+ type Instance = ReturnType<typeof createInstance>;
41
+
42
+ export { type CreateInstanceParams, type Instance, createInstance };
package/dist/index.mjs ADDED
@@ -0,0 +1,3 @@
1
+ export * from './runtime';
2
+ //# sourceMappingURL=index.mjs.map
3
+ //# sourceMappingURL=index.mjs.map
@@ -0,0 +1 @@
1
+ {"version":3,"sources":[],"names":[],"mappings":"","file":"index.mjs","sourcesContent":[]}
@@ -0,0 +1,49 @@
1
+ type QueueRateLimit = {
2
+ limitPerPeriod: number;
3
+ period: number;
4
+ };
5
+ type QueueOptions = {
6
+ workerConcurrency?: number;
7
+ concurrency?: number;
8
+ rateLimit?: QueueRateLimit;
9
+ priorityEnabled?: boolean;
10
+ partitioningEnabled?: boolean;
11
+ name?: string;
12
+ };
13
+ declare function defineQueue(options?: QueueOptions): QueueEntry;
14
+ type QueueEntry = () => QueueOptions;
15
+ type QueueDefinition = QueueOptions & {
16
+ name: string;
17
+ };
18
+
19
+ declare enum WorkflowStatus {
20
+ PENDING = "PENDING",
21
+ QUEUED = "QUEUED",
22
+ SUCCESS = "SUCCESS",
23
+ ERROR = "ERROR",
24
+ CANCELLED = "CANCELLED",
25
+ MAX_RECOVERY_ATTEMPTS_EXCEEDED = "MAX_RECOVERY_ATTEMPTS_EXCEEDED"
26
+ }
27
+ type WorkflowFunction<Args extends unknown[], R> = (...args: Args) => Promise<R> | R;
28
+ type WorkflowDefinition<TArgs extends unknown[] = unknown[], TReturn = unknown> = {
29
+ fn: WorkflowFunction<TArgs, TReturn>;
30
+ maxRecoveryAttempts?: number;
31
+ };
32
+ declare function defineWorkflow<TArgs extends unknown[], TReturn>(fn: WorkflowFunction<TArgs, TReturn>, options?: {
33
+ maxRecoveryAttempts?: number;
34
+ }): () => WorkflowDefinition<TArgs, TReturn>;
35
+ type WorkflowEntry<TArgs extends unknown[] = unknown[], TReturn = unknown> = () => WorkflowDefinition<TArgs, TReturn>;
36
+
37
+ type MessageDefinition<T> = {
38
+ name: string;
39
+ data?: T;
40
+ };
41
+ declare function defineMessage<T>(name: string): MessageDefinition<T>;
42
+
43
+ interface StateDefinition<T> {
44
+ name: string;
45
+ data?: T;
46
+ }
47
+ declare function defineState<T>(name: string): StateDefinition<T>;
48
+
49
+ export { type MessageDefinition as M, type QueueEntry as Q, type StateDefinition as S, WorkflowStatus as W, type WorkflowEntry as a, type QueueDefinition as b, type QueueOptions as c, type QueueRateLimit as d, type WorkflowDefinition as e, type WorkflowFunction as f, defineMessage as g, defineQueue as h, defineState as i, defineWorkflow as j };
@@ -0,0 +1,19 @@
1
+ export { M as MessageDefinition, b as QueueDefinition, Q as QueueEntry, c as QueueOptions, d as QueueRateLimit, S as StateDefinition, e as WorkflowDefinition, a as WorkflowEntry, f as WorkflowFunction, W as WorkflowStatus, g as defineMessage, h as defineQueue, i as defineState, j as defineWorkflow } from './state-CmpqDrnz.mjs';
2
+
3
+ type RetryConfig = {
4
+ maxRetries?: number;
5
+ retryDelay?: number;
6
+ backOffRate?: number;
7
+ };
8
+ type StepOptions = RetryConfig & {
9
+ name?: string;
10
+ };
11
+ type StepFunction<Args extends unknown[], R> = (...args: Args) => Promise<R> | R;
12
+ type StepDefinition<TArgs extends unknown[], TReturn> = {
13
+ fn: StepFunction<TArgs, TReturn>;
14
+ args: TArgs;
15
+ options: StepOptions;
16
+ };
17
+ declare function defineStep<TArgs extends unknown[], TReturn>(fn: StepFunction<TArgs, TReturn>, options?: StepOptions): (...args: TArgs) => StepDefinition<TArgs, TReturn>;
18
+
19
+ export { type RetryConfig, type StepDefinition, type StepFunction, defineStep };
@@ -0,0 +1,7 @@
1
+ export * from './message';
2
+ export * from './queue';
3
+ export * from './state';
4
+ export * from './step';
5
+ export * from './workflow';
6
+ //# sourceMappingURL=workflows.mjs.map
7
+ //# sourceMappingURL=workflows.mjs.map
@@ -0,0 +1 @@
1
+ {"version":3,"sources":[],"names":[],"mappings":"","file":"workflows.mjs","sourcesContent":[]}
package/package.json ADDED
@@ -0,0 +1,62 @@
1
+ {
2
+ "name": "@usehelical/workflows",
3
+ "version": "0.0.1-alpha.1",
4
+ "description": "simple typesafe durable workflows without magic",
5
+ "main": "./dist/index.js",
6
+ "types": "./dist/index.d.ts",
7
+ "exports": {
8
+ ".": {
9
+ "types": "./dist/index.d.ts",
10
+ "import": "./dist/index.js"
11
+ },
12
+ "./workflows": {
13
+ "types": "./dist/workflows.d.ts",
14
+ "import": "./dist/workflows.js"
15
+ }
16
+ },
17
+ "files": [
18
+ "dist",
19
+ "README.md"
20
+ ],
21
+ "scripts": {
22
+ "test": "vitest run",
23
+ "test:watch": "vitest",
24
+ "test:coverage": "vitest run --coverage",
25
+ "typecheck": "tsc --noEmit",
26
+ "lint": "eslint .",
27
+ "fmt": "prettier --write .",
28
+ "build": "tsup",
29
+ "db:generate:types": "kysely-codegen --out-file ./core/internal/db/types.ts",
30
+ "db:migrate": "docker-compose exec -T postgres psql -U postgres -d postgres < core/internal/db/migrations/init.up.sql",
31
+ "db:destroy": "docker-compose exec -T postgres psql -U postgres -d postgres -c 'DROP SCHEMA public CASCADE; CREATE SCHEMA public;'"
32
+ },
33
+ "engines": {
34
+ "node": ">=18"
35
+ },
36
+ "keywords": [],
37
+ "author": "",
38
+ "license": "AGPL-3.0-only",
39
+ "devDependencies": {
40
+ "@electric-sql/pglite": "^0.3.15",
41
+ "@eslint/js": "^9.39.2",
42
+ "@types/pg": "^8.16.0",
43
+ "@vitest/coverage-v8": "^4.0.18",
44
+ "eslint": "^9.39.2",
45
+ "globals": "^17.2.0",
46
+ "jiti": "^2.6.1",
47
+ "kysely-codegen": "^0.19.0",
48
+ "kysely-pglite": "^0.6.1",
49
+ "prettier": "^3.7.4",
50
+ "tsup": "^8.5.1",
51
+ "typescript": "^5.9.3",
52
+ "typescript-eslint": "^8.54.0",
53
+ "uuid_ossp": "link:@electric-sql/pglite/contrib/uuid_ossp",
54
+ "vitest": "^4.0.17"
55
+ },
56
+ "dependencies": {
57
+ "kysely": "^0.28.9",
58
+ "pg": "^8.16.3",
59
+ "pino": "^10.2.0",
60
+ "serialize-error": "^13.0.1"
61
+ }
62
+ }