@hotmeshio/hotmesh 0.5.5 → 0.5.7

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (94)
  1. package/README.md +7 -45
  2. package/build/index.d.ts +1 -3
  3. package/build/index.js +1 -5
  4. package/build/modules/enums.d.ts +0 -5
  5. package/build/modules/enums.js +1 -6
  6. package/build/modules/utils.d.ts +1 -1
  7. package/build/modules/utils.js +2 -29
  8. package/build/package.json +6 -17
  9. package/build/services/activities/hook.js +1 -5
  10. package/build/services/activities/trigger.js +1 -1
  11. package/build/services/compiler/index.d.ts +2 -2
  12. package/build/services/compiler/index.js +4 -4
  13. package/build/services/connector/factory.d.ts +1 -1
  14. package/build/services/connector/factory.js +3 -12
  15. package/build/services/connector/providers/postgres.js +11 -6
  16. package/build/services/exporter/index.d.ts +8 -8
  17. package/build/services/exporter/index.js +8 -8
  18. package/build/services/memflow/client.js +5 -10
  19. package/build/services/memflow/connection.d.ts +0 -2
  20. package/build/services/memflow/connection.js +0 -2
  21. package/build/services/memflow/exporter.d.ts +3 -3
  22. package/build/services/memflow/exporter.js +3 -3
  23. package/build/services/memflow/index.d.ts +154 -34
  24. package/build/services/memflow/index.js +165 -33
  25. package/build/services/memflow/interceptor.d.ts +241 -0
  26. package/build/services/memflow/interceptor.js +256 -0
  27. package/build/services/memflow/schemas/factory.js +1 -1
  28. package/build/services/memflow/search.d.ts +11 -4
  29. package/build/services/memflow/search.js +98 -71
  30. package/build/services/memflow/worker.d.ts +1 -1
  31. package/build/services/memflow/worker.js +11 -2
  32. package/build/services/memflow/workflow/execChild.js +3 -1
  33. package/build/services/memflow/workflow/execHook.js +1 -1
  34. package/build/services/memflow/workflow/hook.js +4 -2
  35. package/build/services/memflow/workflow/proxyActivities.js +2 -1
  36. package/build/services/meshcall/index.d.ts +1 -1
  37. package/build/services/meshcall/index.js +1 -1
  38. package/build/services/reporter/index.d.ts +1 -1
  39. package/build/services/reporter/index.js +12 -12
  40. package/build/services/router/consumption/index.js +23 -9
  41. package/build/services/router/error-handling/index.js +3 -3
  42. package/build/services/search/factory.js +0 -8
  43. package/build/services/search/providers/postgres/postgres.js +48 -20
  44. package/build/services/store/cache.d.ts +1 -1
  45. package/build/services/store/cache.js +1 -1
  46. package/build/services/store/factory.js +1 -9
  47. package/build/services/store/index.d.ts +1 -1
  48. package/build/services/store/providers/postgres/kvtypes/hash/basic.js +1 -1
  49. package/build/services/store/providers/postgres/kvtypes/hash/index.js +59 -2
  50. package/build/services/store/providers/postgres/kvtypes/hash/jsonb.js +11 -11
  51. package/build/services/store/providers/postgres/kvtypes/hash/udata.d.ts +10 -0
  52. package/build/services/store/providers/postgres/kvtypes/hash/udata.js +384 -0
  53. package/build/services/store/providers/postgres/postgres.js +10 -14
  54. package/build/services/stream/factory.js +0 -16
  55. package/build/services/stream/providers/postgres/postgres.js +23 -20
  56. package/build/services/sub/factory.js +0 -8
  57. package/build/services/sub/providers/nats/nats.js +0 -1
  58. package/build/services/sub/providers/postgres/postgres.js +11 -3
  59. package/build/services/task/index.js +4 -5
  60. package/build/types/activity.d.ts +1 -5
  61. package/build/types/hotmesh.d.ts +0 -5
  62. package/build/types/index.d.ts +0 -1
  63. package/build/types/index.js +1 -4
  64. package/build/types/job.d.ts +1 -1
  65. package/build/types/memflow.d.ts +83 -4
  66. package/build/types/meshcall.d.ts +0 -25
  67. package/build/types/provider.d.ts +1 -1
  68. package/build/types/stream.d.ts +1 -6
  69. package/index.ts +0 -4
  70. package/package.json +6 -17
  71. package/build/services/connector/providers/ioredis.d.ts +0 -9
  72. package/build/services/connector/providers/ioredis.js +0 -26
  73. package/build/services/connector/providers/redis.d.ts +0 -9
  74. package/build/services/connector/providers/redis.js +0 -38
  75. package/build/services/search/providers/redis/ioredis.d.ts +0 -23
  76. package/build/services/search/providers/redis/ioredis.js +0 -134
  77. package/build/services/search/providers/redis/redis.d.ts +0 -23
  78. package/build/services/search/providers/redis/redis.js +0 -147
  79. package/build/services/store/providers/redis/_base.d.ts +0 -137
  80. package/build/services/store/providers/redis/_base.js +0 -980
  81. package/build/services/store/providers/redis/ioredis.d.ts +0 -20
  82. package/build/services/store/providers/redis/ioredis.js +0 -180
  83. package/build/services/store/providers/redis/redis.d.ts +0 -18
  84. package/build/services/store/providers/redis/redis.js +0 -199
  85. package/build/services/stream/providers/redis/ioredis.d.ts +0 -61
  86. package/build/services/stream/providers/redis/ioredis.js +0 -272
  87. package/build/services/stream/providers/redis/redis.d.ts +0 -61
  88. package/build/services/stream/providers/redis/redis.js +0 -305
  89. package/build/services/sub/providers/redis/ioredis.d.ts +0 -17
  90. package/build/services/sub/providers/redis/ioredis.js +0 -81
  91. package/build/services/sub/providers/redis/redis.d.ts +0 -17
  92. package/build/services/sub/providers/redis/redis.js +0 -72
  93. package/build/types/redis.d.ts +0 -258
  94. package/build/types/redis.js +0 -11
@@ -23,8 +23,8 @@ class ExporterService {
23
23
  return jobExport;
24
24
  }
25
25
  /**
26
- * Inflates the job data from Redis into a MemFlowJobExport object
27
- * @param jobHash - the job data from Redis
26
+ * Inflates the job data into a MemFlowJobExport object
27
+ * @param jobHash - the job data
28
28
  * @param dependencyList - the list of dependencies for the job
29
29
  * @returns - the inflated job data
30
30
  */
@@ -82,7 +82,7 @@ class ExporterService {
82
82
  return resolved;
83
83
  }
84
84
  /**
85
- * Inflates the key from Redis, 3-character symbol
85
+ * Inflates the key
86
86
  * into a human-readable JSON path, reflecting the
87
87
  * tree-like structure of the unidimensional Hash
88
88
  * @private
@@ -1,4 +1,4 @@
1
- import { ContextType } from '../../types/memflow';
1
+ import { ContextType, WorkflowInterceptor } from '../../types/memflow';
2
2
  import { ClientService } from './client';
3
3
  import { ConnectionService } from './connection';
4
4
  import { Search } from './search';
@@ -8,60 +8,170 @@ import { WorkflowService } from './workflow';
8
8
  import { WorkflowHandleService } from './handle';
9
9
  import { didInterrupt } from './workflow/interruption';
10
10
  /**
11
- * The MemFlow service is a collection of services that
12
- * emulate Temporal's capabilities, but instead are
13
- * backed by Postgres or Redis/ValKey. The following lifecycle example
14
- * demonstrates how to start a new workflow, subscribe
15
- * to the result, and shutdown the system.
11
+ * The MemFlow service provides a Temporal-compatible workflow framework backed by
12
+ * Postgres. It offers durable execution, entity-based memory management,
13
+ * and composable workflows.
16
14
  *
17
- * @example
15
+ * ## Core Features
16
+ *
17
+ * ### 1. Entity-Based Memory Model
18
+ * Each workflow has a durable JSONB entity that serves as its memory:
19
+ * ```typescript
20
+ * export async function researchAgent(query: string) {
21
+ * const agent = await MemFlow.workflow.entity();
22
+ *
23
+ * // Initialize entity state
24
+ * await agent.set({
25
+ * query,
26
+ * findings: [],
27
+ * status: 'researching'
28
+ * });
29
+ *
30
+ * // Update state atomically
31
+ * await agent.merge({ status: 'analyzing' });
32
+ * await agent.append('findings', newFinding);
33
+ * }
34
+ * ```
35
+ *
36
+ * ### 2. Hook Functions & Workflow Coordination
37
+ * Spawn and coordinate multiple perspectives/phases:
18
38
  * ```typescript
19
- * import { Client, Worker, MemFlow, HotMesh } from '@hotmeshio/hotmesh';
20
- * import { Client as Postgres} from 'pg';
21
- * import * as workflows from './workflows';
39
+ * // Launch parallel research perspectives
40
+ * await MemFlow.workflow.execHook({
41
+ * taskQueue: 'research',
42
+ * workflowName: 'optimisticView',
43
+ * args: [query],
44
+ * signalId: 'optimistic-complete'
45
+ * });
46
+ *
47
+ * await MemFlow.workflow.execHook({
48
+ * taskQueue: 'research',
49
+ * workflowName: 'skepticalView',
50
+ * args: [query],
51
+ * signalId: 'skeptical-complete'
52
+ * });
22
53
  *
23
- * //1) Initialize the worker
54
+ * // Wait for both perspectives
55
+ * await Promise.all([
56
+ * MemFlow.workflow.waitFor('optimistic-complete'),
57
+ * MemFlow.workflow.waitFor('skeptical-complete')
58
+ * ]);
59
+ * ```
60
+ *
61
+ * ### 3. Durable Activities & Proxies
62
+ * Define and execute durable activities with automatic retry:
63
+ * ```typescript
64
+ * const activities = MemFlow.workflow.proxyActivities<{
65
+ * analyzeDocument: typeof analyzeDocument;
66
+ * validateFindings: typeof validateFindings;
67
+ * }>({
68
+ * activities: { analyzeDocument, validateFindings },
69
+ * retryPolicy: {
70
+ * maximumAttempts: 3,
71
+ * backoffCoefficient: 2
72
+ * }
73
+ * });
74
+ *
75
+ * // Activities are durable and automatically retried
76
+ * const analysis = await activities.analyzeDocument(data);
77
+ * const validation = await activities.validateFindings(analysis);
78
+ * ```
79
+ *
80
+ * ### 4. Workflow Composition
81
+ * Build complex workflows through composition:
82
+ * ```typescript
83
+ * // Start a child workflow
84
+ * const childResult = await MemFlow.workflow.execChild({
85
+ * taskQueue: 'analysis',
86
+ * workflowName: 'detailedAnalysis',
87
+ * args: [data],
88
+ * // Child workflow config
89
+ * config: {
90
+ * maximumAttempts: 5,
91
+ * backoffCoefficient: 2
92
+ * }
93
+ * });
94
+ *
95
+ * // Fire-and-forget child workflow
96
+ * await MemFlow.workflow.startChild({
97
+ * taskQueue: 'notifications',
98
+ * workflowName: 'sendUpdates',
99
+ * args: [updates]
100
+ * });
101
+ * ```
102
+ *
103
+ * ### 5. Workflow Interceptors
104
+ * Add cross-cutting concerns through interceptors that run as durable functions:
105
+ * ```typescript
106
+ * // Add audit interceptor that uses MemFlow functions
107
+ * MemFlow.registerInterceptor({
108
+ * async execute(ctx, next) {
109
+ * try {
110
+ * // Interceptors can use MemFlow functions and participate in replay
111
+ * const entity = await MemFlow.workflow.entity();
112
+ * await entity.append('auditLog', {
113
+ * action: 'started',
114
+ * timestamp: new Date().toISOString()
115
+ * });
116
+ *
117
+ * // Rate limiting with durable sleep
118
+ * await MemFlow.workflow.sleepFor('100 milliseconds');
119
+ *
120
+ * const result = await next();
121
+ *
122
+ * await entity.append('auditLog', {
123
+ * action: 'completed',
124
+ * timestamp: new Date().toISOString()
125
+ * });
126
+ *
127
+ * return result;
128
+ * } catch (err) {
129
+ * // CRITICAL: Always check for HotMesh interruptions
130
+ * if (MemFlow.didInterrupt(err)) {
131
+ * throw err; // Rethrow for replay system
132
+ * }
133
+ * throw err;
134
+ * }
135
+ * }
136
+ * });
137
+ * ```
138
+ *
139
+ * ## Basic Usage Example
140
+ *
141
+ * ```typescript
142
+ * import { Client, Worker, MemFlow } from '@hotmeshio/hotmesh';
143
+ * import { Client as Postgres } from 'pg';
144
+ *
145
+ * // Initialize worker
24
146
  * await Worker.create({
25
147
  * connection: {
26
148
  * class: Postgres,
27
- * options: {
28
- * connectionString: 'postgresql://usr:pwd@localhost:5432/db',
29
- * }
30
- * }
149
+ * options: { connectionString: 'postgresql://usr:pwd@localhost:5432/db' }
150
+ * },
31
151
  * taskQueue: 'default',
32
- * namespace: 'memflow',
33
- * workflow: workflows.example,
34
- * options: {
35
- * backoffCoefficient: 2,
36
- * maximumAttempts: 1_000,
37
- * maximumInterval: '5 seconds'
38
- * }
152
+ * workflow: workflows.example
39
153
  * });
40
154
  *
41
- * //2) initialize the client
155
+ * // Initialize client
42
156
  * const client = new Client({
43
157
  * connection: {
44
158
  * class: Postgres,
45
- * options: {
46
- * connectionString: 'postgresql://usr:pwd@localhost:5432/db',
47
- * }
159
+ * options: { connectionString: 'postgresql://usr:pwd@localhost:5432/db' }
48
160
  * }
49
161
  * });
50
162
  *
51
- * //3) start a new workflow
163
+ * // Start workflow
52
164
  * const handle = await client.workflow.start({
53
- * args: ['HotMesh', 'es'],
165
+ * args: ['input data'],
54
166
  * taskQueue: 'default',
55
167
  * workflowName: 'example',
56
- * workflowId: HotMesh.guid(),
57
- * namespace: 'memflow',
168
+ * workflowId: MemFlow.guid()
58
169
  * });
59
170
  *
60
- * //4) subscribe to the eventual result
61
- * console.log('\nRESPONSE', await handle.result(), '\n');
62
- * //logs '¡Hola, HotMesh!'
171
+ * // Get result
172
+ * const result = await handle.result();
63
173
  *
64
- * //5) Shutdown (typically on sigint)
174
+ * // Cleanup
65
175
  * await MemFlow.shutdown();
66
176
  * ```
67
177
  */
@@ -114,6 +224,16 @@ declare class MemFlowClass {
114
224
  * @see {@link utils/interruption.didInterrupt} for detailed documentation
115
225
  */
116
226
  static didInterrupt: typeof didInterrupt;
227
+ private static interceptorService;
228
+ /**
229
+ * Register a workflow interceptor
230
+ * @param interceptor The interceptor to register
231
+ */
232
+ static registerInterceptor(interceptor: WorkflowInterceptor): void;
233
+ /**
234
+ * Clear all registered workflow interceptors
235
+ */
236
+ static clearInterceptors(): void;
117
237
  /**
118
238
  * Shutdown everything. All connections, workers, and clients will be closed.
119
239
  * Include in your signal handlers to ensure a clean shutdown.
@@ -10,61 +10,172 @@ const worker_1 = require("./worker");
10
10
  const workflow_1 = require("./workflow");
11
11
  const handle_1 = require("./handle");
12
12
  const interruption_1 = require("./workflow/interruption");
13
+ const interceptor_1 = require("./interceptor");
13
14
  /**
14
- * The MemFlow service is a collection of services that
15
- * emulate Temporal's capabilities, but instead are
16
- * backed by Postgres or Redis/ValKey. The following lifecycle example
17
- * demonstrates how to start a new workflow, subscribe
18
- * to the result, and shutdown the system.
15
+ * The MemFlow service provides a Temporal-compatible workflow framework backed by
16
+ * Postgres. It offers durable execution, entity-based memory management,
17
+ * and composable workflows.
19
18
  *
20
- * @example
19
+ * ## Core Features
20
+ *
21
+ * ### 1. Entity-Based Memory Model
22
+ * Each workflow has a durable JSONB entity that serves as its memory:
23
+ * ```typescript
24
+ * export async function researchAgent(query: string) {
25
+ * const agent = await MemFlow.workflow.entity();
26
+ *
27
+ * // Initialize entity state
28
+ * await agent.set({
29
+ * query,
30
+ * findings: [],
31
+ * status: 'researching'
32
+ * });
33
+ *
34
+ * // Update state atomically
35
+ * await agent.merge({ status: 'analyzing' });
36
+ * await agent.append('findings', newFinding);
37
+ * }
38
+ * ```
39
+ *
40
+ * ### 2. Hook Functions & Workflow Coordination
41
+ * Spawn and coordinate multiple perspectives/phases:
42
+ * ```typescript
43
+ * // Launch parallel research perspectives
44
+ * await MemFlow.workflow.execHook({
45
+ * taskQueue: 'research',
46
+ * workflowName: 'optimisticView',
47
+ * args: [query],
48
+ * signalId: 'optimistic-complete'
49
+ * });
50
+ *
51
+ * await MemFlow.workflow.execHook({
52
+ * taskQueue: 'research',
53
+ * workflowName: 'skepticalView',
54
+ * args: [query],
55
+ * signalId: 'skeptical-complete'
56
+ * });
57
+ *
58
+ * // Wait for both perspectives
59
+ * await Promise.all([
60
+ * MemFlow.workflow.waitFor('optimistic-complete'),
61
+ * MemFlow.workflow.waitFor('skeptical-complete')
62
+ * ]);
63
+ * ```
64
+ *
65
+ * ### 3. Durable Activities & Proxies
66
+ * Define and execute durable activities with automatic retry:
67
+ * ```typescript
68
+ * const activities = MemFlow.workflow.proxyActivities<{
69
+ * analyzeDocument: typeof analyzeDocument;
70
+ * validateFindings: typeof validateFindings;
71
+ * }>({
72
+ * activities: { analyzeDocument, validateFindings },
73
+ * retryPolicy: {
74
+ * maximumAttempts: 3,
75
+ * backoffCoefficient: 2
76
+ * }
77
+ * });
78
+ *
79
+ * // Activities are durable and automatically retried
80
+ * const analysis = await activities.analyzeDocument(data);
81
+ * const validation = await activities.validateFindings(analysis);
82
+ * ```
83
+ *
84
+ * ### 4. Workflow Composition
85
+ * Build complex workflows through composition:
86
+ * ```typescript
87
+ * // Start a child workflow
88
+ * const childResult = await MemFlow.workflow.execChild({
89
+ * taskQueue: 'analysis',
90
+ * workflowName: 'detailedAnalysis',
91
+ * args: [data],
92
+ * // Child workflow config
93
+ * config: {
94
+ * maximumAttempts: 5,
95
+ * backoffCoefficient: 2
96
+ * }
97
+ * });
98
+ *
99
+ * // Fire-and-forget child workflow
100
+ * await MemFlow.workflow.startChild({
101
+ * taskQueue: 'notifications',
102
+ * workflowName: 'sendUpdates',
103
+ * args: [updates]
104
+ * });
105
+ * ```
106
+ *
107
+ * ### 5. Workflow Interceptors
108
+ * Add cross-cutting concerns through interceptors that run as durable functions:
21
109
  * ```typescript
22
- * import { Client, Worker, MemFlow, HotMesh } from '@hotmeshio/hotmesh';
23
- * import { Client as Postgres} from 'pg';
24
- * import * as workflows from './workflows';
110
+ * // Add audit interceptor that uses MemFlow functions
111
+ * MemFlow.registerInterceptor({
112
+ * async execute(ctx, next) {
113
+ * try {
114
+ * // Interceptors can use MemFlow functions and participate in replay
115
+ * const entity = await MemFlow.workflow.entity();
116
+ * await entity.append('auditLog', {
117
+ * action: 'started',
118
+ * timestamp: new Date().toISOString()
119
+ * });
120
+ *
121
+ * // Rate limiting with durable sleep
122
+ * await MemFlow.workflow.sleepFor('100 milliseconds');
123
+ *
124
+ * const result = await next();
125
+ *
126
+ * await entity.append('auditLog', {
127
+ * action: 'completed',
128
+ * timestamp: new Date().toISOString()
129
+ * });
25
130
  *
26
- * //1) Initialize the worker
131
+ * return result;
132
+ * } catch (err) {
133
+ * // CRITICAL: Always check for HotMesh interruptions
134
+ * if (MemFlow.didInterrupt(err)) {
135
+ * throw err; // Rethrow for replay system
136
+ * }
137
+ * throw err;
138
+ * }
139
+ * }
140
+ * });
141
+ * ```
142
+ *
143
+ * ## Basic Usage Example
144
+ *
145
+ * ```typescript
146
+ * import { Client, Worker, MemFlow } from '@hotmeshio/hotmesh';
147
+ * import { Client as Postgres } from 'pg';
148
+ *
149
+ * // Initialize worker
27
150
  * await Worker.create({
28
151
  * connection: {
29
152
  * class: Postgres,
30
- * options: {
31
- * connectionString: 'postgresql://usr:pwd@localhost:5432/db',
32
- * }
33
- * }
153
+ * options: { connectionString: 'postgresql://usr:pwd@localhost:5432/db' }
154
+ * },
34
155
  * taskQueue: 'default',
35
- * namespace: 'memflow',
36
- * workflow: workflows.example,
37
- * options: {
38
- * backoffCoefficient: 2,
39
- * maximumAttempts: 1_000,
40
- * maximumInterval: '5 seconds'
41
- * }
156
+ * workflow: workflows.example
42
157
  * });
43
158
  *
44
- * //2) initialize the client
159
+ * // Initialize client
45
160
  * const client = new Client({
46
161
  * connection: {
47
162
  * class: Postgres,
48
- * options: {
49
- * connectionString: 'postgresql://usr:pwd@localhost:5432/db',
50
- * }
163
+ * options: { connectionString: 'postgresql://usr:pwd@localhost:5432/db' }
51
164
  * }
52
165
  * });
53
166
  *
54
- * //3) start a new workflow
167
+ * // Start workflow
55
168
  * const handle = await client.workflow.start({
56
- * args: ['HotMesh', 'es'],
169
+ * args: ['input data'],
57
170
  * taskQueue: 'default',
58
171
  * workflowName: 'example',
59
- * workflowId: HotMesh.guid(),
60
- * namespace: 'memflow',
172
+ * workflowId: MemFlow.guid()
61
173
  * });
62
174
  *
63
- * //4) subscribe to the eventual result
64
- * console.log('\nRESPONSE', await handle.result(), '\n');
65
- * //logs '¡Hola, HotMesh!'
175
+ * // Get result
176
+ * const result = await handle.result();
66
177
  *
67
- * //5) Shutdown (typically on sigint)
178
+ * // Cleanup
68
179
  * await MemFlow.shutdown();
69
180
  * ```
70
181
  */
@@ -73,6 +184,26 @@ class MemFlowClass {
73
184
  * @private
74
185
  */
75
186
  constructor() { }
187
+ /**
188
+ * Register a workflow interceptor
189
+ * @param interceptor The interceptor to register
190
+ */
191
+ static registerInterceptor(interceptor) {
192
+ MemFlowClass.interceptorService.register(interceptor);
193
+ }
194
+ /**
195
+ * Clear all registered workflow interceptors
196
+ */
197
+ static clearInterceptors() {
198
+ MemFlowClass.interceptorService.clear();
199
+ }
200
+ /**
201
+ * Get the interceptor service instance
202
+ * @internal
203
+ */
204
+ static getInterceptorService() {
205
+ return MemFlowClass.interceptorService;
206
+ }
76
207
  /**
77
208
  * Shutdown everything. All connections, workers, and clients will be closed.
78
209
  * Include in your signal handlers to ensure a clean shutdown.
@@ -128,3 +259,4 @@ MemFlowClass.workflow = workflow_1.WorkflowService;
128
259
  * @see {@link utils/interruption.didInterrupt} for detailed documentation
129
260
  */
130
261
  MemFlowClass.didInterrupt = interruption_1.didInterrupt;
262
+ MemFlowClass.interceptorService = new interceptor_1.InterceptorService();