@hotmeshio/hotmesh 0.5.4 → 0.5.6

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (27) hide show
  1. package/README.md +185 -161
  2. package/build/package.json +3 -2
  3. package/build/services/activities/trigger.js +1 -1
  4. package/build/services/connector/factory.js +2 -1
  5. package/build/services/connector/providers/postgres.js +11 -6
  6. package/build/services/memflow/client.js +4 -2
  7. package/build/services/memflow/index.d.ts +154 -34
  8. package/build/services/memflow/index.js +165 -33
  9. package/build/services/memflow/interceptor.d.ts +241 -0
  10. package/build/services/memflow/interceptor.js +256 -0
  11. package/build/services/memflow/worker.js +10 -1
  12. package/build/services/memflow/workflow/execChild.js +3 -1
  13. package/build/services/memflow/workflow/execHook.js +1 -1
  14. package/build/services/memflow/workflow/hook.js +4 -2
  15. package/build/services/memflow/workflow/proxyActivities.js +2 -1
  16. package/build/services/router/consumption/index.js +23 -9
  17. package/build/services/router/error-handling/index.js +3 -3
  18. package/build/services/search/providers/postgres/postgres.js +47 -19
  19. package/build/services/store/providers/postgres/kvtypes/hash/basic.js +1 -1
  20. package/build/services/store/providers/postgres/kvtypes/hash/index.js +2 -2
  21. package/build/services/store/providers/postgres/kvtypes/hash/jsonb.js +11 -11
  22. package/build/services/store/providers/postgres/postgres.js +8 -8
  23. package/build/services/stream/providers/postgres/postgres.js +23 -20
  24. package/build/services/sub/providers/postgres/postgres.js +11 -3
  25. package/build/services/task/index.js +4 -4
  26. package/build/types/memflow.d.ts +78 -0
  27. package/package.json +3 -2
package/README.md CHANGED
@@ -1,52 +1,123 @@
1
1
  # HotMesh
2
2
 
3
- **Workflow That Remembers**
3
+ **Durable Memory + Coordinated Execution**
4
4
 
5
- ![beta release](https://img.shields.io/badge/release-beta-blue.svg) ![made with typescript](https://img.shields.io/badge/built%20with-typescript-lightblue.svg)
5
+ ![beta release](https://img.shields.io/badge/release-beta-blue.svg) ![made with typescript](https://img.shields.io/badge/built%20with-typescript-lightblue.svg)
6
6
 
7
- HotMesh brings a **memory model** to durable functions. Built on PostgreSQL, it treats your database as the runtime hub for agents, pipelines, and long-lived processes.
7
+ HotMesh removes the repetitive glue of building durable agents, pipelines, and long‑running workflows. You focus on *what should change*; HotMesh handles *how it changes safely and durably.*
8
8
 
9
- Use HotMesh to:
9
+ ---
10
+
11
+ ## Why Choose HotMesh
10
12
 
11
- * **Store Evolving State** – Retain memory/state between executions
12
- * **Coordinate Distributed Work** Safely allow multiple workers to act on shared state
13
- * **Track and Replay** Full audit history and replay support by default
13
+
14
+ - **One memory model** across all your agents and pipelines. No more designing custom persistence for each workflow.
15
+ - **Automatic reliability** with transactional safety, replay protection, and crash recovery built-in.
16
+ - **Natural concurrency** through isolated hooks that can run in parallel without coordination overhead.
17
+ - **Operational transparency** using standard SQL to query live pipeline status and agent memory.
18
+ - **Multi-tenant ready** with clean schema isolation and flexible indexing.
19
+ ---
20
+
21
+ ## Core Abstractions
22
+
23
+ ### 1. Entities
24
+
25
+ Durable JSONB documents representing *process memory*. Each entity:
26
+
27
+ * Has a stable identity (`workflowId` / logical key).
28
+ * Evolves via atomic commands.
29
+ * Is versioned implicitly by transactional history.
30
+ * Can be partially indexed for targeted query performance.
31
+
32
+ > **Design Note:** Treat entity shape as *contractual surface* + *freeform interior*. Index only the minimal surface required for lookups or dashboards.
33
+
34
+ ### 2. Hooks
35
+
36
+ Re‑entrant, idempotent, interruptible units of work that *maintain* an entity. Hooks can:
37
+
38
+ * Start, stop, or be re‑invoked without corrupting state.
39
+ * Run concurrently (Postgres ensures isolation on write).
40
+ * Emit signals to let coordinators or sibling hooks know a perspective / phase completed.
41
+
42
+ ### 3. Workflow Coordinators
43
+
44
+ Thin entrypoints that:
45
+
46
+ * Seed initial entity state.
47
+ * Fan out perspective / phase hooks.
48
+ * Optionally synthesize or finalize.
49
+ * Return a snapshot (often the final entity state) — *the workflow result is just memory*.
50
+
51
+ ### 4. Commands (Entity Mutation Primitives)
52
+
53
+ | Command | Purpose | Example |
54
+ | ----------- | ----------------------------------------- | ------------------------------------------------ |
55
+ | `set` | Replace full value (first write or reset) | `await e.set({ user: { id: 123, name: "John" } })` |
56
+ | `merge` | Deep JSON merge | `await e.merge({ user: { email: "john@example.com" } })` |
57
+ | `append` | Append to an array field | `await e.append('items', { id: 1, name: "New Item" })` |
58
+ | `prepend` | Add to start of array field | `await e.prepend('items', { id: 0, name: "First Item" })` |
59
+ | `remove` | Remove item from array by index | `await e.remove('items', 0)` |
60
+ | `increment` | Numeric counters / progress | `await e.increment('counter', 5)` |
61
+ | `toggle` | Toggle boolean value | `await e.toggle('settings.enabled')` |
62
+ | `setIfNotExists` | Set value only if path doesn't exist | `await e.setIfNotExists('user.id', 123)` |
63
+ | `delete` | Remove field at specified path | `await e.delete('user.email')` |
64
+ | `get` | Read value at path (or full entity) | `await e.get('user.email')` |
65
+ | `signal` | Mark hook milestone / unlock waiters | `await MemFlow.workflow.signal('phase-x', data)` |
66
+
67
+ The Entity module also provides static methods for cross-entity querying:
68
+
69
+ ```typescript
70
+ // Find entities matching conditions
71
+ const activeUsers = await Entity.find('user', {
72
+ status: 'active',
73
+ country: 'US'
74
+ });
75
+
76
+ // Find by specific field condition
77
+ const highValueOrders = await Entity.findByCondition(
78
+ 'order',
79
+ 'total_amount',
80
+ 1000,
81
+ '>=',
82
+ hotMeshClient
83
+ );
84
+
85
+ // Find single entity by ID
86
+ const user = await Entity.findById('user', 'user123', hotMeshClient);
87
+
88
+ // Create optimized index for queries
89
+ await Entity.createIndex('user', 'email', hotMeshClient);
90
+ ```
14
91
 
15
92
  ---
16
93
 
17
94
  ## Table of Contents
18
95
 
19
96
  1. [Quick Start](#quick-start)
20
- 2. [Permanent Memory Architecture](#permanent-memory-architecture)
97
+ 2. [Memory Architecture](#memory-architecture)
21
98
  3. [Durable AI Agents](#durable-ai-agents)
22
- 4. [Building Pipelines with State](#building-pipelines-with-state)
23
- 5. [Documentation & Links](#documentation--links)
99
+ 4. [Stateful Pipelines](#stateful-pipelines)
100
+ 5. [Indexing Strategy](#indexing-strategy)
101
+ 6. [Operational Notes](#operational-notes)
102
+ 7. [Documentation & Links](#documentation--links)
24
103
 
25
104
  ---
26
105
 
27
106
  ## Quick Start
28
107
 
29
- ### Prerequisites
30
-
31
- * PostgreSQL (or Supabase)
32
- * Node.js 16+
33
-
34
108
  ### Install
35
109
 
36
110
  ```bash
37
111
  npm install @hotmeshio/hotmesh
38
112
  ```
39
113
 
40
- ### Connect to a Database
41
-
42
- HotMesh leverages Temporal.io's developer-friendly syntax for authoring workers, workflows, and clients. The `init` and `start` methods should look familiar.
43
-
114
+ ### Minimal Setup
44
115
  ```ts
45
116
  import { MemFlow } from '@hotmeshio/hotmesh';
46
117
  import { Client as Postgres } from 'pg';
47
118
 
48
119
  async function main() {
49
- // MemFlow will auto-provision the database upon init
120
+ // Auto-provisions required tables/index scaffolding on first run
50
121
  const mf = await MemFlow.init({
51
122
  appId: 'my-app',
52
123
  engine: {
@@ -57,157 +128,129 @@ async function main() {
57
128
  }
58
129
  });
59
130
 
60
- // Start a workflow with an assigned ID and arguments
131
+ // Start a durable research agent (entity-backed workflow)
61
132
  const handle = await mf.workflow.start({
62
133
  entity: 'research-agent',
63
134
  workflowName: 'researchAgent',
64
135
  workflowId: 'agent-session-jane-001',
65
- args: ['What are the long-term impacts of renewable energy subsidies?'],
136
+ args: ['Long-term impacts of renewable energy subsidies'],
66
137
  taskQueue: 'agents'
67
138
  });
68
139
 
69
- console.log('Result:', await handle.result());
140
+ console.log('Final Memory Snapshot:', await handle.result());
70
141
  }
71
142
 
72
143
  main().catch(console.error);
73
144
  ```
74
145
 
75
- ### System Benefits
76
-
77
- * **No Setup Required** – Tables and indexes are provisioned automatically
78
- * **Shared State** – Every worker shares access to the same entity memory
79
- * **Coordination by Design** PostgreSQL handles consistency and isolation
80
- * **Tenant Isolation** – Each app maintains its own schema
81
- * **Scalable Defaults** – Partitioned tables and index support included
146
+ ### Value Checklist (What You Did *Not* Have To Do)
147
+ - Create tables / migrations
148
+ - Define per-agent caches
149
+ - Implement optimistic locking
150
+ - Build a queue fan‑out mechanism
151
+ - Hand-roll replay protection
82
152
 
83
153
  ---
84
154
 
85
- ## Permanent Memory Architecture
86
-
87
- Every workflow in HotMesh is backed by an "entity": a versioned, JSONB record that tracks its memory and state transitions.
155
+ ## Memory Architecture
156
+ Each workflow = **1 durable entity**. Hooks are stateless functions *shaped by* that entity's evolving JSON. You can inspect or modify it at any time using ordinary SQL or the provided API.
88
157
 
89
- * **Entities** – Represent long-lived state for a workflow or agent
90
- * **Commands** – Modify state with methods like `set`, `merge`, `append`, `increment`
91
- * **Consistency** All updates are transactional with Postgres
92
- * **Replay Safety** – Protects against duplicated side effects during re-execution
93
- * **Partial Indexing** – Optimized querying of fields within large JSON structures
94
-
95
- ### Example: Partial Index for Premium Users
158
+ ### Programmatic Indexing
159
+ ```ts
160
+ // Create index for premium research agents
161
+ await MemFlow.Entity.createIndex('research-agent', 'isPremium', hotMeshClient);
162
+
163
+ // Find premium agents needing verification
164
+ const agents = await MemFlow.Entity.find('research-agent', {
165
+ isPremium: true,
166
+ needsVerification: true
167
+ }, hotMeshClient);
168
+ ```
96
169
 
170
+ ### Direct SQL Access
97
171
  ```sql
98
- -- Index only those user entities that are marked as premium
99
- CREATE INDEX idx_user_premium ON your_app.jobs (id)
100
- WHERE entity = 'user' AND (context->>'isPremium')::boolean = true;
172
+ -- Same index via SQL (more control over index type/conditions)
173
+ CREATE INDEX idx_research_agents_premium ON my_app.jobs (id)
174
+ WHERE entity = 'research-agent' AND (context->>'isPremium')::boolean = true;
175
+
176
+ -- Ad hoc query example
177
+ SELECT id, context->>'status' as status, context->>'confidence' as confidence
178
+ FROM my_app.jobs
179
+ WHERE entity = 'research-agent'
180
+ AND (context->>'isPremium')::boolean = true
181
+ AND (context->>'confidence')::numeric > 0.8;
101
182
  ```
102
183
 
103
- This index improves performance for filtered queries while reducing index size.
184
+ **Guidelines:**
185
+ 1. *Model intent, not mechanics.* Keep ephemeral calculation artifacts minimal; store derived values only if reused.
186
+ 2. *Index sparingly.* Each index is a write amplification cost. Start with 1–2 selective partial indexes.
187
+ 3. *Keep arrays append‑only where possible.* Supports audit and replay semantics cheaply.
188
+ 4. *Choose your tool:* Use Entity methods for standard queries, raw SQL for complex analytics or custom indexes.
104
189
 
105
190
  ---
106
191
 
107
192
  ## Durable AI Agents
193
+ Agents become simpler: the *agent* is the memory record; hooks supply perspectives, verification, enrichment, or lifecycle progression.
108
194
 
109
- Agents often require memory—context that persists between invocations, spans multiple perspectives, or outlives a single process.
110
-
111
- The following example builds a "research agent" that executes hooks with different perspectives and then synthesizes. The data-first approach sets up initial state and then uses temporary hook functions to augment over the lifecycle of the entity record.
112
-
113
- ### Research Agent Example
114
-
115
- #### Main Coordinator Agent
116
-
195
+ ### Coordinator (Research Agent)
117
196
  ```ts
118
- export async function researchAgent(query: string): Promise<any> {
119
- const agent = await MemFlow.workflow.entity();
197
+ export async function researchAgent(query: string) {
198
+ const entity = await MemFlow.workflow.entity();
120
199
 
121
- // Set up shared memory for this agent session
122
- const initialState = {
200
+ const initial = {
123
201
  query,
124
202
  findings: [],
125
203
  perspectives: {},
126
204
  confidence: 0,
127
205
  verification: {},
128
206
  status: 'researching',
129
- startTime: new Date().toISOString(),
130
- }
131
- await agent.set<typeof initialState>(initialState);
132
-
133
- // Launch perspective hooks
134
- await MemFlow.workflow.execHook({
135
- taskQueue: 'agents',
136
- workflowName: 'optimisticPerspective',
137
- args: [query],
138
- signalId: 'optimistic-complete'
139
- });
140
-
141
- await MemFlow.workflow.execHook({
142
- taskQueue: 'agents',
143
- workflowName: 'skepticalPerspective',
144
- args: [query],
145
- signalId: 'skeptical-complete'
146
- });
147
-
148
- await MemFlow.workflow.execHook({
149
- taskQueue: 'agents',
150
- workflowName: 'verificationHook',
151
- args: [query],
152
- signalId: 'verification-complete'
153
- });
207
+ startTime: new Date().toISOString()
208
+ };
209
+ await entity.set<typeof initial>(initial);
154
210
 
155
- await MemFlow.workflow.execHook({
156
- taskQueue: 'perspectives',
157
- workflowName: 'synthesizePerspectives',
158
- args: [],
159
- signalId: 'synthesis-complete',
160
- });
211
+ // Fan-out perspectives
212
+ await MemFlow.workflow.execHook({ taskQueue: 'agents', workflowName: 'optimisticPerspective', args: [query], signalId: 'optimistic-complete' });
213
+ await MemFlow.workflow.execHook({ taskQueue: 'agents', workflowName: 'skepticalPerspective', args: [query], signalId: 'skeptical-complete' });
214
+ await MemFlow.workflow.execHook({ taskQueue: 'agents', workflowName: 'verificationHook', args: [query], signalId: 'verification-complete' });
215
+ await MemFlow.workflow.execHook({ taskQueue: 'agents', workflowName: 'synthesizePerspectives', args: [], signalId: 'synthesis-complete' });
161
216
 
162
- // return analysis, verification, and synthesis
163
- return await agent.get();
217
+ return await entity.get();
164
218
  }
165
219
  ```
166
220
 
167
-
168
- Let's look at one of these hooks in detail - the synthesis hook that combines all perspectives into a final assessment:
169
-
170
- #### Synthesis Hook
171
-
221
+ ### Synthesis Hook
172
222
  ```ts
173
- // Synthesis hook aggregates different viewpoints
174
- export async function synthesizePerspectives(config: {signal: string}): Promise<void> {
175
- const entity = await MemFlow.workflow.entity();
176
- const context = await entity.get();
177
-
178
- const result = await analyzePerspectives(context.perspectives);
223
+ export async function synthesizePerspectives({ signal }: { signal: string }) {
224
+ const e = await MemFlow.workflow.entity();
225
+ const ctx = await e.get();
179
226
 
180
- await entity.merge({
227
+ const synthesized = await analyzePerspectives(ctx.perspectives);
228
+ await e.merge({
181
229
  perspectives: {
182
230
  synthesis: {
183
- finalAssessment: result,
184
- confidence: calculateConfidence(context.perspectives)
231
+ finalAssessment: synthesized,
232
+ confidence: calculateConfidence(ctx.perspectives)
185
233
  }
186
234
  },
187
235
  status: 'completed'
188
236
  });
189
- await MemFlow.workflow.signal(config.signal, {});
237
+ await MemFlow.workflow.signal(signal, {});
190
238
  }
191
-
192
- //other hooks...
193
239
  ```
194
240
 
195
- > 💡 A complete implementation of this Research Agent example with tests, OpenAI integration, and multi-perspective analysis can be found in the [agent test suite](https://github.com/hotmeshio/sdk-typescript/tree/main/tests/memflow/agent).
241
+ > **Pattern:** Fan-out hooks that write *adjacent* subtrees (e.g., `perspectives.optimistic`, `perspectives.skeptical`). A final hook merges a compact synthesis object. Avoid cross-hook mutation of the same nested branch.
196
242
 
197
243
  ---
198
244
 
199
- ## Building Pipelines with State
200
-
201
- HotMesh treats pipelines as long-lived records. Every pipeline run is stateful, resumable, and traceable. Hooks can be re-run at any time, and can be invoked by external callers. Sleep and run on a cadence to keep the pipeline up to date.
202
-
203
- ### Setup a Data Pipeline
245
+ ## Stateful Pipelines
246
+ Pipelines are identical in structure to agents: a coordinator seeds memory; phase hooks advance state; the entity is the audit trail.
204
247
 
248
+ ### Document Processing Pipeline (Coordinator)
205
249
  ```ts
206
- export async function documentProcessingPipeline(): Promise<any> {
250
+ export async function documentProcessingPipeline() {
207
251
  const pipeline = await MemFlow.workflow.entity();
208
252
 
209
- // Initialize pipeline state with empty arrays
210
- const initialState = {
253
+ const initial = {
211
254
  documentId: `doc-${Date.now()}`,
212
255
  status: 'started',
213
256
  startTime: new Date().toISOString(),
@@ -219,73 +262,54 @@ export async function documentProcessingPipeline(): Promise<any> {
219
262
  errors: [],
220
263
  pageSignals: {}
221
264
  };
222
-
223
- await pipeline.set<typeof initialState>(initialState);
265
+ await pipeline.set<typeof initial>(initial);
224
266
 
225
- // Step 1: Get list of image file references
226
- await pipeline.merge({status: 'loading-images'});
267
+ await pipeline.merge({ status: 'loading-images' });
227
268
  await pipeline.append('processingSteps', 'image-load-started');
228
269
  const imageRefs = await activities.loadImagePages();
229
- if (!imageRefs || imageRefs.length === 0) {
230
- throw new Error('No image references found');
231
- }
232
- await pipeline.merge({imageRefs});
270
+ if (!imageRefs?.length) throw new Error('No image references found');
271
+ await pipeline.merge({ imageRefs });
233
272
  await pipeline.append('processingSteps', 'image-load-completed');
234
273
 
235
- // Step 2: Launch processing hooks for each page
236
- for (const [index, imageRef] of imageRefs.entries()) {
237
- const pageNumber = index + 1;
238
-
274
+ // Page hooks
275
+ for (const [i, ref] of imageRefs.entries()) {
276
+ const page = i + 1;
239
277
  await MemFlow.workflow.execHook({
240
278
  taskQueue: 'pipeline',
241
279
  workflowName: 'pageProcessingHook',
242
- args: [imageRef, pageNumber, initialState.documentId],
243
- signalId: `page-${pageNumber}-complete`
280
+ args: [ref, page, initial.documentId],
281
+ signalId: `page-${page}-complete`
244
282
  });
245
- };
246
-
247
- // Step 3: Launch validation hook
248
- await MemFlow.workflow.execHook({
249
- taskQueue: 'pipeline',
250
- workflowName: 'validationHook',
251
- args: [initialState.documentId],
252
- signalId: 'validation-complete'
253
- });
254
-
255
- // Step 4: Launch approval hook
256
- await MemFlow.workflow.execHook({
257
- taskQueue: 'pipeline',
258
- workflowName: 'approvalHook',
259
- args: [initialState.documentId],
260
- signalId: 'approval-complete',
261
- });
283
+ }
262
284
 
263
- // Step 5: Launch notification hook
264
- await MemFlow.workflow.execHook({
265
- taskQueue: 'pipeline',
266
- workflowName: 'notificationHook',
267
- args: [initialState.documentId],
268
- signalId: 'processing-complete',
269
- });
285
+ // Validation
286
+ await MemFlow.workflow.execHook({ taskQueue: 'pipeline', workflowName: 'validationHook', args: [initial.documentId], signalId: 'validation-complete' });
287
+ // Approval
288
+ await MemFlow.workflow.execHook({ taskQueue: 'pipeline', workflowName: 'approvalHook', args: [initial.documentId], signalId: 'approval-complete' });
289
+ // Notification
290
+ await MemFlow.workflow.execHook({ taskQueue: 'pipeline', workflowName: 'notificationHook', args: [initial.documentId], signalId: 'processing-complete' });
270
291
 
271
- // Step 6: Return final state
272
- await pipeline.merge({status: 'completed', completedAt: new Date().toISOString()});
292
+ await pipeline.merge({ status: 'completed', completedAt: new Date().toISOString() });
273
293
  await pipeline.append('processingSteps', 'pipeline-completed');
274
294
  return await pipeline.get();
275
295
  }
276
296
  ```
277
297
 
278
- > 💡 A complete implementation of this Pipeline example with OpenAI Vision integration, processing hooks, and document workflow automation can be found in the [pipeline test suite](https://github.com/hotmeshio/sdk-typescript/tree/main/tests/memflow/pipeline).
298
+ **Operational Characteristics:**
299
+ - *Replay Friendly*: Each hook can be retried; pipeline memory records invariant progress markers (`processingSteps`).
300
+ - *Parallelizable*: Pages fan out naturally without manual queue wiring.
301
+ - *Auditable*: Entire lifecycle captured in a single evolving JSON record.
279
302
 
280
303
  ---
281
304
 
282
305
  ## Documentation & Links
283
-
284
- * SDK Reference[hotmeshio.github.io/sdk-typescript](https://hotmeshio.github.io/sdk-typescript)
285
- * Examples[github.com/hotmeshio/samples-typescript](https://github.com/hotmeshio/samples-typescript)
306
+ * **SDK Reference** – https://hotmeshio.github.io/sdk-typescript
307
+ * **Agent Example Tests** – https://github.com/hotmeshio/sdk-typescript/tree/main/tests/memflow/agent
308
+ * **Pipeline Example Tests** – https://github.com/hotmeshio/sdk-typescript/tree/main/tests/memflow/pipeline
309
+ * **Sample Projects** – https://github.com/hotmeshio/samples-typescript
286
310
 
287
311
  ---
288
312
 
289
313
  ## License
290
-
291
- Apache 2.0 with commercial restrictions See `LICENSE` for details.
314
+ Apache 2.0 with commercial restrictions* – see `LICENSE`.
315
+ >*NOTE: It's open source with one commercial exception: build, sell, and share solutions made with HotMesh — but don't white-label the orchestration core and repackage it as your own workflow-as-a-service.
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@hotmeshio/hotmesh",
3
- "version": "0.5.4",
3
+ "version": "0.5.6",
4
4
  "description": "Permanent-Memory Workflows & AI Agents",
5
5
  "main": "./build/index.js",
6
6
  "types": "./build/index.d.ts",
@@ -31,7 +31,8 @@
31
31
  "test:memflow:basic": "HMSH_LOGLEVEL=info NODE_ENV=test jest ./tests/memflow/basic/postgres.test.ts --detectOpenHandles --forceExit --verbose",
32
32
  "test:memflow:collision": "NODE_ENV=test jest ./tests/memflow/collision/*.test.ts --detectOpenHandles --forceExit --verbose",
33
33
  "test:memflow:fatal": "NODE_ENV=test jest ./tests/memflow/fatal/*.test.ts --detectOpenHandles --forceExit --verbose",
34
- "test:memflow:goodbye": "NODE_ENV=test jest ./tests/memflow/goodbye/*.test.ts --detectOpenHandles --forceExit --verbose",
34
+ "test:memflow:goodbye": "NODE_ENV=test HMSH_LOGLEVEL=debug jest ./tests/memflow/goodbye/postgres.test.ts --detectOpenHandles --forceExit --verbose",
35
+ "test:memflow:interceptor": "NODE_ENV=test HMSH_LOGLEVEL=debug jest ./tests/memflow/interceptor/postgres.test.ts --detectOpenHandles --forceExit --verbose",
35
36
  "test:memflow:entity": "NODE_ENV=test HMSH_LOGLEVEL=debug jest ./tests/memflow/entity/postgres.test.ts --detectOpenHandles --forceExit --verbose",
36
37
  "test:memflow:agent": "NODE_ENV=test HMSH_LOGLEVEL=debug jest ./tests/memflow/agent/postgres.test.ts --detectOpenHandles --forceExit --verbose",
37
38
  "test:memflow:hello": "HMSH_TELEMETRY=debug HMSH_LOGLEVEL=debug HMSH_IS_CLUSTER=true NODE_ENV=test jest ./tests/memflow/helloworld/postgres.test.ts --detectOpenHandles --forceExit --verbose",
@@ -8,8 +8,8 @@ const pipe_1 = require("../pipe");
8
8
  const reporter_1 = require("../reporter");
9
9
  const serializer_1 = require("../serializer");
10
10
  const telemetry_1 = require("../telemetry");
11
- const activity_1 = require("./activity");
12
11
  const mapper_1 = require("../mapper");
12
+ const activity_1 = require("./activity");
13
13
  class Trigger extends activity_1.Activity {
14
14
  constructor(config, data, metadata, hook, engine, context) {
15
15
  super(config, data, metadata, hook, engine, context);
@@ -90,7 +90,8 @@ class ConnectorService {
90
90
  //if connecting as a poolClient for subscription, auto connect the client
91
91
  const bAutoConnect = field === 'sub';
92
92
  // Use taskQueue-based connection pooling for PostgreSQL
93
- clientInstance = await postgres_1.PostgresConnection.getOrCreateTaskQueueConnection(id, taskQueue, providerClass, options, { connect: bAutoConnect, provider: providerName });
93
+ clientInstance =
94
+ await postgres_1.PostgresConnection.getOrCreateTaskQueueConnection(id, taskQueue, providerClass, options, { connect: bAutoConnect, provider: providerName });
94
95
  break;
95
96
  default:
96
97
  throw new Error(`Unknown provider type: ${providerType}`);
@@ -23,13 +23,13 @@ class PostgresConnection extends __1.AbstractConnection {
23
23
  const taskQueueDetails = Array.from(this.taskQueueConnections.entries()).map(([key, connection]) => ({
24
24
  key,
25
25
  connectionId: connection.getConnectionId() || 'unknown',
26
- reusedCount: connection.reusedCount || 0
26
+ reusedCount: connection.reusedCount || 0,
27
27
  }));
28
28
  return {
29
29
  totalPoolClients: this.poolClientInstances.size,
30
30
  totalConnections: this.connectionInstances.size,
31
31
  taskQueueConnections: this.taskQueueConnections.size,
32
- taskQueueDetails
32
+ taskQueueDetails,
33
33
  };
34
34
  }
35
35
  /**
@@ -41,7 +41,7 @@ class PostgresConnection extends __1.AbstractConnection {
41
41
  if (logger) {
42
42
  logger.info('postgres-connection-stats', {
43
43
  ...stats,
44
- message
44
+ message,
45
45
  });
46
46
  }
47
47
  else {
@@ -54,8 +54,12 @@ class PostgresConnection extends __1.AbstractConnection {
54
54
  static getPoolingEffectiveness() {
55
55
  const stats = this.getConnectionStats();
56
56
  const totalReuses = stats.taskQueueDetails.reduce((sum, detail) => sum + detail.reusedCount, 0);
57
- const averageReusesPerPool = stats.taskQueueConnections > 0 ? totalReuses / stats.taskQueueConnections : 0;
58
- const poolingEfficiency = stats.totalConnections > 0 ? (stats.taskQueueConnections / stats.totalConnections) * 100 : 0;
57
+ const averageReusesPerPool = stats.taskQueueConnections > 0
58
+ ? totalReuses / stats.taskQueueConnections
59
+ : 0;
60
+ const poolingEfficiency = stats.totalConnections > 0
61
+ ? stats.taskQueueConnections / stats.totalConnections * 100
62
+ : 0;
59
63
  return {
60
64
  totalConnections: stats.totalConnections,
61
65
  taskQueuePools: stats.taskQueueConnections,
@@ -164,7 +168,8 @@ class PostgresConnection extends __1.AbstractConnection {
164
168
  if (this.taskQueueConnections.has(connectionKey)) {
165
169
  const existingConnection = this.taskQueueConnections.get(connectionKey);
166
170
  // Track reuse count for monitoring
167
- existingConnection.reusedCount = (existingConnection.reusedCount || 0) + 1;
171
+ existingConnection.reusedCount =
172
+ (existingConnection.reusedCount || 0) + 1;
168
173
  this.logger.debug('postgres-connection-reused', {
169
174
  connectionKey,
170
175
  taskQueue,
@@ -69,7 +69,7 @@ class ClientService {
69
69
  }
70
70
  //init, but don't await
71
71
  const readonly = this.connection.readonly ?? undefined;
72
- let hotMeshClient = hotmesh_1.HotMesh.init({
72
+ const hotMeshClient = hotmesh_1.HotMesh.init({
73
73
  appId: targetNS,
74
74
  taskQueue,
75
75
  logLevel: enums_1.HMSH_LOGLEVEL,
@@ -126,7 +126,9 @@ class ClientService {
126
126
  */
127
127
  start: async (options) => {
128
128
  const taskQueueName = options.taskQueue ?? options.entity;
129
- const workflowName = options.taskQueue ? options.workflowName : (options.entity ?? options.workflowName);
129
+ const workflowName = options.taskQueue
130
+ ? options.workflowName
131
+ : options.entity ?? options.workflowName;
130
132
  const trc = options.workflowTrace;
131
133
  const spn = options.workflowSpan;
132
134
  //hotmesh `topic` is equivalent to `queue+workflowname` pattern in other systems