@hotmeshio/hotmesh 0.5.4 → 0.5.6

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (27)
  1. package/README.md +185 -161
  2. package/build/package.json +3 -2
  3. package/build/services/activities/trigger.js +1 -1
  4. package/build/services/connector/factory.js +2 -1
  5. package/build/services/connector/providers/postgres.js +11 -6
  6. package/build/services/memflow/client.js +4 -2
  7. package/build/services/memflow/index.d.ts +154 -34
  8. package/build/services/memflow/index.js +165 -33
  9. package/build/services/memflow/interceptor.d.ts +241 -0
  10. package/build/services/memflow/interceptor.js +256 -0
  11. package/build/services/memflow/worker.js +10 -1
  12. package/build/services/memflow/workflow/execChild.js +3 -1
  13. package/build/services/memflow/workflow/execHook.js +1 -1
  14. package/build/services/memflow/workflow/hook.js +4 -2
  15. package/build/services/memflow/workflow/proxyActivities.js +2 -1
  16. package/build/services/router/consumption/index.js +23 -9
  17. package/build/services/router/error-handling/index.js +3 -3
  18. package/build/services/search/providers/postgres/postgres.js +47 -19
  19. package/build/services/store/providers/postgres/kvtypes/hash/basic.js +1 -1
  20. package/build/services/store/providers/postgres/kvtypes/hash/index.js +2 -2
  21. package/build/services/store/providers/postgres/kvtypes/hash/jsonb.js +11 -11
  22. package/build/services/store/providers/postgres/postgres.js +8 -8
  23. package/build/services/stream/providers/postgres/postgres.js +23 -20
  24. package/build/services/sub/providers/postgres/postgres.js +11 -3
  25. package/build/services/task/index.js +4 -4
  26. package/build/types/memflow.d.ts +78 -0
  27. package/package.json +3 -2
@@ -0,0 +1,256 @@
1
+ "use strict";
2
+ Object.defineProperty(exports, "__esModule", { value: true });
3
+ exports.InterceptorService = void 0;
4
+ /**
5
+ * Service for managing workflow interceptors that wrap workflow execution
6
+ * in an onion-like pattern. Each interceptor can perform actions before
7
+ * and after workflow execution, add cross-cutting concerns, and handle errors.
8
+ *
9
+ * ## Basic Interceptor Pattern
10
+ *
11
+ * @example
12
+ * ```typescript
13
+ * // Create and configure interceptors
14
+ * const service = new InterceptorService();
15
+ *
16
+ * // Add logging interceptor (outermost)
17
+ * service.register({
18
+ * async execute(ctx, next) {
19
+ * console.log('Starting workflow');
20
+ * const result = await next();
21
+ * console.log('Workflow completed');
22
+ * return result;
23
+ * }
24
+ * });
25
+ *
26
+ * // Add metrics interceptor (middle)
27
+ * service.register({
28
+ * async execute(ctx, next) {
29
+ * const timer = startTimer();
30
+ * const result = await next();
31
+ * recordDuration(timer.end());
32
+ * return result;
33
+ * }
34
+ * });
35
+ *
36
+ * // Add error handling interceptor (innermost)
37
+ * service.register({
38
+ * async execute(ctx, next) {
39
+ * try {
40
+ * return await next();
41
+ * } catch (err) {
42
+ * reportError(err);
43
+ * throw err;
44
+ * }
45
+ * }
46
+ * });
47
+ *
48
+ * // Execute workflow through interceptor chain
49
+ * const result = await service.executeChain(context, async () => {
50
+ * return await workflowFn();
51
+ * });
52
+ * ```
53
+ *
54
+ * ## Durable Interceptors with MemFlow Functions
55
+ *
56
+ * Interceptors run within the workflow's async local storage context, which means
57
+ * they can use MemFlow functions like `sleepFor`, `entity`, `proxyActivities`, etc.
58
+ * These interceptors participate in the HotMesh interruption/replay pattern.
59
+ *
60
+ * @example
61
+ * ```typescript
62
+ * import { MemFlow } from '@hotmeshio/hotmesh';
63
+ *
64
+ * // Rate limiting interceptor that sleeps before execution
65
+ * const rateLimitInterceptor: WorkflowInterceptor = {
66
+ * async execute(ctx, next) {
67
+ * try {
68
+ * // This sleep will cause an interruption on first execution
69
+ * await MemFlow.workflow.sleepFor('1 second');
70
+ *
71
+ * const result = await next();
72
+ *
73
+ * // Another sleep after workflow completes
74
+ * await MemFlow.workflow.sleepFor('500 milliseconds');
75
+ *
76
+ * return result;
77
+ * } catch (err) {
78
+ * // CRITICAL: Always check for HotMesh interruptions
79
+ * if (MemFlow.didInterrupt(err)) {
80
+ * throw err; // Rethrow interruptions for replay system
81
+ * }
82
+ * // Handle actual errors
83
+ * console.error('Interceptor error:', err);
84
+ * throw err;
85
+ * }
86
+ * }
87
+ * };
88
+ *
89
+ * // Entity-based audit interceptor
90
+ * const auditInterceptor: WorkflowInterceptor = {
91
+ * async execute(ctx, next) {
92
+ * try {
93
+ * const entity = await MemFlow.workflow.entity();
94
+ * await entity.append('auditLog', {
95
+ * action: 'workflow_started',
96
+ * timestamp: new Date().toISOString(),
97
+ * workflowId: ctx.get('workflowId')
98
+ * });
99
+ *
100
+ * const startTime = Date.now();
101
+ * const result = await next();
102
+ * const duration = Date.now() - startTime;
103
+ *
104
+ * await entity.append('auditLog', {
105
+ * action: 'workflow_completed',
106
+ * timestamp: new Date().toISOString(),
107
+ * duration,
108
+ * success: true
109
+ * });
110
+ *
111
+ * return result;
112
+ * } catch (err) {
113
+ * if (MemFlow.didInterrupt(err)) {
114
+ * throw err;
115
+ * }
116
+ *
117
+ * // Log failure to entity
118
+ * const entity = await MemFlow.workflow.entity();
119
+ * await entity.append('auditLog', {
120
+ * action: 'workflow_failed',
121
+ * timestamp: new Date().toISOString(),
122
+ * error: err.message
123
+ * });
124
+ *
125
+ * throw err;
126
+ * }
127
+ * }
128
+ * };
129
+ *
130
+ * // Register interceptors
131
+ * MemFlow.registerInterceptor(rateLimitInterceptor);
132
+ * MemFlow.registerInterceptor(auditInterceptor);
133
+ * ```
134
+ *
135
+ * ## Execution Pattern with Interruptions
136
+ *
137
+ * When interceptors use MemFlow functions, the workflow will execute multiple times
138
+ * due to the interruption/replay pattern:
139
+ *
140
+ * 1. **First execution**: Interceptor calls `sleepFor` → throws `MemFlowSleepError` → workflow pauses
141
+ * 2. **Second execution**: Interceptor sleep replays (skipped), workflow runs → proxy activity throws `MemFlowProxyError` → workflow pauses
142
+ * 3. **Third execution**: All previous operations replay, interceptor sleep after workflow → throws `MemFlowSleepError` → workflow pauses
143
+ * 4. **Fourth execution**: Everything replays successfully, workflow completes
144
+ *
145
+ * This pattern ensures deterministic, durable execution across all interceptors and workflow code.
146
+ *
147
+ * @example
148
+ * ```typescript
149
+ * // Interceptor with complex MemFlow operations
150
+ * const complexInterceptor: WorkflowInterceptor = {
151
+ * async execute(ctx, next) {
152
+ * try {
153
+ * // Get persistent state
154
+ * const entity = await MemFlow.workflow.entity();
155
+ * const state = await entity.get() as any;
156
+ *
157
+ * // Conditional durable operations
158
+ * if (!state.preProcessed) {
159
+ * await MemFlow.workflow.sleepFor('100 milliseconds');
160
+ * await entity.merge({ preProcessed: true });
161
+ * }
162
+ *
163
+ * // Execute the workflow
164
+ * const result = await next();
165
+ *
166
+ * // Post-processing with child workflow
167
+ * if (!state.postProcessed) {
168
+ * await MemFlow.workflow.execChild({
169
+ * taskQueue: 'cleanup',
170
+ * workflowName: 'cleanupWorkflow',
171
+ * args: [result]
172
+ * });
173
+ * await entity.merge({ postProcessed: true });
174
+ * }
175
+ *
176
+ * return result;
177
+ * } catch (err) {
178
+ * if (MemFlow.didInterrupt(err)) {
179
+ * throw err;
180
+ * }
181
+ * throw err;
182
+ * }
183
+ * }
184
+ * };
185
+ * ```
186
+ */
187
+ class InterceptorService {
188
+ constructor() {
189
+ this.interceptors = [];
190
+ }
191
+ /**
192
+ * Register a new workflow interceptor that will wrap workflow execution.
193
+ * Interceptors are executed in the order they are registered, with the
194
+ * first registered interceptor being the outermost wrapper.
195
+ *
196
+ * @param interceptor The interceptor to register
197
+ *
198
+ * @example
199
+ * ```typescript
200
+ * service.register({
201
+ * async execute(ctx, next) {
202
+ * console.time('workflow');
203
+ * try {
204
+ * return await next();
205
+ * } finally {
206
+ * console.timeEnd('workflow');
207
+ * }
208
+ * }
209
+ * });
210
+ * ```
211
+ */
212
+ register(interceptor) {
213
+ this.interceptors.push(interceptor);
214
+ }
215
+ /**
216
+ * Execute the interceptor chain around the workflow function.
217
+ * The chain is built in an onion pattern where each interceptor
218
+ * wraps the next one, with the workflow function at the center.
219
+ *
220
+ * @param ctx The workflow context map
221
+ * @param fn The workflow function to execute
222
+ * @returns The result of the workflow execution
223
+ *
224
+ * @example
225
+ * ```typescript
226
+ * // Execute workflow with timing
227
+ * const result = await service.executeChain(context, async () => {
228
+ * const start = Date.now();
229
+ * try {
230
+ * return await workflowFn();
231
+ * } finally {
232
+ * console.log('Duration:', Date.now() - start);
233
+ * }
234
+ * });
235
+ * ```
236
+ */
237
+ async executeChain(ctx, fn) {
238
+ // Create the onion-like chain of interceptors
239
+ const chain = this.interceptors.reduceRight((next, interceptor) => {
240
+ return () => interceptor.execute(ctx, next);
241
+ }, fn);
242
+ return chain();
243
+ }
244
+ /**
245
+ * Clear all registered interceptors.
246
+ *
247
+ * @example
248
+ * ```typescript
249
+ * service.clear();
250
+ * ```
251
+ */
252
+ clear() {
253
+ this.interceptors = [];
254
+ }
255
+ }
256
+ exports.InterceptorService = InterceptorService;
@@ -10,6 +10,7 @@ const hotmesh_1 = require("../hotmesh");
10
10
  const stream_1 = require("../../types/stream");
11
11
  const search_1 = require("./search");
12
12
  const factory_1 = require("./schemas/factory");
13
+ const index_1 = require("./index");
13
14
  /**
14
15
  * The *Worker* service Registers worker functions and connects them to the mesh,
15
16
  * using the target backend provider/s (Redis, Postgres, NATS, etc).
@@ -321,8 +322,16 @@ class WorkerService {
321
322
  const [cursor, replay] = await store.findJobFields(workflowInput.workflowId, replayQuery, 50000, 5000);
322
323
  context.set('replay', replay);
323
324
  context.set('cursor', cursor); // if != 0, more remain
325
+ // Execute workflow with interceptors
324
326
  const workflowResponse = await storage_1.asyncLocalStorage.run(context, async () => {
325
- return await workflowFunction.apply(this, workflowInput.arguments);
327
+ // Get the interceptor service
328
+ const interceptorService = index_1.MemFlow.getInterceptorService();
329
+ // Create the workflow execution function
330
+ const execWorkflow = async () => {
331
+ return await workflowFunction.apply(this, workflowInput.arguments);
332
+ };
333
+ // Execute the workflow through the interceptor chain
334
+ return await interceptorService.executeChain(context, execWorkflow);
326
335
  });
327
336
  //if the embedded function has a try/catch, it can interrup the throw
328
337
  // throw here to interrupt the workflow if the embedded function caught and suppressed
@@ -22,7 +22,9 @@ function getChildInterruptPayload(context, options, execIndex) {
22
22
  }
23
23
  const parentWorkflowId = workflowId;
24
24
  const taskQueueName = options.taskQueue ?? options.entity;
25
- const workflowName = options.taskQueue ? options.workflowName : (options.entity ?? options.workflowName);
25
+ const workflowName = options.taskQueue
26
+ ? options.workflowName
27
+ : options.entity ?? options.workflowName;
26
28
  const workflowTopic = `${taskQueueName}-${workflowName}`;
27
29
  return {
28
30
  arguments: [...(options.args || [])],
@@ -67,7 +67,7 @@ async function execHook(options) {
67
67
  }
68
68
  const hookOptions = {
69
69
  ...options,
70
- args: [...options.args, { signal: options.signalId, $memflow: true }]
70
+ args: [...options.args, { signal: options.signalId, $memflow: true }],
71
71
  };
72
72
  // Execute the hook with the signal information
73
73
  await (0, hook_1.hook)(hookOptions);
@@ -27,7 +27,9 @@ async function hook(options) {
27
27
  targetTopic = workflowTopic;
28
28
  }
29
29
  // DEFENSIVE CHECK: Prevent infinite loops
30
- if (targetTopic === workflowTopic && !options.entity && !options.taskQueue) {
30
+ if (targetTopic === workflowTopic &&
31
+ !options.entity &&
32
+ !options.taskQueue) {
31
33
  throw new Error(`MemFlow Hook Error: Potential infinite loop detected!\n\n` +
32
34
  `The hook would target the same workflow topic ('${workflowTopic}') as the current workflow, ` +
33
35
  `creating an infinite loop.\n\n` +
@@ -39,7 +41,7 @@ async function hook(options) {
39
41
  `Provided options: ${JSON.stringify({
40
42
  workflowName: options.workflowName,
41
43
  taskQueue: options.taskQueue,
42
- entity: options.entity
44
+ entity: options.entity,
43
45
  }, null, 2)}`);
44
46
  }
45
47
  const payload = {
@@ -55,7 +55,8 @@ function wrapActivity(activityName, options) {
55
55
  throw new common_1.MemFlowTimeoutError(message, stack);
56
56
  }
57
57
  else {
58
- // Non-fatal error
58
+ // For any other error code, throw a MemFlowFatalError to stop the workflow
59
+ throw new common_1.MemFlowFatalError(message, stack);
59
60
  }
60
61
  }
61
62
  return result.$error;
@@ -49,12 +49,20 @@ class ConsumptionManager {
49
49
  const features = this.stream.getProviderSpecificFeatures();
50
50
  const supportsNotifications = features.supportsNotifications;
51
51
  if (supportsNotifications) {
52
- this.logger.info(`router-stream-using-notifications`, { group, consumer, stream });
52
+ this.logger.info(`router-stream-using-notifications`, {
53
+ group,
54
+ consumer,
55
+ stream,
56
+ });
53
57
  this.lifecycleManager.setIsUsingNotifications(true);
54
58
  return this.consumeWithNotifications(stream, group, consumer, callback);
55
59
  }
56
60
  else {
57
- this.logger.info(`router-stream-using-polling`, { group, consumer, stream });
61
+ this.logger.info(`router-stream-using-polling`, {
62
+ group,
63
+ consumer,
64
+ stream,
65
+ });
58
66
  this.lifecycleManager.setIsUsingNotifications(false);
59
67
  return this.consumeWithPolling(stream, group, consumer, callback);
60
68
  }
@@ -67,7 +75,8 @@ class ConsumptionManager {
67
75
  return;
68
76
  }
69
77
  await this.throttleManager.customSleep(); // respect throttle
70
- if (this.lifecycleManager.isStopped(group, consumer, stream) || this.throttleManager.isPaused()) {
78
+ if (this.lifecycleManager.isStopped(group, consumer, stream) ||
79
+ this.throttleManager.isPaused()) {
71
80
  return;
72
81
  }
73
82
  // Process messages - use parallel processing for PostgreSQL
@@ -78,7 +87,7 @@ class ConsumptionManager {
78
87
  this.logger.debug('postgres-stream-parallel-processing', {
79
88
  streamName: stream,
80
89
  groupName: group,
81
- messageCount: messages.length
90
+ messageCount: messages.length,
82
91
  });
83
92
  const processingStart = Date.now();
84
93
  const processingPromises = messages.map(async (message) => {
@@ -93,7 +102,7 @@ class ConsumptionManager {
93
102
  streamName: stream,
94
103
  groupName: group,
95
104
  messageCount: messages.length,
96
- processingDuration: Date.now() - processingStart
105
+ processingDuration: Date.now() - processingStart,
97
106
  });
98
107
  }
99
108
  else {
@@ -153,7 +162,11 @@ class ConsumptionManager {
153
162
  consumer,
154
163
  });
155
164
  // Fall back to polling if notifications fail
156
- this.logger.info(`router-stream-fallback-to-polling`, { group, consumer, stream });
165
+ this.logger.info(`router-stream-fallback-to-polling`, {
166
+ group,
167
+ consumer,
168
+ stream,
169
+ });
157
170
  this.lifecycleManager.setIsUsingNotifications(false);
158
171
  return this.consumeWithPolling(stream, group, consumer, callback);
159
172
  }
@@ -223,7 +236,7 @@ class ConsumptionManager {
223
236
  this.logger.debug('postgres-stream-parallel-processing-polling', {
224
237
  streamName: stream,
225
238
  groupName: group,
226
- messageCount: messages.length
239
+ messageCount: messages.length,
227
240
  });
228
241
  const processingStart = Date.now();
229
242
  const processingPromises = messages.map(async (message) => {
@@ -238,7 +251,7 @@ class ConsumptionManager {
238
251
  streamName: stream,
239
252
  groupName: group,
240
253
  messageCount: messages.length,
241
- processingDuration: Date.now() - processingStart
254
+ processingDuration: Date.now() - processingStart,
242
255
  });
243
256
  }
244
257
  else {
@@ -299,7 +312,8 @@ class ConsumptionManager {
299
312
  }
300
313
  }
301
314
  catch (error) {
302
- if (this.lifecycleManager.getShouldConsume() && process.env.NODE_ENV !== 'test') {
315
+ if (this.lifecycleManager.getShouldConsume() &&
316
+ process.env.NODE_ENV !== 'test') {
303
317
  this.logger.error(`router-stream-error`, {
304
318
  error,
305
319
  stream,
@@ -82,16 +82,16 @@ class ErrorHandler {
82
82
  const [shouldRetry, timeout] = this.shouldRetry(input, output);
83
83
  if (shouldRetry) {
84
84
  await (0, utils_1.sleepFor)(timeout);
85
- return await publishMessage(input.metadata.topic, {
85
+ return (await publishMessage(input.metadata.topic, {
86
86
  data: input.data,
87
87
  //note: retain guid (this is a retry attempt)
88
88
  metadata: { ...input.metadata, try: (input.metadata.try || 0) + 1 },
89
89
  policies: input.policies,
90
- });
90
+ }));
91
91
  }
92
92
  else {
93
93
  const structuredError = this.structureError(input, output);
94
- return await publishMessage(null, structuredError);
94
+ return (await publishMessage(null, structuredError));
95
95
  }
96
96
  }
97
97
  }
@@ -156,7 +156,9 @@ class PostgresSearchService extends index_1.SearchService {
156
156
  const params = [entity];
157
157
  let paramIndex = 2;
158
158
  for (const [key, value] of Object.entries(conditions)) {
159
- if (typeof value === 'object' && value !== null && !Array.isArray(value)) {
159
+ if (typeof value === 'object' &&
160
+ value !== null &&
161
+ !Array.isArray(value)) {
160
162
  // Handle MongoDB-style operators like { $gte: 18 }
161
163
  for (const [op, opValue] of Object.entries(value)) {
162
164
  const sqlOp = this.mongoToSqlOperator(op);
@@ -188,14 +190,20 @@ class PostgresSearchService extends index_1.SearchService {
188
190
  params.push(options.offset);
189
191
  }
190
192
  const result = await this.pgClient.query(sql, params);
191
- return result.rows.map(row => ({
193
+ return result.rows.map((row) => ({
192
194
  key: row.key,
193
- context: typeof row.context === 'string' ? JSON.parse(row.context || '{}') : (row.context || {}),
195
+ context: typeof row.context === 'string'
196
+ ? JSON.parse(row.context || '{}')
197
+ : row.context || {},
194
198
  status: row.status,
195
199
  }));
196
200
  }
197
201
  catch (error) {
198
- this.logger.error(`postgres-find-entities-error`, { entity, conditions, error });
202
+ this.logger.error(`postgres-find-entities-error`, {
203
+ entity,
204
+ conditions,
205
+ error,
206
+ });
199
207
  throw error;
200
208
  }
201
209
  }
@@ -206,7 +214,7 @@ class PostgresSearchService extends index_1.SearchService {
206
214
  // Use KeyService to mint the job state key
207
215
  const fullKey = key_1.KeyService.mintKey(key_1.HMNS, key_1.KeyType.JOB_STATE, {
208
216
  appId: this.appId,
209
- jobId: id
217
+ jobId: id,
210
218
  });
211
219
  const sql = `
212
220
  SELECT key, context, status, entity
@@ -221,12 +229,18 @@ class PostgresSearchService extends index_1.SearchService {
221
229
  const row = result.rows[0];
222
230
  return {
223
231
  key: row.key,
224
- context: typeof row.context === 'string' ? JSON.parse(row.context || '{}') : (row.context || {}),
232
+ context: typeof row.context === 'string'
233
+ ? JSON.parse(row.context || '{}')
234
+ : row.context || {},
225
235
  status: row.status,
226
236
  };
227
237
  }
228
238
  catch (error) {
229
- this.logger.error(`postgres-find-entity-by-id-error`, { entity, id, error });
239
+ this.logger.error(`postgres-find-entity-by-id-error`, {
240
+ entity,
241
+ id,
242
+ error,
243
+ });
230
244
  throw error;
231
245
  }
232
246
  }
@@ -278,15 +292,21 @@ class PostgresSearchService extends index_1.SearchService {
278
292
  params.push(options.offset);
279
293
  }
280
294
  const result = await this.pgClient.query(sql, params);
281
- return result.rows.map(row => ({
295
+ return result.rows.map((row) => ({
282
296
  key: row.key,
283
- context: typeof row.context === 'string' ? JSON.parse(row.context || '{}') : (row.context || {}),
297
+ context: typeof row.context === 'string'
298
+ ? JSON.parse(row.context || '{}')
299
+ : row.context || {},
284
300
  status: row.status,
285
301
  }));
286
302
  }
287
303
  catch (error) {
288
304
  this.logger.error(`postgres-find-entities-by-condition-error`, {
289
- entity, field, value, operator, error
305
+ entity,
306
+ field,
307
+ value,
308
+ operator,
309
+ error,
290
310
  });
291
311
  throw error;
292
312
  }
@@ -323,11 +343,19 @@ class PostgresSearchService extends index_1.SearchService {
323
343
  `;
324
344
  }
325
345
  await this.pgClient.query(sql);
326
- this.logger.info(`postgres-entity-index-created`, { entity, field, indexType, indexName });
346
+ this.logger.info(`postgres-entity-index-created`, {
347
+ entity,
348
+ field,
349
+ indexType,
350
+ indexName,
351
+ });
327
352
  }
328
353
  catch (error) {
329
354
  this.logger.error(`postgres-create-entity-index-error`, {
330
- entity, field, indexType, error
355
+ entity,
356
+ field,
357
+ indexType,
358
+ error,
331
359
  });
332
360
  throw error;
333
361
  }
@@ -335,13 +363,13 @@ class PostgresSearchService extends index_1.SearchService {
335
363
  // Helper methods for entity operations
336
364
  mongoToSqlOperator(mongoOp) {
337
365
  const mapping = {
338
- '$eq': '=',
339
- '$ne': '!=',
340
- '$gt': '>',
341
- '$gte': '>=',
342
- '$lt': '<',
343
- '$lte': '<=',
344
- '$in': 'IN',
366
+ $eq: '=',
367
+ $ne: '!=',
368
+ $gt: '>',
369
+ $gte: '>=',
370
+ $lt: '<',
371
+ $lte: '<=',
372
+ $in: 'IN',
345
373
  };
346
374
  return mapping[mongoOp] || '=';
347
375
  }
@@ -31,7 +31,7 @@ function createBasicOperations(context) {
31
31
  try {
32
32
  const res = await context.pgClient.query(sql, params);
33
33
  // Check if this is a JSONB operation that returns a value
34
- const isJsonbOperation = Object.keys(fields).some(k => k.startsWith('@context:') && k !== '@context');
34
+ const isJsonbOperation = Object.keys(fields).some((k) => k.startsWith('@context:') && k !== '@context');
35
35
  if (isJsonbOperation && res.rows[0]?.new_value !== undefined) {
36
36
  let returnValue;
37
37
  try {
@@ -75,7 +75,7 @@ const hashModule = (context) => {
75
75
  const { sql, params } = jsonbOps.handleContextSetIfNotExists(key, fields, options);
76
76
  return executeJsonbOperation(sql, params, multi);
77
77
  }
78
- else if (Object.keys(fields).some(k => k.startsWith('@context:get:'))) {
78
+ else if (Object.keys(fields).some((k) => k.startsWith('@context:get:'))) {
79
79
  const { sql, params } = jsonbOps.handleContextGetPath(key, fields, options);
80
80
  return executeJsonbOperation(sql, params, multi);
81
81
  }
@@ -120,7 +120,7 @@ const hashModule = (context) => {
120
120
  else if ('@context:setIfNotExists' in fields) {
121
121
  return jsonbOps.handleContextSetIfNotExists(key, fields, options);
122
122
  }
123
- else if (Object.keys(fields).some(k => k.startsWith('@context:get:'))) {
123
+ else if (Object.keys(fields).some((k) => k.startsWith('@context:get:'))) {
124
124
  return jsonbOps.handleContextGetPath(key, fields, options);
125
125
  }
126
126
  else if ('@context:get' in fields) {