@hotmeshio/hotmesh 0.5.5 → 0.5.7

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (94)
  1. package/README.md +7 -45
  2. package/build/index.d.ts +1 -3
  3. package/build/index.js +1 -5
  4. package/build/modules/enums.d.ts +0 -5
  5. package/build/modules/enums.js +1 -6
  6. package/build/modules/utils.d.ts +1 -1
  7. package/build/modules/utils.js +2 -29
  8. package/build/package.json +6 -17
  9. package/build/services/activities/hook.js +1 -5
  10. package/build/services/activities/trigger.js +1 -1
  11. package/build/services/compiler/index.d.ts +2 -2
  12. package/build/services/compiler/index.js +4 -4
  13. package/build/services/connector/factory.d.ts +1 -1
  14. package/build/services/connector/factory.js +3 -12
  15. package/build/services/connector/providers/postgres.js +11 -6
  16. package/build/services/exporter/index.d.ts +8 -8
  17. package/build/services/exporter/index.js +8 -8
  18. package/build/services/memflow/client.js +5 -10
  19. package/build/services/memflow/connection.d.ts +0 -2
  20. package/build/services/memflow/connection.js +0 -2
  21. package/build/services/memflow/exporter.d.ts +3 -3
  22. package/build/services/memflow/exporter.js +3 -3
  23. package/build/services/memflow/index.d.ts +154 -34
  24. package/build/services/memflow/index.js +165 -33
  25. package/build/services/memflow/interceptor.d.ts +241 -0
  26. package/build/services/memflow/interceptor.js +256 -0
  27. package/build/services/memflow/schemas/factory.js +1 -1
  28. package/build/services/memflow/search.d.ts +11 -4
  29. package/build/services/memflow/search.js +98 -71
  30. package/build/services/memflow/worker.d.ts +1 -1
  31. package/build/services/memflow/worker.js +11 -2
  32. package/build/services/memflow/workflow/execChild.js +3 -1
  33. package/build/services/memflow/workflow/execHook.js +1 -1
  34. package/build/services/memflow/workflow/hook.js +4 -2
  35. package/build/services/memflow/workflow/proxyActivities.js +2 -1
  36. package/build/services/meshcall/index.d.ts +1 -1
  37. package/build/services/meshcall/index.js +1 -1
  38. package/build/services/reporter/index.d.ts +1 -1
  39. package/build/services/reporter/index.js +12 -12
  40. package/build/services/router/consumption/index.js +23 -9
  41. package/build/services/router/error-handling/index.js +3 -3
  42. package/build/services/search/factory.js +0 -8
  43. package/build/services/search/providers/postgres/postgres.js +48 -20
  44. package/build/services/store/cache.d.ts +1 -1
  45. package/build/services/store/cache.js +1 -1
  46. package/build/services/store/factory.js +1 -9
  47. package/build/services/store/index.d.ts +1 -1
  48. package/build/services/store/providers/postgres/kvtypes/hash/basic.js +1 -1
  49. package/build/services/store/providers/postgres/kvtypes/hash/index.js +59 -2
  50. package/build/services/store/providers/postgres/kvtypes/hash/jsonb.js +11 -11
  51. package/build/services/store/providers/postgres/kvtypes/hash/udata.d.ts +10 -0
  52. package/build/services/store/providers/postgres/kvtypes/hash/udata.js +384 -0
  53. package/build/services/store/providers/postgres/postgres.js +10 -14
  54. package/build/services/stream/factory.js +0 -16
  55. package/build/services/stream/providers/postgres/postgres.js +23 -20
  56. package/build/services/sub/factory.js +0 -8
  57. package/build/services/sub/providers/nats/nats.js +0 -1
  58. package/build/services/sub/providers/postgres/postgres.js +11 -3
  59. package/build/services/task/index.js +4 -5
  60. package/build/types/activity.d.ts +1 -5
  61. package/build/types/hotmesh.d.ts +0 -5
  62. package/build/types/index.d.ts +0 -1
  63. package/build/types/index.js +1 -4
  64. package/build/types/job.d.ts +1 -1
  65. package/build/types/memflow.d.ts +83 -4
  66. package/build/types/meshcall.d.ts +0 -25
  67. package/build/types/provider.d.ts +1 -1
  68. package/build/types/stream.d.ts +1 -6
  69. package/index.ts +0 -4
  70. package/package.json +6 -17
  71. package/build/services/connector/providers/ioredis.d.ts +0 -9
  72. package/build/services/connector/providers/ioredis.js +0 -26
  73. package/build/services/connector/providers/redis.d.ts +0 -9
  74. package/build/services/connector/providers/redis.js +0 -38
  75. package/build/services/search/providers/redis/ioredis.d.ts +0 -23
  76. package/build/services/search/providers/redis/ioredis.js +0 -134
  77. package/build/services/search/providers/redis/redis.d.ts +0 -23
  78. package/build/services/search/providers/redis/redis.js +0 -147
  79. package/build/services/store/providers/redis/_base.d.ts +0 -137
  80. package/build/services/store/providers/redis/_base.js +0 -980
  81. package/build/services/store/providers/redis/ioredis.d.ts +0 -20
  82. package/build/services/store/providers/redis/ioredis.js +0 -180
  83. package/build/services/store/providers/redis/redis.d.ts +0 -18
  84. package/build/services/store/providers/redis/redis.js +0 -199
  85. package/build/services/stream/providers/redis/ioredis.d.ts +0 -61
  86. package/build/services/stream/providers/redis/ioredis.js +0 -272
  87. package/build/services/stream/providers/redis/redis.d.ts +0 -61
  88. package/build/services/stream/providers/redis/redis.js +0 -305
  89. package/build/services/sub/providers/redis/ioredis.d.ts +0 -17
  90. package/build/services/sub/providers/redis/ioredis.js +0 -81
  91. package/build/services/sub/providers/redis/redis.d.ts +0 -17
  92. package/build/services/sub/providers/redis/redis.js +0 -72
  93. package/build/types/redis.d.ts +0 -258
  94. package/build/types/redis.js +0 -11
@@ -33,10 +33,6 @@ class Search {
33
33
  * @private
34
34
  */
35
35
  this.searchSessionIndex = 0;
36
- /**
37
- * @private
38
- */
39
- this.cachedFields = {};
40
36
  const keyParams = {
41
37
  appId: hotMeshClient.appId,
42
38
  jobId: workflowId,
@@ -111,6 +107,46 @@ class Search {
111
107
  }
112
108
  }
113
109
  }
110
+ /**
111
+ * Returns all user-defined attributes (udata) for a workflow.
112
+ * These are fields that start with underscore (_) and have type='udata'.
113
+ *
114
+ * @example
115
+ * ```typescript
116
+ * const allUserData = await Search.findAllUserData('job123', hotMeshClient);
117
+ * // Returns: { _status: "active", _counter: "42", _name: "test" }
118
+ * ```
119
+ */
120
+ static async findAllUserData(jobId, hotMeshClient) {
121
+ const keyParams = {
122
+ appId: hotMeshClient.appId,
123
+ jobId: jobId,
124
+ };
125
+ const key = key_1.KeyService.mintKey(hotMeshClient.namespace, key_1.KeyType.JOB_STATE, keyParams);
126
+ const search = hotMeshClient.engine.search;
127
+ const rawResult = await search.updateContext(key, {
128
+ '@udata:all': ''
129
+ });
130
+ // Transform the result:
131
+ // 1. Remove underscore prefix from keys
132
+ // 2. Handle special fields (like exponential values)
133
+ const result = {};
134
+ for (const [key, value] of Object.entries(rawResult)) {
135
+ // Remove underscore prefix
136
+ const cleanKey = key.startsWith('_') ? key.slice(1) : key;
137
+ // Special handling for fields that use logarithmic storage
138
+ if (cleanKey === 'multer') {
139
+ // Convert from log value back to actual value
140
+ const expValue = Math.exp(Number(value));
141
+ // Round to nearest integer since log multiplication doesn't need decimal precision
142
+ result[cleanKey] = Math.round(expValue).toString();
143
+ }
144
+ else {
145
+ result[cleanKey] = value;
146
+ }
147
+ }
148
+ return result;
149
+ }
114
150
  /**
115
151
  * Returns an array of search indexes ids
116
152
  *
@@ -153,27 +189,31 @@ class Search {
153
189
  if (ssGuid in replay) {
154
190
  return Number(replay[ssGuid]);
155
191
  }
156
- const fields = {};
192
+ // Prepare fields to set with udata format
193
+ let udataFields;
157
194
  if (typeof args[0] === 'object') {
195
+ // Object format: { field1: 'value1', field2: 'value2' }
196
+ udataFields = {};
158
197
  for (const [key, value] of Object.entries(args[0])) {
159
- delete this.cachedFields[key];
160
- fields[this.safeKey(key)] = value.toString();
198
+ udataFields[this.safeKey(key)] = value.toString();
161
199
  }
162
200
  }
163
201
  else {
202
+ // Array format: ['field1', 'value1', 'field2', 'value2']
203
+ udataFields = [];
164
204
  for (let i = 0; i < args.length; i += 2) {
165
205
  const keyName = args[i];
166
- delete this.cachedFields[keyName];
167
206
  const key = this.safeKey(keyName);
168
207
  const value = args[i + 1].toString();
169
- fields[key] = value;
208
+ udataFields.push(key, value);
170
209
  }
171
210
  }
172
- const fieldCount = await this.search.setFields(this.jobId, fields);
173
- await this.search.setFields(this.jobId, {
174
- [ssGuid]: fieldCount.toString(),
211
+ // Use single transactional call to update fields and store replay value
212
+ const result = await this.search.updateContext(this.jobId, {
213
+ '@udata:set': JSON.stringify(udataFields),
214
+ [ssGuid]: '', // Pass replay ID to hash module for transactional replay storage
175
215
  });
176
- return fieldCount;
216
+ return result;
177
217
  }
178
218
  /**
179
219
  * Returns the value of the record data field, given a field id
@@ -183,13 +223,20 @@ class Search {
183
223
  * const value = await search.get('field1');
184
224
  */
185
225
  async get(id) {
226
+ const ssGuid = this.getSearchSessionGuid();
227
+ const store = storage_1.asyncLocalStorage.getStore();
228
+ const replay = store?.get('replay') ?? {};
229
+ if (ssGuid in replay) {
230
+ // Replay cache stores the field value
231
+ return replay[ssGuid];
232
+ }
186
233
  try {
187
- if (id in this.cachedFields) {
188
- return this.cachedFields[id];
189
- }
190
- const value = await this.search.getField(this.jobId, this.safeKey(id));
191
- this.cachedFields[id] = value;
192
- return value;
234
+ // Use server-side udata get operation with replay storage
235
+ const result = await this.search.updateContext(this.jobId, {
236
+ '@udata:get': this.safeKey(id),
237
+ [ssGuid]: '', // Pass replay ID to hash module
238
+ });
239
+ return result || '';
193
240
  }
194
241
  catch (error) {
195
242
  this.hotMeshClient.logger.error('memflow-search-get-error', {
@@ -202,29 +249,22 @@ class Search {
202
249
  * Returns the values of all specified fields in the HASH stored at key.
203
250
  */
204
251
  async mget(...args) {
205
- let isCached = true;
206
- const values = [];
207
- const safeArgs = [];
208
- for (let i = 0; i < args.length; i++) {
209
- if (isCached && args[i] in this.cachedFields) {
210
- values.push(this.cachedFields[args[i]]);
211
- }
212
- else {
213
- isCached = false;
214
- }
215
- safeArgs.push(this.safeKey(args[i]));
252
+ const ssGuid = this.getSearchSessionGuid();
253
+ const store = storage_1.asyncLocalStorage.getStore();
254
+ const replay = store?.get('replay') ?? {};
255
+ if (ssGuid in replay) {
256
+ // Replay cache stores the field values array
257
+ const replayValue = replay[ssGuid];
258
+ return typeof replayValue === 'string' ? replayValue.split('|||') : replayValue;
216
259
  }
217
260
  try {
218
- if (isCached) {
219
- return values;
220
- }
221
- const returnValues = await this.search.getFields(this.jobId, safeArgs);
222
- returnValues.forEach((value, index) => {
223
- if (value !== null) {
224
- this.cachedFields[args[index]] = value;
225
- }
261
+ const safeArgs = args.map(arg => this.safeKey(arg));
262
+ // Use server-side udata mget operation with replay storage
263
+ const result = await this.search.updateContext(this.jobId, {
264
+ '@udata:mget': JSON.stringify(safeArgs),
265
+ [ssGuid]: '', // Pass replay ID to hash module
226
266
  });
227
- return returnValues;
267
+ return result || [];
228
268
  }
229
269
  catch (error) {
230
270
  this.hotMeshClient.logger.error('memflow-search-mget-error', {
@@ -245,23 +285,16 @@ class Search {
245
285
  const ssGuid = this.getSearchSessionGuid();
246
286
  const store = storage_1.asyncLocalStorage.getStore();
247
287
  const replay = store?.get('replay') ?? {};
248
- const safeArgs = [];
249
- for (let i = 0; i < args.length; i++) {
250
- const keyName = args[i];
251
- delete this.cachedFields[keyName];
252
- safeArgs.push(this.safeKey(keyName));
253
- }
254
288
  if (ssGuid in replay) {
255
289
  return Number(replay[ssGuid]);
256
290
  }
257
- const response = await this.search.deleteFields(this.jobId, safeArgs);
258
- const formattedResponse = isNaN(response)
259
- ? 0
260
- : Number(response);
261
- await this.search.setFields(this.jobId, {
262
- [ssGuid]: formattedResponse.toString(),
291
+ const safeArgs = args.map(arg => this.safeKey(arg));
292
+ // Use server-side udata delete operation with replay storage
293
+ const result = await this.search.updateContext(this.jobId, {
294
+ '@udata:delete': JSON.stringify(safeArgs),
295
+ [ssGuid]: '', // Pass replay ID to hash module for transactional replay storage
263
296
  });
264
- return formattedResponse;
297
+ return Number(result || 0);
265
298
  }
266
299
  /**
267
300
  * Increments the value of a float field by the given amount. Returns the
@@ -273,16 +306,18 @@ class Search {
273
306
  * const count = await search.incr('field1', 1.5);
274
307
  */
275
308
  async incr(key, val) {
276
- delete this.cachedFields[key];
277
309
  const ssGuid = this.getSearchSessionGuid();
278
310
  const store = storage_1.asyncLocalStorage.getStore();
279
311
  const replay = store?.get('replay') ?? {};
280
312
  if (ssGuid in replay) {
281
313
  return Number(replay[ssGuid]);
282
314
  }
283
- const num = await this.search.incrementFieldByFloat(this.jobId, this.safeKey(key), val);
284
- await this.search.setFields(this.jobId, { [ssGuid]: num.toString() });
285
- return num;
315
+ // Use server-side udata increment operation with replay storage
316
+ const result = await this.search.updateContext(this.jobId, {
317
+ '@udata:increment': JSON.stringify({ field: this.safeKey(key), value: val }),
318
+ [ssGuid]: '', // Pass replay ID to hash module for transactional replay storage
319
+ });
320
+ return Number(result);
286
321
  }
287
322
  /**
288
323
  * Multiplies the value of a field by the given amount. Returns the
@@ -294,27 +329,19 @@ class Search {
294
329
  * const product = await search.mult('field1', 1.5);
295
330
  */
296
331
  async mult(key, val) {
297
- delete this.cachedFields[key];
298
332
  const ssGuid = this.getSearchSessionGuid();
299
333
  const store = storage_1.asyncLocalStorage.getStore();
300
334
  const replay = store?.get('replay') ?? {};
301
335
  if (ssGuid in replay) {
302
336
  return Math.exp(Number(replay[ssGuid]));
303
337
  }
304
- const ssGuidValue = await this.search.incrementFieldByFloat(this.jobId, ssGuid, 1);
305
- if (ssGuidValue === 1) {
306
- const log = Math.log(val);
307
- const logTotal = await this.search.incrementFieldByFloat(this.jobId, this.safeKey(key), log);
308
- await this.search.setFields(this.jobId, {
309
- [ssGuid]: logTotal.toString(),
310
- });
311
- return Math.exp(logTotal);
312
- }
313
- else {
314
- const logTotalStr = await this.search.getField(this.jobId, ssGuid);
315
- const logTotal = Number(logTotalStr);
316
- return Math.exp(logTotal);
317
- }
338
+ // Use server-side udata multiply operation with replay storage
339
+ const result = await this.search.updateContext(this.jobId, {
340
+ '@udata:multiply': JSON.stringify({ field: this.safeKey(key), value: val }),
341
+ [ssGuid]: '', // Pass replay ID to hash module for transactional replay storage
342
+ });
343
+ // The result is the log value, so we need to exponentiate it
344
+ return Math.exp(Number(result));
318
345
  }
319
346
  }
320
347
  exports.Search = Search;
@@ -2,7 +2,7 @@ import { HotMesh } from '../hotmesh';
2
2
  import { Connection, Registry, WorkerConfig, WorkerOptions } from '../../types/memflow';
3
3
  /**
4
4
  * The *Worker* service Registers worker functions and connects them to the mesh,
5
- * using the target backend provider/s (Redis, Postgres, NATS, etc).
5
+ * using the target backend provider/s (Postgres, NATS, etc).
6
6
  *
7
7
  * @example
8
8
  * ```typescript
@@ -10,9 +10,10 @@ const hotmesh_1 = require("../hotmesh");
10
10
  const stream_1 = require("../../types/stream");
11
11
  const search_1 = require("./search");
12
12
  const factory_1 = require("./schemas/factory");
13
+ const index_1 = require("./index");
13
14
  /**
14
15
  * The *Worker* service Registers worker functions and connects them to the mesh,
15
- * using the target backend provider/s (Redis, Postgres, NATS, etc).
16
+ * using the target backend provider/s (Postgres, NATS, etc).
16
17
  *
17
18
  * @example
18
19
  * ```typescript
@@ -321,8 +322,16 @@ class WorkerService {
321
322
  const [cursor, replay] = await store.findJobFields(workflowInput.workflowId, replayQuery, 50000, 5000);
322
323
  context.set('replay', replay);
323
324
  context.set('cursor', cursor); // if != 0, more remain
325
+ // Execute workflow with interceptors
324
326
  const workflowResponse = await storage_1.asyncLocalStorage.run(context, async () => {
325
- return await workflowFunction.apply(this, workflowInput.arguments);
327
+ // Get the interceptor service
328
+ const interceptorService = index_1.MemFlow.getInterceptorService();
329
+ // Create the workflow execution function
330
+ const execWorkflow = async () => {
331
+ return await workflowFunction.apply(this, workflowInput.arguments);
332
+ };
333
+ // Execute the workflow through the interceptor chain
334
+ return await interceptorService.executeChain(context, execWorkflow);
326
335
  });
327
336
  //if the embedded function has a try/catch, it can interrupt the throw
328
337
  // throw here to interrupt the workflow if the embedded function caught and suppressed the error
@@ -22,7 +22,9 @@ function getChildInterruptPayload(context, options, execIndex) {
22
22
  }
23
23
  const parentWorkflowId = workflowId;
24
24
  const taskQueueName = options.taskQueue ?? options.entity;
25
- const workflowName = options.taskQueue ? options.workflowName : (options.entity ?? options.workflowName);
25
+ const workflowName = options.taskQueue
26
+ ? options.workflowName
27
+ : options.entity ?? options.workflowName;
26
28
  const workflowTopic = `${taskQueueName}-${workflowName}`;
27
29
  return {
28
30
  arguments: [...(options.args || [])],
@@ -67,7 +67,7 @@ async function execHook(options) {
67
67
  }
68
68
  const hookOptions = {
69
69
  ...options,
70
- args: [...options.args, { signal: options.signalId, $memflow: true }]
70
+ args: [...options.args, { signal: options.signalId, $memflow: true }],
71
71
  };
72
72
  // Execute the hook with the signal information
73
73
  await (0, hook_1.hook)(hookOptions);
@@ -27,7 +27,9 @@ async function hook(options) {
27
27
  targetTopic = workflowTopic;
28
28
  }
29
29
  // DEFENSIVE CHECK: Prevent infinite loops
30
- if (targetTopic === workflowTopic && !options.entity && !options.taskQueue) {
30
+ if (targetTopic === workflowTopic &&
31
+ !options.entity &&
32
+ !options.taskQueue) {
31
33
  throw new Error(`MemFlow Hook Error: Potential infinite loop detected!\n\n` +
32
34
  `The hook would target the same workflow topic ('${workflowTopic}') as the current workflow, ` +
33
35
  `creating an infinite loop.\n\n` +
@@ -39,7 +41,7 @@ async function hook(options) {
39
41
  `Provided options: ${JSON.stringify({
40
42
  workflowName: options.workflowName,
41
43
  taskQueue: options.taskQueue,
42
- entity: options.entity
44
+ entity: options.entity,
43
45
  }, null, 2)}`);
44
46
  }
45
47
  const payload = {
@@ -55,7 +55,8 @@ function wrapActivity(activityName, options) {
55
55
  throw new common_1.MemFlowTimeoutError(message, stack);
56
56
  }
57
57
  else {
58
- // Non-fatal error
58
+ // For any other error code, throw a MemFlowFatalError to stop the workflow
59
+ throw new common_1.MemFlowFatalError(message, stack);
59
60
  }
60
61
  }
61
62
  return result.$error;
@@ -4,7 +4,7 @@ import { ProviderConfig, ProvidersConfig } from '../../types/provider';
4
4
  /**
5
5
  * MeshCall connects any function as an idempotent endpoint.
6
6
  * Call functions from anywhere on the network connected to the
7
- * target backend (Postgres, Redis/ValKey, NATS, etc). Function
7
+ * target backend (Postgres, NATS, etc). Function
8
8
  * responses are cacheable and invocations can be scheduled to
9
9
  * run as idempotent cron jobs (this one runs nightly at midnight
10
10
  * and uses Postgres as the backend provider).
@@ -11,7 +11,7 @@ const factory_1 = require("./schemas/factory");
11
11
  /**
12
12
  * MeshCall connects any function as an idempotent endpoint.
13
13
  * Call functions from anywhere on the network connected to the
14
- * target backend (Postgres, Redis/ValKey, NATS, etc). Function
14
+ * target backend (Postgres, NATS, etc). Function
15
15
  * responses are cacheable and invocations can be scheduled to
16
16
  * run as idempotent cron jobs (this one runs nightly at midnight
17
17
  * and uses Postgres as the backend provider).
@@ -15,7 +15,7 @@ declare class ReporterService {
15
15
  private validateOptions;
16
16
  private generateDateTimeSets;
17
17
  private convertRangeToMinutes;
18
- private buildRedisKey;
18
+ private buildKeys;
19
19
  private aggregateData;
20
20
  private buildStatsResponse;
21
21
  private handleSegments;
@@ -13,10 +13,10 @@ class ReporterService {
13
13
  const { key, granularity, range, end, start } = options;
14
14
  this.validateOptions(options);
15
15
  const dateTimeSets = this.generateDateTimeSets(granularity, range, end, start);
16
- const redisKeys = dateTimeSets.map((dateTime) => this.buildRedisKey(key, dateTime));
17
- const rawData = await this.store.getJobStats(redisKeys);
16
+ const keys = dateTimeSets.map((dateTime) => this.buildKeys(key, dateTime));
17
+ const rawData = await this.store.getJobStats(keys);
18
18
  const [count, aggregatedData] = this.aggregateData(rawData);
19
- const statsResponse = this.buildStatsResponse(rawData, redisKeys, aggregatedData, count, options);
19
+ const statsResponse = this.buildStatsResponse(rawData, keys, aggregatedData, count, options);
20
20
  return statsResponse;
21
21
  }
22
22
  validateOptions(options) {
@@ -93,7 +93,7 @@ class ReporterService {
93
93
  return null;
94
94
  }
95
95
  }
96
- buildRedisKey(key, dateTime, subTarget = '') {
96
+ buildKeys(key, dateTime, subTarget = '') {
97
97
  return `hmsh:${this.appVersion.id}:s:${key}:${dateTime}${subTarget ? ':' + subTarget : ''}`;
98
98
  }
99
99
  aggregateData(rawData) {
@@ -115,12 +115,12 @@ class ReporterService {
115
115
  });
116
116
  return [count, aggregatedData];
117
117
  }
118
- buildStatsResponse(rawData, redisKeys, aggregatedData, count, options) {
118
+ buildStatsResponse(rawData, keys, aggregatedData, count, options) {
119
119
  const measures = [];
120
120
  const measureKeys = Object.keys(aggregatedData).filter((key) => key !== 'count');
121
121
  let segments = undefined;
122
122
  if (options.sparse !== true) {
123
- segments = this.handleSegments(rawData, redisKeys);
123
+ segments = this.handleSegments(rawData, keys);
124
124
  }
125
125
  measureKeys.forEach((key) => {
126
126
  const measure = {
@@ -179,12 +179,12 @@ class ReporterService {
179
179
  }
180
180
  const { key, granularity, range, end, start } = options;
181
181
  this.validateOptions(options);
182
- let redisKeys = [];
182
+ let keys = [];
183
183
  facets.forEach((facet) => {
184
184
  const dateTimeSets = this.generateDateTimeSets(granularity, range, end, start);
185
- redisKeys = redisKeys.concat(dateTimeSets.map((dateTime) => this.buildRedisKey(key, dateTime, `index:${facet}`)));
185
+ keys = keys.concat(dateTimeSets.map((dateTime) => this.buildKeys(key, dateTime, `index:${facet}`)));
186
186
  });
187
- const idsData = await this.store.getJobIds(redisKeys, idRange);
187
+ const idsData = await this.store.getJobIds(keys, idRange);
188
188
  const idsResponse = this.buildIdsResponse(idsData, options, facets);
189
189
  return idsResponse;
190
190
  }
@@ -254,12 +254,12 @@ class ReporterService {
254
254
  }
255
255
  const { key, granularity, range, end, start } = options;
256
256
  this.validateOptions(options);
257
- let redisKeys = [];
257
+ let keys = [];
258
258
  facets.forEach((facet) => {
259
259
  const dateTimeSets = this.generateDateTimeSets(granularity, range, end, start);
260
- redisKeys = redisKeys.concat(dateTimeSets.map((dateTime) => this.buildRedisKey(key, dateTime, `index:${facet}`)));
260
+ keys = keys.concat(dateTimeSets.map((dateTime) => this.buildKeys(key, dateTime, `index:${facet}`)));
261
261
  });
262
- const idsData = await this.store.getJobIds(redisKeys, [0, 1]);
262
+ const idsData = await this.store.getJobIds(keys, [0, 1]);
263
263
  const workerLists = this.buildWorkerLists(idsData);
264
264
  return workerLists;
265
265
  }
@@ -49,12 +49,20 @@ class ConsumptionManager {
49
49
  const features = this.stream.getProviderSpecificFeatures();
50
50
  const supportsNotifications = features.supportsNotifications;
51
51
  if (supportsNotifications) {
52
- this.logger.info(`router-stream-using-notifications`, { group, consumer, stream });
52
+ this.logger.info(`router-stream-using-notifications`, {
53
+ group,
54
+ consumer,
55
+ stream,
56
+ });
53
57
  this.lifecycleManager.setIsUsingNotifications(true);
54
58
  return this.consumeWithNotifications(stream, group, consumer, callback);
55
59
  }
56
60
  else {
57
- this.logger.info(`router-stream-using-polling`, { group, consumer, stream });
61
+ this.logger.info(`router-stream-using-polling`, {
62
+ group,
63
+ consumer,
64
+ stream,
65
+ });
58
66
  this.lifecycleManager.setIsUsingNotifications(false);
59
67
  return this.consumeWithPolling(stream, group, consumer, callback);
60
68
  }
@@ -67,7 +75,8 @@ class ConsumptionManager {
67
75
  return;
68
76
  }
69
77
  await this.throttleManager.customSleep(); // respect throttle
70
- if (this.lifecycleManager.isStopped(group, consumer, stream) || this.throttleManager.isPaused()) {
78
+ if (this.lifecycleManager.isStopped(group, consumer, stream) ||
79
+ this.throttleManager.isPaused()) {
71
80
  return;
72
81
  }
73
82
  // Process messages - use parallel processing for PostgreSQL
@@ -78,7 +87,7 @@ class ConsumptionManager {
78
87
  this.logger.debug('postgres-stream-parallel-processing', {
79
88
  streamName: stream,
80
89
  groupName: group,
81
- messageCount: messages.length
90
+ messageCount: messages.length,
82
91
  });
83
92
  const processingStart = Date.now();
84
93
  const processingPromises = messages.map(async (message) => {
@@ -93,7 +102,7 @@ class ConsumptionManager {
93
102
  streamName: stream,
94
103
  groupName: group,
95
104
  messageCount: messages.length,
96
- processingDuration: Date.now() - processingStart
105
+ processingDuration: Date.now() - processingStart,
97
106
  });
98
107
  }
99
108
  else {
@@ -153,7 +162,11 @@ class ConsumptionManager {
153
162
  consumer,
154
163
  });
155
164
  // Fall back to polling if notifications fail
156
- this.logger.info(`router-stream-fallback-to-polling`, { group, consumer, stream });
165
+ this.logger.info(`router-stream-fallback-to-polling`, {
166
+ group,
167
+ consumer,
168
+ stream,
169
+ });
157
170
  this.lifecycleManager.setIsUsingNotifications(false);
158
171
  return this.consumeWithPolling(stream, group, consumer, callback);
159
172
  }
@@ -223,7 +236,7 @@ class ConsumptionManager {
223
236
  this.logger.debug('postgres-stream-parallel-processing-polling', {
224
237
  streamName: stream,
225
238
  groupName: group,
226
- messageCount: messages.length
239
+ messageCount: messages.length,
227
240
  });
228
241
  const processingStart = Date.now();
229
242
  const processingPromises = messages.map(async (message) => {
@@ -238,7 +251,7 @@ class ConsumptionManager {
238
251
  streamName: stream,
239
252
  groupName: group,
240
253
  messageCount: messages.length,
241
- processingDuration: Date.now() - processingStart
254
+ processingDuration: Date.now() - processingStart,
242
255
  });
243
256
  }
244
257
  else {
@@ -299,7 +312,8 @@ class ConsumptionManager {
299
312
  }
300
313
  }
301
314
  catch (error) {
302
- if (this.lifecycleManager.getShouldConsume() && process.env.NODE_ENV !== 'test') {
315
+ if (this.lifecycleManager.getShouldConsume() &&
316
+ process.env.NODE_ENV !== 'test') {
303
317
  this.logger.error(`router-stream-error`, {
304
318
  error,
305
319
  stream,
@@ -82,16 +82,16 @@ class ErrorHandler {
82
82
  const [shouldRetry, timeout] = this.shouldRetry(input, output);
83
83
  if (shouldRetry) {
84
84
  await (0, utils_1.sleepFor)(timeout);
85
- return await publishMessage(input.metadata.topic, {
85
+ return (await publishMessage(input.metadata.topic, {
86
86
  data: input.data,
87
87
  //note: retain guid (this is a retry attempt)
88
88
  metadata: { ...input.metadata, try: (input.metadata.try || 0) + 1 },
89
89
  policies: input.policies,
90
- });
90
+ }));
91
91
  }
92
92
  else {
93
93
  const structuredError = this.structureError(input, output);
94
- return await publishMessage(null, structuredError);
94
+ return (await publishMessage(null, structuredError));
95
95
  }
96
96
  }
97
97
  }
@@ -3,20 +3,12 @@ Object.defineProperty(exports, "__esModule", { value: true });
3
3
  exports.SearchServiceFactory = void 0;
4
4
  const utils_1 = require("../../modules/utils");
5
5
  const postgres_1 = require("./providers/postgres/postgres");
6
- const ioredis_1 = require("./providers/redis/ioredis");
7
- const redis_1 = require("./providers/redis/redis");
8
6
  class SearchServiceFactory {
9
7
  static async init(providerClient, storeProviderClient, namespace, appId, logger) {
10
8
  let service;
11
9
  if ((0, utils_1.identifyProvider)(providerClient) === 'postgres') {
12
10
  service = new postgres_1.PostgresSearchService(providerClient, storeProviderClient);
13
11
  }
14
- else if ((0, utils_1.identifyProvider)(providerClient) === 'redis') {
15
- service = new redis_1.RedisSearchService(providerClient, storeProviderClient);
16
- }
17
- else {
18
- service = new ioredis_1.IORedisSearchService(providerClient, storeProviderClient);
19
- }
20
12
  await service.init(namespace, appId, logger);
21
13
  return service;
22
14
  }