@hotmeshio/hotmesh 0.9.0 → 0.10.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (96)
  1. package/README.md +38 -23
  2. package/build/index.d.ts +12 -11
  3. package/build/index.js +15 -13
  4. package/build/modules/enums.d.ts +23 -34
  5. package/build/modules/enums.js +26 -38
  6. package/build/modules/errors.d.ts +16 -16
  7. package/build/modules/errors.js +37 -37
  8. package/build/package.json +23 -22
  9. package/build/services/activities/activity.js +1 -1
  10. package/build/services/dba/index.d.ts +171 -0
  11. package/build/services/dba/index.js +280 -0
  12. package/build/services/{memflow → durable}/client.d.ts +3 -3
  13. package/build/services/{memflow → durable}/client.js +13 -13
  14. package/build/services/{memflow → durable}/connection.d.ts +2 -2
  15. package/build/services/{memflow → durable}/connection.js +1 -1
  16. package/build/services/{memflow → durable}/exporter.d.ts +6 -6
  17. package/build/services/{memflow → durable}/exporter.js +2 -2
  18. package/build/services/{memflow → durable}/handle.d.ts +4 -4
  19. package/build/services/{memflow → durable}/handle.js +2 -2
  20. package/build/services/{memflow → durable}/index.d.ts +125 -33
  21. package/build/services/{memflow → durable}/index.js +145 -49
  22. package/build/services/{memflow → durable}/interceptor.d.ts +45 -22
  23. package/build/services/{memflow → durable}/interceptor.js +54 -21
  24. package/build/services/{memflow → durable}/schemas/factory.d.ts +4 -4
  25. package/build/services/{memflow → durable}/schemas/factory.js +5 -5
  26. package/build/services/{memflow → durable}/search.d.ts +1 -1
  27. package/build/services/{memflow → durable}/search.js +4 -4
  28. package/build/services/{memflow → durable}/worker.d.ts +11 -11
  29. package/build/services/{memflow → durable}/worker.js +61 -60
  30. package/build/services/{memflow → durable}/workflow/all.d.ts +5 -5
  31. package/build/services/{memflow → durable}/workflow/all.js +5 -5
  32. package/build/services/{memflow → durable}/workflow/common.d.ts +5 -5
  33. package/build/services/durable/workflow/common.js +47 -0
  34. package/build/services/{memflow → durable}/workflow/context.d.ts +5 -5
  35. package/build/services/{memflow → durable}/workflow/context.js +5 -5
  36. package/build/services/{memflow → durable}/workflow/emit.d.ts +5 -5
  37. package/build/services/{memflow → durable}/workflow/emit.js +5 -5
  38. package/build/services/{memflow → durable}/workflow/enrich.d.ts +4 -4
  39. package/build/services/{memflow → durable}/workflow/enrich.js +4 -4
  40. package/build/services/{memflow → durable}/workflow/entityMethods.d.ts +4 -4
  41. package/build/services/{memflow → durable}/workflow/entityMethods.js +4 -4
  42. package/build/services/{memflow → durable}/workflow/execChild.d.ts +9 -9
  43. package/build/services/{memflow → durable}/workflow/execChild.js +22 -22
  44. package/build/services/{memflow → durable}/workflow/execHook.d.ts +8 -8
  45. package/build/services/{memflow → durable}/workflow/execHook.js +10 -10
  46. package/build/services/{memflow → durable}/workflow/execHookBatch.d.ts +5 -5
  47. package/build/services/{memflow → durable}/workflow/execHookBatch.js +8 -8
  48. package/build/services/{memflow → durable}/workflow/hook.d.ts +5 -5
  49. package/build/services/{memflow → durable}/workflow/hook.js +11 -11
  50. package/build/services/{memflow → durable}/workflow/index.d.ts +6 -6
  51. package/build/services/{memflow → durable}/workflow/index.js +6 -6
  52. package/build/services/{memflow → durable}/workflow/interrupt.d.ts +7 -7
  53. package/build/services/{memflow → durable}/workflow/interrupt.js +7 -7
  54. package/build/services/{memflow → durable}/workflow/interruption.d.ts +10 -10
  55. package/build/services/{memflow → durable}/workflow/interruption.js +19 -19
  56. package/build/services/{memflow → durable}/workflow/proxyActivities.d.ts +7 -7
  57. package/build/services/{memflow → durable}/workflow/proxyActivities.js +31 -21
  58. package/build/services/{memflow → durable}/workflow/random.d.ts +4 -4
  59. package/build/services/{memflow → durable}/workflow/random.js +4 -4
  60. package/build/services/{memflow → durable}/workflow/searchMethods.d.ts +5 -5
  61. package/build/services/{memflow → durable}/workflow/searchMethods.js +5 -5
  62. package/build/services/{memflow → durable}/workflow/signal.d.ts +8 -8
  63. package/build/services/{memflow → durable}/workflow/signal.js +8 -8
  64. package/build/services/{memflow → durable}/workflow/sleepFor.d.ts +7 -7
  65. package/build/services/{memflow → durable}/workflow/sleepFor.js +10 -10
  66. package/build/services/{memflow → durable}/workflow/trace.d.ts +5 -5
  67. package/build/services/{memflow → durable}/workflow/trace.js +5 -5
  68. package/build/services/{memflow → durable}/workflow/waitFor.d.ts +9 -9
  69. package/build/services/{memflow → durable}/workflow/waitFor.js +12 -12
  70. package/build/services/hotmesh/index.d.ts +3 -3
  71. package/build/services/hotmesh/index.js +3 -3
  72. package/build/services/{meshcall → virtual}/index.d.ts +29 -29
  73. package/build/services/{meshcall → virtual}/index.js +49 -49
  74. package/build/services/{meshcall → virtual}/schemas/factory.d.ts +1 -1
  75. package/build/services/{meshcall → virtual}/schemas/factory.js +1 -1
  76. package/build/types/dba.d.ts +64 -0
  77. package/build/types/{memflow.d.ts → durable.d.ts} +74 -18
  78. package/build/types/error.d.ts +5 -5
  79. package/build/types/exporter.d.ts +1 -1
  80. package/build/types/index.d.ts +5 -4
  81. package/build/types/{meshcall.d.ts → virtual.d.ts} +15 -15
  82. package/build/types/virtual.js +2 -0
  83. package/index.ts +15 -13
  84. package/package.json +23 -22
  85. package/.claude/settings.local.json +0 -8
  86. package/build/services/memflow/workflow/common.js +0 -47
  87. package/build/vitest.config.d.ts +0 -2
  88. package/build/vitest.config.js +0 -18
  89. /package/build/services/{memflow → durable}/entity.d.ts +0 -0
  90. /package/build/services/{memflow → durable}/entity.js +0 -0
  91. /package/build/services/{memflow → durable}/workflow/didRun.d.ts +0 -0
  92. /package/build/services/{memflow → durable}/workflow/didRun.js +0 -0
  93. /package/build/services/{memflow → durable}/workflow/isSideEffectAllowed.d.ts +0 -0
  94. /package/build/services/{memflow → durable}/workflow/isSideEffectAllowed.js +0 -0
  95. /package/build/types/{memflow.js → dba.js} +0 -0
  96. /package/build/types/{meshcall.js → durable.js} +0 -0
@@ -0,0 +1,280 @@
1
+ "use strict";
2
+ Object.defineProperty(exports, "__esModule", { value: true });
3
+ exports.DBA = void 0;
4
+ const utils_1 = require("../../modules/utils");
5
+ const postgres_1 = require("../connector/providers/postgres");
6
+ /**
7
+ * Database maintenance operations for HotMesh's Postgres backend.
8
+ *
9
+ * HotMesh uses soft-delete patterns: expired jobs and stream messages
10
+ * retain their rows with `expired_at` set but are never physically
11
+ * removed during normal operation. Over time, three tables accumulate
12
+ * dead rows:
13
+ *
14
+ * | Table | What accumulates |
15
+ * |---|---|
16
+ * | `{appId}.jobs` | Completed/expired jobs with `expired_at` set |
17
+ * | `{appId}.jobs_attributes` | Execution artifacts (`adata`, `hmark`, `jmark`, `status`, `other`) that are only needed during workflow execution |
18
+ * | `{appId}.streams` | Processed stream messages with `expired_at` set |
19
+ *
20
+ * The `DBA` service addresses this with two methods:
21
+ *
22
+ * - {@link DBA.prune | prune()} — Targets any combination of jobs,
23
+ * streams, and attributes independently. Each table can be pruned on
24
+ * its own schedule with its own retention window.
25
+ * - {@link DBA.deploy | deploy()} — Pre-deploys the Postgres function
26
+ * (e.g., during CI/CD migrations) without running a prune.
27
+ *
28
+ * ## Independent cron schedules (TypeScript)
29
+ *
30
+ * Each table can be targeted independently, allowing different retention
31
+ * windows and schedules:
32
+ *
33
+ * @example
34
+ * ```typescript
35
+ * import { Client as Postgres } from 'pg';
36
+ * import { DBA } from '@hotmeshio/hotmesh';
37
+ *
38
+ * const connection = {
39
+ * class: Postgres,
40
+ * options: { connectionString: 'postgresql://usr:pwd@localhost:5432/db' },
41
+ * };
42
+ *
43
+ * // Cron 1 — Nightly: strip execution artifacts from completed jobs
44
+ * // Keeps all jobs and their jdata/udata; keeps all streams.
45
+ * await DBA.prune({
46
+ * appId: 'myapp', connection,
47
+ * jobs: false, streams: false, attributes: true,
48
+ * });
49
+ *
50
+ * // Cron 2 — Hourly: remove processed stream messages older than 24h
51
+ * await DBA.prune({
52
+ * appId: 'myapp', connection,
53
+ * expire: '24 hours',
54
+ * jobs: false, streams: true,
55
+ * });
56
+ *
57
+ * // Cron 3 — Weekly: remove expired jobs older than 30 days
58
+ * await DBA.prune({
59
+ * appId: 'myapp', connection,
60
+ * expire: '30 days',
61
+ * jobs: true, streams: false,
62
+ * });
63
+ * ```
64
+ *
65
+ * ## Direct SQL (schedulable via pg_cron)
66
+ *
67
+ * The underlying Postgres function can be called directly, without
68
+ * the TypeScript SDK. Schedule it via `pg_cron`, `crontab`, or any
69
+ * SQL client:
70
+ *
71
+ * ```sql
72
+ * -- Strip attributes only (keep all jobs and streams)
73
+ * SELECT * FROM myapp.prune('0 seconds', false, false, true);
74
+ *
75
+ * -- Prune streams older than 24 hours (keep jobs)
76
+ * SELECT * FROM myapp.prune('24 hours', false, true, false);
77
+ *
78
+ * -- Prune expired jobs older than 30 days (keep streams)
79
+ * SELECT * FROM myapp.prune('30 days', true, false, false);
80
+ *
81
+ * -- Prune everything older than 7 days and strip attributes
82
+ * SELECT * FROM myapp.prune('7 days', true, true, true);
83
+ * ```
84
+ */
85
+ class DBA {
86
+ /**
87
+ * @private
88
+ */
89
+ constructor() { }
90
+ /**
91
+ * Sanitizes an appId for use as a Postgres schema name.
92
+ * Mirrors the naming logic used during table deployment.
93
+ * @private
94
+ */
95
+ static safeName(input) {
96
+ if (!input)
97
+ return 'connections';
98
+ let name = input.trim().toLowerCase();
99
+ name = name.replace(/[^a-z0-9]+/g, '_');
100
+ if (name.length > 63) {
101
+ name = name.slice(0, 63);
102
+ }
103
+ name = name.replace(/_+$/g, '');
104
+ return name || 'connections';
105
+ }
106
+ /**
107
+ * Acquires a Postgres client from the connection config.
108
+ * @private
109
+ */
110
+ static async getClient(connection) {
111
+ if (postgres_1.PostgresConnection.isPoolClient(connection.class)) {
112
+ const poolClient = await connection.class.connect();
113
+ return {
114
+ client: poolClient,
115
+ release: async () => poolClient.release(),
116
+ };
117
+ }
118
+ const pgConnection = await postgres_1.PostgresConnection.connect((0, utils_1.guid)(), connection.class, connection.options);
119
+ return {
120
+ client: pgConnection.getClient(),
121
+ release: async () => { },
122
+ };
123
+ }
124
+ /**
125
+ * Returns the SQL for the server-side `prune()` function.
126
+ * @private
127
+ */
128
+ static getPruneFunctionSQL(schema) {
129
+ return `
130
+ CREATE OR REPLACE FUNCTION ${schema}.prune(
131
+ retention INTERVAL DEFAULT INTERVAL '7 days',
132
+ prune_jobs BOOLEAN DEFAULT TRUE,
133
+ prune_streams BOOLEAN DEFAULT TRUE,
134
+ strip_attributes BOOLEAN DEFAULT FALSE
135
+ )
136
+ RETURNS TABLE(
137
+ deleted_jobs BIGINT,
138
+ deleted_streams BIGINT,
139
+ stripped_attributes BIGINT
140
+ )
141
+ LANGUAGE plpgsql
142
+ AS $$
143
+ DECLARE
144
+ v_deleted_jobs BIGINT := 0;
145
+ v_deleted_streams BIGINT := 0;
146
+ v_stripped_attributes BIGINT := 0;
147
+ BEGIN
148
+ -- 1. Hard-delete expired jobs older than the retention window.
149
+ -- FK CASCADE on jobs_attributes handles attribute cleanup.
150
+ IF prune_jobs THEN
151
+ DELETE FROM ${schema}.jobs
152
+ WHERE expired_at IS NOT NULL
153
+ AND expired_at < NOW() - retention;
154
+ GET DIAGNOSTICS v_deleted_jobs = ROW_COUNT;
155
+ END IF;
156
+
157
+ -- 2. Hard-delete expired stream messages older than the retention window.
158
+ IF prune_streams THEN
159
+ DELETE FROM ${schema}.streams
160
+ WHERE expired_at IS NOT NULL
161
+ AND expired_at < NOW() - retention;
162
+ GET DIAGNOSTICS v_deleted_streams = ROW_COUNT;
163
+ END IF;
164
+
165
+ -- 3. Optionally strip execution artifacts from completed, live jobs.
166
+ -- Retains jdata (workflow return data) and udata (searchable data).
167
+ IF strip_attributes THEN
168
+ DELETE FROM ${schema}.jobs_attributes
169
+ WHERE job_id IN (
170
+ SELECT id FROM ${schema}.jobs
171
+ WHERE status = 0
172
+ AND is_live = TRUE
173
+ )
174
+ AND type NOT IN ('jdata', 'udata');
175
+ GET DIAGNOSTICS v_stripped_attributes = ROW_COUNT;
176
+ END IF;
177
+
178
+ deleted_jobs := v_deleted_jobs;
179
+ deleted_streams := v_deleted_streams;
180
+ stripped_attributes := v_stripped_attributes;
181
+ RETURN NEXT;
182
+ END;
183
+ $$;
184
+ `;
185
+ }
186
+ /**
187
+ * Deploys the `prune()` Postgres function into the target schema.
188
+ * Idempotent — uses `CREATE OR REPLACE` and can be called repeatedly.
189
+ *
190
+ * The function is automatically deployed when {@link DBA.prune} is called,
191
+ * but this method is exposed for explicit control (e.g., CI/CD
192
+ * migration scripts that provision database objects before the
193
+ * application starts).
194
+ *
195
+ * @param connection - Postgres provider configuration
196
+ * @param appId - Application identifier (schema name)
197
+ *
198
+ * @example
199
+ * ```typescript
200
+ * import { Client as Postgres } from 'pg';
201
+ * import { DBA } from '@hotmeshio/hotmesh';
202
+ *
203
+ * // Pre-deploy during CI/CD migration
204
+ * await DBA.deploy(
205
+ * {
206
+ * class: Postgres,
207
+ * options: { connectionString: 'postgresql://usr:pwd@localhost:5432/db' }
208
+ * },
209
+ * 'myapp',
210
+ * );
211
+ * ```
212
+ */
213
+ static async deploy(connection, appId) {
214
+ const schema = DBA.safeName(appId);
215
+ const { client, release } = await DBA.getClient(connection);
216
+ try {
217
+ await client.query(DBA.getPruneFunctionSQL(schema));
218
+ }
219
+ finally {
220
+ await release();
221
+ }
222
+ }
223
+ /**
224
+ * Prunes expired data and/or strips execution artifacts from
225
+ * completed jobs. Each operation is independently controlled,
226
+ * so callers can target a single table per cron schedule.
227
+ *
228
+ * Operations (each enabled individually):
229
+ * 1. **jobs** — Hard-deletes expired jobs older than the retention
230
+ * window (FK CASCADE removes their attributes automatically)
231
+ * 2. **streams** — Hard-deletes expired stream messages older than
232
+ * the retention window
233
+ * 3. **attributes** — Strips non-essential attributes (`adata`,
234
+ * `hmark`, `jmark`, `status`, `other`) from completed jobs,
235
+ * retaining only `jdata` and `udata`
236
+ *
237
+ * @param options - Prune configuration
238
+ * @returns Counts of deleted/stripped rows
239
+ *
240
+ * @example
241
+ * ```typescript
242
+ * import { Client as Postgres } from 'pg';
243
+ * import { DBA } from '@hotmeshio/hotmesh';
244
+ *
245
+ * // Strip attributes only — keep all jobs and streams
246
+ * await DBA.prune({
247
+ * appId: 'myapp',
248
+ * connection: {
249
+ * class: Postgres,
250
+ * options: { connectionString: 'postgresql://usr:pwd@localhost:5432/db' }
251
+ * },
252
+ * jobs: false,
253
+ * streams: false,
254
+ * attributes: true,
255
+ * });
256
+ * ```
257
+ */
258
+ static async prune(options) {
259
+ const schema = DBA.safeName(options.appId);
260
+ const expire = options.expire ?? '7 days';
261
+ const jobs = options.jobs ?? true;
262
+ const streams = options.streams ?? true;
263
+ const attributes = options.attributes ?? false;
264
+ await DBA.deploy(options.connection, options.appId);
265
+ const { client, release } = await DBA.getClient(options.connection);
266
+ try {
267
+ const result = await client.query(`SELECT * FROM ${schema}.prune($1::interval, $2::boolean, $3::boolean, $4::boolean)`, [expire, jobs, streams, attributes]);
268
+ const row = result.rows[0];
269
+ return {
270
+ jobs: Number(row.deleted_jobs),
271
+ streams: Number(row.deleted_streams),
272
+ attributes: Number(row.stripped_attributes),
273
+ };
274
+ }
275
+ finally {
276
+ await release();
277
+ }
278
+ }
279
+ }
280
+ exports.DBA = DBA;
@@ -1,7 +1,7 @@
1
1
  import { HotMesh } from '../hotmesh';
2
- import { ClientConfig, ClientWorkflow, Connection, WorkflowOptions } from '../../types/memflow';
2
+ import { ClientConfig, ClientWorkflow, Connection, WorkflowOptions } from '../../types/durable';
3
3
  /**
4
- * The MemFlow `Client` service is functionally
4
+ * The Durable `Client` service is functionally
5
5
  * equivalent to the Temporal `Client` service.
6
6
  * Start a new workflow execution by calling
7
7
  * `workflow.start`. Note the direct connection to
@@ -81,7 +81,7 @@ export declare class ClientService {
81
81
  */
82
82
  search: (hotMeshClient: HotMesh, index: string, query: string[]) => Promise<string[]>;
83
83
  /**
84
- * The MemFlow `Client` service is functionally
84
+ * The Durable `Client` service is functionally
85
85
  * equivalent to the Temporal `Client` service.
86
86
  * Starting a workflow is the primary use case and
87
87
  * is accessed by calling workflow.start().
@@ -11,7 +11,7 @@ const search_1 = require("./search");
11
11
  const handle_1 = require("./handle");
12
12
  const factory_1 = require("./schemas/factory");
13
13
  /**
14
- * The MemFlow `Client` service is functionally
14
+ * The Durable `Client` service is functionally
15
15
  * equivalent to the Temporal `Client` service.
16
16
  * Start a new workflow execution by calling
17
17
  * `workflow.start`. Note the direct connection to
@@ -83,7 +83,7 @@ class ClientService {
83
83
  //resolve, activate, and return the client
84
84
  const resolvedClient = await hotMeshClient;
85
85
  if (!readonly) {
86
- resolvedClient.engine.logger.info('memflow-readonly-client', {
86
+ resolvedClient.engine.logger.info('durable-readonly-client', {
87
87
  guid: resolvedClient.engine.guid,
88
88
  appId: targetNS,
89
89
  });
@@ -114,7 +114,7 @@ class ClientService {
114
114
  return await searchClient.sendIndexedQuery(index, query);
115
115
  };
116
116
  /**
117
- * The MemFlow `Client` service is functionally
117
+ * The Durable `Client` service is functionally
118
118
  * equivalent to the Temporal `Client` service.
119
119
  * Starting a workflow is the primary use case and
120
120
  * is accessed by calling workflow.start().
@@ -145,9 +145,9 @@ class ClientService {
145
145
  parentWorkflowId: options.parentWorkflowId,
146
146
  workflowId: options.workflowId || hotmesh_1.HotMesh.guid(),
147
147
  workflowTopic: workflowTopic,
148
- backoffCoefficient: options.config?.backoffCoefficient || enums_1.HMSH_MEMFLOW_EXP_BACKOFF,
149
- maximumAttempts: options.config?.maximumAttempts || enums_1.HMSH_MEMFLOW_MAX_ATTEMPTS,
150
- maximumInterval: (0, utils_1.s)(options.config?.maximumInterval || enums_1.HMSH_MEMFLOW_MAX_INTERVAL),
148
+ backoffCoefficient: options.config?.backoffCoefficient || enums_1.HMSH_DURABLE_EXP_BACKOFF,
149
+ maximumAttempts: options.config?.maximumAttempts || enums_1.HMSH_DURABLE_MAX_ATTEMPTS,
150
+ maximumInterval: (0, utils_1.s)(options.config?.maximumInterval || enums_1.HMSH_DURABLE_MAX_INTERVAL),
151
151
  };
152
152
  const context = { metadata: { trc, spn }, data: {} };
153
153
  const jobId = await hotMeshClient.pub(`${options.namespace ?? factory_1.APP_ID}.execute`, payload, context, {
@@ -191,9 +191,9 @@ class ClientService {
191
191
  arguments: [...options.args],
192
192
  id: options.workflowId,
193
193
  workflowTopic,
194
- backoffCoefficient: options.config?.backoffCoefficient || enums_1.HMSH_MEMFLOW_EXP_BACKOFF,
195
- maximumAttempts: options.config?.maximumAttempts || enums_1.HMSH_MEMFLOW_MAX_ATTEMPTS,
196
- maximumInterval: (0, utils_1.s)(options.config?.maximumInterval || enums_1.HMSH_MEMFLOW_MAX_INTERVAL),
194
+ backoffCoefficient: options.config?.backoffCoefficient || enums_1.HMSH_DURABLE_EXP_BACKOFF,
195
+ maximumAttempts: options.config?.maximumAttempts || enums_1.HMSH_DURABLE_MAX_ATTEMPTS,
196
+ maximumInterval: (0, utils_1.s)(options.config?.maximumInterval || enums_1.HMSH_DURABLE_MAX_INTERVAL),
197
197
  };
198
198
  //seed search data before entering
199
199
  const hotMeshClient = await this.getHotMeshClient(taskQueue, options.namespace);
@@ -237,7 +237,7 @@ class ClientService {
237
237
  * await client.workflow.search(
238
238
  * 'someTaskQueue'
239
239
  * 'someWorkflowName',
240
- * 'memflow',
240
+ * 'durable',
241
241
  * 'user',
242
242
  * ...args,
243
243
  * );
@@ -251,7 +251,7 @@ class ClientService {
251
251
  return await this.search(hotMeshClient, index, query);
252
252
  }
253
253
  catch (error) {
254
- hotMeshClient.engine.logger.error('memflow-client-search-err', {
254
+ hotMeshClient.engine.logger.error('durable-client-search-err', {
255
255
  error,
256
256
  });
257
257
  throw error;
@@ -314,7 +314,7 @@ class ClientService {
314
314
  await hotMesh.activate(version);
315
315
  }
316
316
  catch (error) {
317
- hotMesh.engine.logger.error('memflow-client-activate-err', {
317
+ hotMesh.engine.logger.error('durable-client-activate-err', {
318
318
  error,
319
319
  });
320
320
  throw error;
@@ -326,7 +326,7 @@ class ClientService {
326
326
  await hotMesh.activate(version);
327
327
  }
328
328
  catch (error) {
329
- hotMesh.engine.logger.error('memflow-client-deploy-activate-err', {
329
+ hotMesh.engine.logger.error('durable-client-deploy-activate-err', {
330
330
  error,
331
331
  });
332
332
  throw error;
@@ -1,10 +1,10 @@
1
- import { Connection } from '../../types/memflow';
1
+ import { Connection } from '../../types/durable';
2
2
  import { ProviderConfig, ProvidersConfig } from '../../types/provider';
3
3
  /**
4
4
  * The Connection service is used to declare the class
5
5
  * and connection options but does not connect quite yet. Connection
6
6
  * happens at a later lifecycle stage when a workflow
7
- * is started by the MemFlow Client module (`(new MemFlow.Client())).start()`).
7
+ * is started by the Durable Client module (`(new Durable.Client()).start()`).
8
8
  *
9
9
  * The config options optionally support a multi-connection setup
10
10
  * where the `store` connection is explicitly defined along with `stream`, `sub`, etc.
@@ -5,7 +5,7 @@ exports.ConnectionService = void 0;
5
5
  * The Connection service is used to declare the class
6
6
  * and connection options but does not connect quite yet. Connection
7
7
  * happens at a later lifecycle stage when a workflow
8
- * is started by the MemFlow Client module (`(new MemFlow.Client())).start()`).
8
+ * is started by the Durable Client module (`(new Durable.Client()).start()`).
9
9
  *
10
10
  * The config options optionally support a multi-connection setup
11
11
  * where the `store` connection is explicitly defined along with `stream`, `sub`, etc.
@@ -1,6 +1,6 @@
1
1
  import { ILogger } from '../logger';
2
2
  import { StoreService } from '../store';
3
- import { ExportOptions, MemFlowJobExport, TimelineType, TransitionType, ExportFields } from '../../types/exporter';
3
+ import { ExportOptions, DurableJobExport, TimelineType, TransitionType, ExportFields } from '../../types/exporter';
4
4
  import { ProviderClient, ProviderTransaction } from '../../types/provider';
5
5
  import { StringStringType, Symbols } from '../../types/serializer';
6
6
  declare class ExporterService {
@@ -11,17 +11,17 @@ declare class ExporterService {
11
11
  private static symbols;
12
12
  constructor(appId: string, store: StoreService<ProviderClient, ProviderTransaction>, logger: ILogger);
13
13
  /**
14
- * Convert the job hash from its compiles format into a MemFlowJobExport object with
14
+ * Convert the job hash from its compiled format into a DurableJobExport object with
15
15
  * facets that describe the workflow in terms relevant to narrative storytelling.
16
16
  */
17
- export(jobId: string, options?: ExportOptions): Promise<MemFlowJobExport>;
17
+ export(jobId: string, options?: ExportOptions): Promise<DurableJobExport>;
18
18
  /**
19
- * Inflates the job data into a MemFlowJobExport object
19
+ * Inflates the job data into a DurableJobExport object
20
20
  * @param jobHash - the job data
21
21
  * @param dependencyList - the list of dependencies for the job
22
22
  * @returns - the inflated job data
23
23
  */
24
- inflate(jobHash: StringStringType, options: ExportOptions): MemFlowJobExport;
24
+ inflate(jobHash: StringStringType, options: ExportOptions): DurableJobExport;
25
25
  resolveValue(raw: string, withValues: boolean): Record<string, any> | string | number | null;
26
26
  /**
27
27
  * Inflates the key
@@ -30,7 +30,7 @@ declare class ExporterService {
30
30
  * @private
31
31
  */
32
32
  inflateKey(key: string): string;
33
- filterFields(fullObject: MemFlowJobExport, block?: ExportFields[], allow?: ExportFields[]): Partial<MemFlowJobExport>;
33
+ filterFields(fullObject: DurableJobExport, block?: ExportFields[], allow?: ExportFields[]): Partial<DurableJobExport>;
34
34
  inflateTransition(match: RegExpMatchArray, value: string, transitionsObject: Record<string, TransitionType>): void;
35
35
  sortEntriesByCreated(obj: {
36
36
  [key: string]: TransitionType;
@@ -10,7 +10,7 @@ class ExporterService {
10
10
  this.store = store;
11
11
  }
12
12
  /**
13
- * Convert the job hash from its compiles format into a MemFlowJobExport object with
13
+ * Convert the job hash from its compiled format into a DurableJobExport object with
14
14
  * facets that describe the workflow in terms relevant to narrative storytelling.
15
15
  */
16
16
  async export(jobId, options = {}) {
@@ -23,7 +23,7 @@ class ExporterService {
23
23
  return jobExport;
24
24
  }
25
25
  /**
26
- * Inflates the job data into a MemFlowJobExport object
26
+ * Inflates the job data into a DurableJobExport object
27
27
  * @param jobHash - the job data
28
28
  * @param dependencyList - the list of dependencies for the job
29
29
  * @returns - the inflated job data
@@ -1,5 +1,5 @@
1
1
  import { HotMesh } from '../hotmesh';
2
- import { MemFlowJobExport, ExportOptions } from '../../types/exporter';
2
+ import { DurableJobExport, ExportOptions } from '../../types/exporter';
3
3
  import { JobInterruptOptions } from '../../types/job';
4
4
  import { StreamError } from '../../types/stream';
5
5
  import { ExporterService } from './exporter';
@@ -7,7 +7,7 @@ import { ExporterService } from './exporter';
7
7
  * The WorkflowHandleService provides methods to interact with a running
8
8
  * workflow. This includes exporting the workflow, sending signals, and
9
9
  * querying the state of the workflow. It is instanced/accessed via the
10
- * MemFlow.Client class.
10
+ * Durable.Client class.
11
11
  *
12
12
  * @example
13
13
  * ```typescript
@@ -43,11 +43,11 @@ export declare class WorkflowHandleService {
43
43
  /**
44
44
  * Exports the workflow state to a JSON object.
45
45
  */
46
- export(options?: ExportOptions): Promise<MemFlowJobExport>;
46
+ export(options?: ExportOptions): Promise<DurableJobExport>;
47
47
  /**
48
48
  * Sends a signal to the workflow. This is a way to send
49
49
  * a message to a workflow that is paused due to having
50
- * executed `MemFlow.workflow.waitFor`. The workflow
50
+ * executed `Durable.workflow.waitFor`. The workflow
51
51
  * will awaken if no other signals are pending.
52
52
  */
53
53
  signal(signalId: string, data: Record<any, any>): Promise<void>;
@@ -6,7 +6,7 @@ const exporter_1 = require("./exporter");
6
6
  * The WorkflowHandleService provides methods to interact with a running
7
7
  * workflow. This includes exporting the workflow, sending signals, and
8
8
  * querying the state of the workflow. It is instanced/accessed via the
9
- * MemFlow.Client class.
9
+ * Durable.Client class.
10
10
  *
11
11
  * @example
12
12
  * ```typescript
@@ -46,7 +46,7 @@ class WorkflowHandleService {
46
46
  /**
47
47
  * Sends a signal to the workflow. This is a way to send
48
48
  * a message to a workflow that is paused due to having
49
- * executed `MemFlow.workflow.waitFor`. The workflow
49
+ * executed `Durable.workflow.waitFor`. The workflow
50
50
  * will awaken if no other signals are pending.
51
51
  */
52
52
  async signal(signalId, data) {