@hotmeshio/hotmesh 0.13.0 → 0.14.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (194)
  1. package/README.md +18 -22
  2. package/build/modules/enums.d.ts +60 -5
  3. package/build/modules/enums.js +62 -7
  4. package/build/modules/errors.d.ts +15 -3
  5. package/build/modules/errors.js +17 -2
  6. package/build/package.json +6 -1
  7. package/build/services/activities/activity/context.d.ts +22 -0
  8. package/build/services/activities/activity/context.js +76 -0
  9. package/build/services/activities/activity/index.d.ts +116 -0
  10. package/build/services/activities/activity/index.js +299 -0
  11. package/build/services/activities/activity/mapping.d.ts +12 -0
  12. package/build/services/activities/activity/mapping.js +63 -0
  13. package/build/services/activities/activity/process.d.ts +28 -0
  14. package/build/services/activities/activity/process.js +100 -0
  15. package/build/services/activities/activity/protocol.d.ts +39 -0
  16. package/build/services/activities/activity/protocol.js +151 -0
  17. package/build/services/activities/activity/state.d.ts +40 -0
  18. package/build/services/activities/activity/state.js +143 -0
  19. package/build/services/activities/activity/transition.d.ts +23 -0
  20. package/build/services/activities/activity/transition.js +71 -0
  21. package/build/services/activities/activity/verify.d.ts +22 -0
  22. package/build/services/activities/activity/verify.js +85 -0
  23. package/build/services/activities/await.d.ts +1 -4
  24. package/build/services/activities/await.js +2 -36
  25. package/build/services/activities/cycle.d.ts +1 -11
  26. package/build/services/activities/cycle.js +3 -46
  27. package/build/services/activities/hook.d.ts +2 -11
  28. package/build/services/activities/hook.js +30 -50
  29. package/build/services/activities/interrupt.d.ts +2 -4
  30. package/build/services/activities/interrupt.js +4 -38
  31. package/build/services/activities/signal.d.ts +1 -11
  32. package/build/services/activities/signal.js +3 -48
  33. package/build/services/activities/trigger.d.ts +1 -3
  34. package/build/services/activities/trigger.js +0 -3
  35. package/build/services/activities/worker.d.ts +3 -6
  36. package/build/services/activities/worker.js +4 -40
  37. package/build/services/connector/factory.d.ts +6 -0
  38. package/build/services/connector/factory.js +24 -0
  39. package/build/services/durable/activity.d.ts +1 -1
  40. package/build/services/durable/activity.js +2 -2
  41. package/build/services/durable/client.d.ts +24 -29
  42. package/build/services/durable/client.js +24 -29
  43. package/build/services/durable/connection.d.ts +13 -7
  44. package/build/services/durable/connection.js +13 -7
  45. package/build/services/durable/handle.d.ts +58 -40
  46. package/build/services/durable/handle.js +60 -40
  47. package/build/services/durable/index.d.ts +148 -286
  48. package/build/services/durable/index.js +157 -292
  49. package/build/services/durable/interceptor.d.ts +43 -33
  50. package/build/services/durable/interceptor.js +59 -39
  51. package/build/services/durable/schemas/factory.d.ts +1 -1
  52. package/build/services/durable/schemas/factory.js +168 -38
  53. package/build/services/durable/telemetry.d.ts +80 -0
  54. package/build/services/durable/telemetry.js +137 -0
  55. package/build/services/durable/worker.d.ts +100 -21
  56. package/build/services/durable/worker.js +304 -63
  57. package/build/services/durable/workflow/all.d.ts +1 -1
  58. package/build/services/durable/workflow/all.js +1 -1
  59. package/build/services/durable/workflow/cancellationScope.d.ts +104 -0
  60. package/build/services/durable/workflow/cancellationScope.js +139 -0
  61. package/build/services/durable/workflow/common.d.ts +5 -4
  62. package/build/services/durable/workflow/common.js +6 -1
  63. package/build/services/durable/workflow/{waitFor.d.ts → condition.d.ts} +9 -8
  64. package/build/services/durable/workflow/{waitFor.js → condition.js} +44 -11
  65. package/build/services/durable/workflow/continueAsNew.d.ts +65 -0
  66. package/build/services/durable/workflow/continueAsNew.js +92 -0
  67. package/build/services/durable/workflow/didRun.d.ts +1 -1
  68. package/build/services/durable/workflow/didRun.js +3 -3
  69. package/build/services/durable/workflow/enrich.d.ts +5 -0
  70. package/build/services/durable/workflow/enrich.js +5 -0
  71. package/build/services/durable/workflow/entityMethods.d.ts +7 -0
  72. package/build/services/durable/workflow/entityMethods.js +7 -0
  73. package/build/services/durable/workflow/execHook.js +3 -3
  74. package/build/services/durable/workflow/execHookBatch.js +2 -2
  75. package/build/services/durable/workflow/{execChild.d.ts → executeChild.d.ts} +4 -40
  76. package/build/services/durable/workflow/{execChild.js → executeChild.js} +36 -45
  77. package/build/services/durable/workflow/hook.d.ts +1 -1
  78. package/build/services/durable/workflow/hook.js +4 -3
  79. package/build/services/durable/workflow/index.d.ts +45 -50
  80. package/build/services/durable/workflow/index.js +46 -51
  81. package/build/services/durable/workflow/interruption.d.ts +7 -6
  82. package/build/services/durable/workflow/interruption.js +11 -7
  83. package/build/services/durable/workflow/patched.d.ts +72 -0
  84. package/build/services/durable/workflow/patched.js +110 -0
  85. package/build/services/durable/workflow/proxyActivities.d.ts +7 -7
  86. package/build/services/durable/workflow/proxyActivities.js +50 -15
  87. package/build/services/durable/workflow/searchMethods.d.ts +7 -0
  88. package/build/services/durable/workflow/searchMethods.js +7 -0
  89. package/build/services/durable/workflow/signal.d.ts +4 -4
  90. package/build/services/durable/workflow/signal.js +4 -4
  91. package/build/services/durable/workflow/{sleepFor.d.ts → sleep.d.ts} +7 -7
  92. package/build/services/durable/workflow/{sleepFor.js → sleep.js} +39 -10
  93. package/build/services/durable/workflow/terminate.d.ts +55 -0
  94. package/build/services/durable/workflow/{interrupt.js → terminate.js} +21 -21
  95. package/build/services/durable/workflow/trace.js +2 -2
  96. package/build/services/durable/workflow/uuid4.d.ts +14 -0
  97. package/build/services/durable/workflow/uuid4.js +39 -0
  98. package/build/services/durable/workflow/{context.d.ts → workflowInfo.d.ts} +5 -5
  99. package/build/services/durable/workflow/{context.js → workflowInfo.js} +7 -7
  100. package/build/services/engine/compiler.d.ts +19 -0
  101. package/build/services/engine/compiler.js +20 -0
  102. package/build/services/engine/completion.d.ts +46 -0
  103. package/build/services/engine/completion.js +145 -0
  104. package/build/services/engine/dispatch.d.ts +24 -0
  105. package/build/services/engine/dispatch.js +98 -0
  106. package/build/services/engine/index.d.ts +49 -81
  107. package/build/services/engine/index.js +175 -573
  108. package/build/services/engine/init.d.ts +42 -0
  109. package/build/services/engine/init.js +74 -0
  110. package/build/services/engine/pubsub.d.ts +50 -0
  111. package/build/services/engine/pubsub.js +118 -0
  112. package/build/services/engine/reporting.d.ts +20 -0
  113. package/build/services/engine/reporting.js +38 -0
  114. package/build/services/engine/schema.d.ts +23 -0
  115. package/build/services/engine/schema.js +62 -0
  116. package/build/services/engine/signal.d.ts +57 -0
  117. package/build/services/engine/signal.js +117 -0
  118. package/build/services/engine/state.d.ts +35 -0
  119. package/build/services/engine/state.js +61 -0
  120. package/build/services/engine/version.d.ts +31 -0
  121. package/build/services/engine/version.js +73 -0
  122. package/build/services/hotmesh/deployment.d.ts +21 -0
  123. package/build/services/hotmesh/deployment.js +25 -0
  124. package/build/services/hotmesh/index.d.ts +141 -532
  125. package/build/services/hotmesh/index.js +222 -673
  126. package/build/services/hotmesh/init.d.ts +42 -0
  127. package/build/services/hotmesh/init.js +93 -0
  128. package/build/services/hotmesh/jobs.d.ts +67 -0
  129. package/build/services/hotmesh/jobs.js +99 -0
  130. package/build/services/hotmesh/pubsub.d.ts +38 -0
  131. package/build/services/hotmesh/pubsub.js +54 -0
  132. package/build/services/hotmesh/quorum.d.ts +30 -0
  133. package/build/services/hotmesh/quorum.js +62 -0
  134. package/build/services/hotmesh/validation.d.ts +6 -0
  135. package/build/services/hotmesh/validation.js +28 -0
  136. package/build/services/quorum/index.js +1 -0
  137. package/build/services/router/consumption/index.d.ts +11 -5
  138. package/build/services/router/consumption/index.js +24 -17
  139. package/build/services/router/error-handling/index.d.ts +2 -2
  140. package/build/services/router/error-handling/index.js +14 -14
  141. package/build/services/router/index.d.ts +1 -1
  142. package/build/services/router/index.js +2 -2
  143. package/build/services/serializer/index.d.ts +22 -0
  144. package/build/services/serializer/index.js +39 -1
  145. package/build/services/store/index.d.ts +1 -0
  146. package/build/services/store/providers/postgres/exporter-sql.d.ts +2 -2
  147. package/build/services/store/providers/postgres/exporter-sql.js +4 -4
  148. package/build/services/store/providers/postgres/kvtables.js +7 -6
  149. package/build/services/store/providers/postgres/kvtypes/hash/basic.js +67 -52
  150. package/build/services/store/providers/postgres/kvtypes/hash/jsonb.js +87 -72
  151. package/build/services/store/providers/postgres/kvtypes/hash/udata.js +106 -79
  152. package/build/services/store/providers/postgres/kvtypes/hash/utils.d.ts +16 -0
  153. package/build/services/store/providers/postgres/kvtypes/hash/utils.js +29 -16
  154. package/build/services/store/providers/postgres/postgres.d.ts +1 -0
  155. package/build/services/store/providers/postgres/postgres.js +14 -4
  156. package/build/services/stream/factory.d.ts +3 -1
  157. package/build/services/stream/factory.js +2 -2
  158. package/build/services/stream/index.d.ts +1 -0
  159. package/build/services/stream/providers/nats/nats.d.ts +1 -0
  160. package/build/services/stream/providers/nats/nats.js +1 -0
  161. package/build/services/stream/providers/postgres/credentials.d.ts +56 -0
  162. package/build/services/stream/providers/postgres/credentials.js +129 -0
  163. package/build/services/stream/providers/postgres/kvtables.js +18 -0
  164. package/build/services/stream/providers/postgres/messages.js +7 -7
  165. package/build/services/stream/providers/postgres/notifications.js +16 -2
  166. package/build/services/stream/providers/postgres/postgres.d.ts +7 -0
  167. package/build/services/stream/providers/postgres/postgres.js +35 -4
  168. package/build/services/stream/providers/postgres/procedures.d.ts +21 -0
  169. package/build/services/stream/providers/postgres/procedures.js +213 -0
  170. package/build/services/stream/providers/postgres/secured.d.ts +34 -0
  171. package/build/services/stream/providers/postgres/secured.js +146 -0
  172. package/build/services/stream/providers/postgres/stats.d.ts +1 -0
  173. package/build/services/stream/providers/postgres/stats.js +1 -0
  174. package/build/services/stream/registry.d.ts +1 -1
  175. package/build/services/stream/registry.js +5 -2
  176. package/build/services/telemetry/index.d.ts +10 -1
  177. package/build/services/telemetry/index.js +40 -7
  178. package/build/services/worker/credentials.d.ts +51 -0
  179. package/build/services/worker/credentials.js +87 -0
  180. package/build/services/worker/index.d.ts +2 -2
  181. package/build/services/worker/index.js +7 -6
  182. package/build/types/codec.d.ts +84 -0
  183. package/build/types/codec.js +2 -0
  184. package/build/types/durable.d.ts +104 -28
  185. package/build/types/error.d.ts +10 -1
  186. package/build/types/hotmesh.d.ts +67 -4
  187. package/build/types/index.d.ts +2 -1
  188. package/build/types/provider.d.ts +2 -2
  189. package/build/types/quorum.d.ts +35 -1
  190. package/build/types/stream.d.ts +12 -6
  191. package/package.json +6 -1
  192. package/build/services/activities/activity.d.ts +0 -192
  193. package/build/services/activities/activity.js +0 -786
  194. package/build/services/durable/workflow/interrupt.d.ts +0 -55
@@ -44,69 +44,75 @@ function createUdataOperations(context) {
44
44
  // Version with replay storage
45
45
  const placeholders = fieldEntries
46
46
  .map(([fieldName, value], index) => {
47
- const baseIndex = index * 3 + 3;
48
- params.push(fieldName, value, 'udata');
49
- return `($${baseIndex}, $${baseIndex + 1}, $${baseIndex + 2}::${schemaName}.type_enum)`;
47
+ const baseIndex = index * 4 + 3;
48
+ const { symbol, dimension } = (0, utils_1.splitField)(fieldName);
49
+ params.push(symbol, dimension, value, 'udata');
50
+ return `($${baseIndex}, $${baseIndex + 1}, $${baseIndex + 2}, $${baseIndex + 3}::${schemaName}.type_enum)`;
50
51
  })
51
52
  .join(', ');
53
+ const { symbol: replaySym, dimension: replayDim } = (0, utils_1.splitField)(replayId);
54
+ const replayTypeIdx = 2 + fieldEntries.length * 4 + 1;
52
55
  sql = `
53
56
  WITH valid_job AS (
54
57
  SELECT id FROM ${tableName} WHERE key = $1 AND is_live
55
58
  ),
56
59
  upsert_fields AS (
57
- INSERT INTO ${tableName}_attributes (job_id, field, value, type)
58
- SELECT
60
+ INSERT INTO ${tableName}_attributes (job_id, symbol, dimension, value, type)
61
+ SELECT
59
62
  job.id,
60
- vals.field,
63
+ vals.symbol,
64
+ vals.dimension,
61
65
  vals.value,
62
66
  vals.type
63
67
  FROM valid_job job
64
68
  CROSS JOIN (
65
69
  VALUES ${placeholders}
66
- ) AS vals(field, value, type)
67
- ON CONFLICT (job_id, field) DO UPDATE SET value = EXCLUDED.value
70
+ ) AS vals(symbol, dimension, value, type)
71
+ ON CONFLICT (job_id, symbol, dimension) DO UPDATE SET value = EXCLUDED.value
68
72
  RETURNING 1 as field_count
69
73
  ),
70
74
  count_result AS (
71
75
  SELECT COUNT(*) as new_fields_count FROM upsert_fields
72
76
  ),
73
77
  replay_insert AS (
74
- INSERT INTO ${tableName}_attributes (job_id, field, value, type)
75
- SELECT job.id, $2, new_fields_count::text, $${2 + fieldEntries.length * 3 + 1}::${schemaName}.type_enum
78
+ INSERT INTO ${tableName}_attributes (job_id, symbol, dimension, value, type)
79
+ SELECT job.id, $2, '${replayDim}', new_fields_count::text, $${replayTypeIdx}::${schemaName}.type_enum
76
80
  FROM valid_job job, count_result
77
- ON CONFLICT (job_id, field) DO UPDATE
81
+ ON CONFLICT (job_id, symbol, dimension) DO UPDATE
78
82
  SET value = EXCLUDED.value
79
83
  RETURNING 1
80
84
  )
81
85
  SELECT new_fields_count FROM count_result
82
86
  `;
83
- params.unshift(key, replayId);
87
+ params.unshift(key, replaySym);
84
88
  params.push((0, utils_1.deriveType)(replayId));
85
89
  }
86
90
  else {
87
91
  // Version without replay storage
88
92
  const placeholders = fieldEntries
89
93
  .map(([fieldName, value], index) => {
90
- const baseIndex = index * 3 + 2;
91
- params.push(fieldName, value, 'udata');
92
- return `($${baseIndex}, $${baseIndex + 1}, $${baseIndex + 2}::${schemaName}.type_enum)`;
94
+ const baseIndex = index * 4 + 2;
95
+ const { symbol, dimension } = (0, utils_1.splitField)(fieldName);
96
+ params.push(symbol, dimension, value, 'udata');
97
+ return `($${baseIndex}, $${baseIndex + 1}, $${baseIndex + 2}, $${baseIndex + 3}::${schemaName}.type_enum)`;
93
98
  })
94
99
  .join(', ');
95
100
  sql = `
96
101
  WITH valid_job AS (
97
102
  SELECT id FROM ${tableName} WHERE key = $1 AND is_live
98
103
  )
99
- INSERT INTO ${tableName}_attributes (job_id, field, value, type)
100
- SELECT
104
+ INSERT INTO ${tableName}_attributes (job_id, symbol, dimension, value, type)
105
+ SELECT
101
106
  job.id,
102
- vals.field,
107
+ vals.symbol,
108
+ vals.dimension,
103
109
  vals.value,
104
110
  vals.type
105
111
  FROM valid_job job
106
112
  CROSS JOIN (
107
113
  VALUES ${placeholders}
108
- ) AS vals(field, value, type)
109
- ON CONFLICT (job_id, field) DO UPDATE SET value = EXCLUDED.value
114
+ ) AS vals(symbol, dimension, value, type)
115
+ ON CONFLICT (job_id, symbol, dimension) DO UPDATE SET value = EXCLUDED.value
110
116
  RETURNING 1 as count
111
117
  `;
112
118
  params.unshift(key);
@@ -116,38 +122,40 @@ function createUdataOperations(context) {
116
122
  function handleUdataGet(key, fields, options) {
117
123
  const tableName = context.tableForKey(key, 'hash');
118
124
  const fieldName = fields['@udata:get'];
125
+ const { symbol: fieldSym, dimension: fieldDim } = (0, utils_1.splitField)(fieldName);
119
126
  const replayId = Object.keys(fields).find((k) => k.includes('-') && k !== '@udata:get');
120
127
  const params = [];
121
128
  let sql = '';
122
129
  if (replayId) {
130
+ const { symbol: replaySym, dimension: replayDim } = (0, utils_1.splitField)(replayId);
123
131
  sql = `
124
132
  WITH field_data AS (
125
133
  SELECT COALESCE(a.value, '') as field_value
126
134
  FROM ${tableName} j
127
- LEFT JOIN ${tableName}_attributes a ON j.id = a.job_id AND a.field = $2
135
+ LEFT JOIN ${tableName}_attributes a ON j.id = a.job_id AND a.symbol = $2 AND a.dimension = $3
128
136
  WHERE j.key = $1 AND j.is_live
129
137
  ),
130
138
  replay_insert AS (
131
- INSERT INTO ${tableName}_attributes (job_id, field, value, type)
132
- SELECT j.id, $3, field_value, $4
139
+ INSERT INTO ${tableName}_attributes (job_id, symbol, dimension, value, type)
140
+ SELECT j.id, $4, $5, field_value, $6
133
141
  FROM ${tableName} j, field_data
134
142
  WHERE j.key = $1 AND j.is_live
135
- ON CONFLICT (job_id, field) DO UPDATE
143
+ ON CONFLICT (job_id, symbol, dimension) DO UPDATE
136
144
  SET value = EXCLUDED.value
137
145
  RETURNING 1
138
146
  )
139
147
  SELECT field_value as new_value FROM field_data
140
148
  `;
141
- params.push(key, fieldName, replayId, (0, utils_1.deriveType)(replayId));
149
+ params.push(key, fieldSym, fieldDim, replaySym, replayDim, (0, utils_1.deriveType)(replayId));
142
150
  }
143
151
  else {
144
152
  sql = `
145
153
  SELECT COALESCE(a.value, '') as new_value
146
154
  FROM ${tableName} j
147
- LEFT JOIN ${tableName}_attributes a ON j.id = a.job_id AND a.field = $2
155
+ LEFT JOIN ${tableName}_attributes a ON j.id = a.job_id AND a.symbol = $2 AND a.dimension = $3
148
156
  WHERE j.key = $1 AND j.is_live
149
157
  `;
150
- params.push(key, fieldName);
158
+ params.push(key, fieldSym, fieldDim);
151
159
  }
152
160
  return { sql, params };
153
161
  }
@@ -155,43 +163,54 @@ function createUdataOperations(context) {
155
163
  const tableName = context.tableForKey(key, 'hash');
156
164
  const fieldNames = JSON.parse(fields['@udata:mget']);
157
165
  const replayId = Object.keys(fields).find((k) => k.includes('-') && k !== '@udata:mget');
166
+ const symbols = fieldNames.map((f) => (0, utils_1.splitField)(f).symbol);
167
+ const dimensions = fieldNames.map((f) => (0, utils_1.splitField)(f).dimension);
158
168
  const params = [];
159
169
  let sql = '';
160
170
  if (replayId) {
171
+ const { symbol: replaySym, dimension: replayDim } = (0, utils_1.splitField)(replayId);
161
172
  sql = `
162
173
  WITH field_data AS (
163
174
  SELECT array_agg(COALESCE(a.value, '') ORDER BY field_order.idx) as field_values
164
175
  FROM ${tableName} j
165
176
  CROSS JOIN (
166
- SELECT unnest($2::text[]) as field_name, generate_subscripts($2::text[], 1) as idx
177
+ SELECT
178
+ unnest($2::text[]) as sym,
179
+ unnest($3::text[]) as dim,
180
+ generate_subscripts($2::text[], 1) as idx
167
181
  ) as field_order
168
- LEFT JOIN ${tableName}_attributes a ON j.id = a.job_id AND a.field = field_order.field_name
182
+ LEFT JOIN ${tableName}_attributes a
183
+ ON j.id = a.job_id AND a.symbol = field_order.sym AND a.dimension = field_order.dim
169
184
  WHERE j.key = $1 AND j.is_live
170
185
  ),
171
186
  replay_insert AS (
172
- INSERT INTO ${tableName}_attributes (job_id, field, value, type)
173
- SELECT j.id, $3, array_to_string(field_values, '|||'), $4
187
+ INSERT INTO ${tableName}_attributes (job_id, symbol, dimension, value, type)
188
+ SELECT j.id, $4, $5, array_to_string(field_values, '|||'), $6
174
189
  FROM ${tableName} j, field_data
175
190
  WHERE j.key = $1 AND j.is_live
176
- ON CONFLICT (job_id, field) DO UPDATE
191
+ ON CONFLICT (job_id, symbol, dimension) DO UPDATE
177
192
  SET value = EXCLUDED.value
178
193
  RETURNING 1
179
194
  )
180
195
  SELECT field_values as new_value FROM field_data
181
196
  `;
182
- params.push(key, fieldNames, replayId, (0, utils_1.deriveType)(replayId));
197
+ params.push(key, symbols, dimensions, replaySym, replayDim, (0, utils_1.deriveType)(replayId));
183
198
  }
184
199
  else {
185
200
  sql = `
186
201
  SELECT array_agg(COALESCE(a.value, '') ORDER BY field_order.idx) as new_value
187
202
  FROM ${tableName} j
188
203
  CROSS JOIN (
189
- SELECT unnest($2::text[]) as field_name, generate_subscripts($2::text[], 1) as idx
204
+ SELECT
205
+ unnest($2::text[]) as sym,
206
+ unnest($3::text[]) as dim,
207
+ generate_subscripts($2::text[], 1) as idx
190
208
  ) as field_order
191
- LEFT JOIN ${tableName}_attributes a ON j.id = a.job_id AND a.field = field_order.field_name
209
+ LEFT JOIN ${tableName}_attributes a
210
+ ON j.id = a.job_id AND a.symbol = field_order.sym AND a.dimension = field_order.dim
192
211
  WHERE j.key = $1 AND j.is_live
193
212
  `;
194
- params.push(key, fieldNames);
213
+ params.push(key, symbols, dimensions);
195
214
  }
196
215
  return { sql, params };
197
216
  }
@@ -199,33 +218,36 @@ function createUdataOperations(context) {
199
218
  const tableName = context.tableForKey(key, 'hash');
200
219
  const fieldNames = JSON.parse(fields['@udata:delete']);
201
220
  const replayId = Object.keys(fields).find((k) => k.includes('-') && k !== '@udata:delete');
221
+ const symbols = fieldNames.map((f) => (0, utils_1.splitField)(f).symbol);
222
+ const dimensions = fieldNames.map((f) => (0, utils_1.splitField)(f).dimension);
202
223
  const params = [];
203
224
  let sql = '';
204
225
  if (replayId) {
226
+ const { symbol: replaySym, dimension: replayDim } = (0, utils_1.splitField)(replayId);
205
227
  sql = `
206
228
  WITH deleted_fields AS (
207
229
  DELETE FROM ${tableName}_attributes
208
230
  WHERE job_id = (
209
231
  SELECT id FROM ${tableName} WHERE key = $1 AND is_live
210
232
  )
211
- AND field = ANY($2::text[])
233
+ AND (symbol, dimension) IN (SELECT unnest($2::text[]), unnest($3::text[]))
212
234
  RETURNING 1 as deleted_count
213
235
  ),
214
236
  count_result AS (
215
237
  SELECT COUNT(*) as total_deleted FROM deleted_fields
216
238
  ),
217
239
  replay_insert AS (
218
- INSERT INTO ${tableName}_attributes (job_id, field, value, type)
219
- SELECT j.id, $3, total_deleted::text, $4
240
+ INSERT INTO ${tableName}_attributes (job_id, symbol, dimension, value, type)
241
+ SELECT j.id, $4, $5, total_deleted::text, $6
220
242
  FROM ${tableName} j, count_result
221
243
  WHERE j.key = $1 AND j.is_live
222
- ON CONFLICT (job_id, field) DO UPDATE
244
+ ON CONFLICT (job_id, symbol, dimension) DO UPDATE
223
245
  SET value = EXCLUDED.value
224
246
  RETURNING 1
225
247
  )
226
248
  SELECT total_deleted as new_value FROM count_result
227
249
  `;
228
- params.push(key, fieldNames, replayId, (0, utils_1.deriveType)(replayId));
250
+ params.push(key, symbols, dimensions, replaySym, replayDim, (0, utils_1.deriveType)(replayId));
229
251
  }
230
252
  else {
231
253
  sql = `
@@ -234,66 +256,69 @@ function createUdataOperations(context) {
234
256
  WHERE job_id = (
235
257
  SELECT id FROM ${tableName} WHERE key = $1 AND is_live
236
258
  )
237
- AND field = ANY($2::text[])
259
+ AND (symbol, dimension) IN (SELECT unnest($2::text[]), unnest($3::text[]))
238
260
  RETURNING 1 as deleted_count
239
261
  )
240
262
  SELECT COUNT(*) as new_value FROM deleted_fields
241
263
  `;
242
- params.push(key, fieldNames);
264
+ params.push(key, symbols, dimensions);
243
265
  }
244
266
  return { sql, params };
245
267
  }
246
268
  function handleUdataIncrement(key, fields, options) {
247
269
  const tableName = context.tableForKey(key, 'hash');
248
270
  const { field, value } = JSON.parse(fields['@udata:increment']);
271
+ const { symbol: fieldSym, dimension: fieldDim } = (0, utils_1.splitField)(field);
249
272
  const replayId = Object.keys(fields).find((k) => k.includes('-') && k !== '@udata:increment');
250
273
  const schemaName = context.safeName(context.appId);
251
274
  const params = [];
252
275
  let sql = '';
253
276
  if (replayId) {
277
+ const { symbol: replaySym, dimension: replayDim } = (0, utils_1.splitField)(replayId);
254
278
  sql = `
255
279
  WITH valid_job AS (
256
280
  SELECT id FROM ${tableName} WHERE key = $1 AND is_live
257
281
  ),
258
282
  increment_result AS (
259
- INSERT INTO ${tableName}_attributes (job_id, field, value, type)
260
- SELECT id, $2, $3::text, $4::${schemaName}.type_enum
283
+ INSERT INTO ${tableName}_attributes (job_id, symbol, dimension, value, type)
284
+ SELECT id, $2, $3, $4::text, $5::${schemaName}.type_enum
261
285
  FROM valid_job
262
- ON CONFLICT (job_id, field) DO UPDATE
263
- SET value = ((COALESCE(${tableName}_attributes.value, '0')::double precision) + $3::double precision)::text
286
+ ON CONFLICT (job_id, symbol, dimension) DO UPDATE
287
+ SET value = ((COALESCE(${tableName}_attributes.value, '0')::double precision) + $4::double precision)::text
264
288
  RETURNING value
265
289
  ),
266
290
  replay_insert AS (
267
- INSERT INTO ${tableName}_attributes (job_id, field, value, type)
268
- SELECT job.id, $5, inc.value, $6::${schemaName}.type_enum
291
+ INSERT INTO ${tableName}_attributes (job_id, symbol, dimension, value, type)
292
+ SELECT job.id, $6, $7, inc.value, $8::${schemaName}.type_enum
269
293
  FROM valid_job job, increment_result inc
270
- ON CONFLICT (job_id, field) DO UPDATE
294
+ ON CONFLICT (job_id, symbol, dimension) DO UPDATE
271
295
  SET value = EXCLUDED.value
272
296
  RETURNING 1
273
297
  )
274
298
  SELECT value as new_value FROM increment_result
275
299
  `;
276
- params.push(key, field, value, 'udata', replayId, (0, utils_1.deriveType)(replayId));
300
+ params.push(key, fieldSym, fieldDim, value, 'udata', replaySym, replayDim, (0, utils_1.deriveType)(replayId));
277
301
  }
278
302
  else {
279
303
  sql = `
280
304
  WITH valid_job AS (
281
305
  SELECT id FROM ${tableName} WHERE key = $1 AND is_live
282
306
  )
283
- INSERT INTO ${tableName}_attributes (job_id, field, value, type)
284
- SELECT id, $2, $3::text, $4::${schemaName}.type_enum
307
+ INSERT INTO ${tableName}_attributes (job_id, symbol, dimension, value, type)
308
+ SELECT id, $2, $3, $4::text, $5::${schemaName}.type_enum
285
309
  FROM valid_job
286
- ON CONFLICT (job_id, field) DO UPDATE
287
- SET value = ((COALESCE(${tableName}_attributes.value, '0')::double precision) + $3::double precision)::text
310
+ ON CONFLICT (job_id, symbol, dimension) DO UPDATE
311
+ SET value = ((COALESCE(${tableName}_attributes.value, '0')::double precision) + $4::double precision)::text
288
312
  RETURNING value as new_value
289
313
  `;
290
- params.push(key, field, value, 'udata');
314
+ params.push(key, fieldSym, fieldDim, value, 'udata');
291
315
  }
292
316
  return { sql, params };
293
317
  }
294
318
  function handleUdataMultiply(key, fields, options) {
295
319
  const tableName = context.tableForKey(key, 'hash');
296
320
  const { field, value } = JSON.parse(fields['@udata:multiply']);
321
+ const { symbol: fieldSym, dimension: fieldDim } = (0, utils_1.splitField)(field);
297
322
  const replayId = Object.keys(fields).find((k) => k.includes('-') && k !== '@udata:multiply');
298
323
  const schemaName = context.safeName(context.appId);
299
324
  const params = [];
@@ -301,43 +326,44 @@ function createUdataOperations(context) {
301
326
  // For multiplication, we work with logarithms to support exponential multiplication
302
327
  // log(a * b) = log(a) + log(b), so exp(log(a) + log(b)) = a * b
303
328
  if (replayId) {
329
+ const { symbol: replaySym, dimension: replayDim } = (0, utils_1.splitField)(replayId);
304
330
  sql = `
305
331
  WITH valid_job AS (
306
332
  SELECT id FROM ${tableName} WHERE key = $1 AND is_live
307
333
  ),
308
334
  multiply_result AS (
309
- INSERT INTO ${tableName}_attributes (job_id, field, value, type)
310
- SELECT id, $2, ln($3::double precision)::text, $4::${schemaName}.type_enum
335
+ INSERT INTO ${tableName}_attributes (job_id, symbol, dimension, value, type)
336
+ SELECT id, $2, $3, ln($4::double precision)::text, $5::${schemaName}.type_enum
311
337
  FROM valid_job
312
- ON CONFLICT (job_id, field) DO UPDATE
313
- SET value = (COALESCE(${tableName}_attributes.value::double precision, 0) + ln($3::double precision))::text
338
+ ON CONFLICT (job_id, symbol, dimension) DO UPDATE
339
+ SET value = (COALESCE(${tableName}_attributes.value::double precision, 0) + ln($4::double precision))::text
314
340
  RETURNING value
315
341
  ),
316
342
  replay_insert AS (
317
- INSERT INTO ${tableName}_attributes (job_id, field, value, type)
318
- SELECT job.id, $5, mult.value, $6::${schemaName}.type_enum
343
+ INSERT INTO ${tableName}_attributes (job_id, symbol, dimension, value, type)
344
+ SELECT job.id, $6, $7, mult.value, $8::${schemaName}.type_enum
319
345
  FROM valid_job job, multiply_result mult
320
- ON CONFLICT (job_id, field) DO UPDATE
346
+ ON CONFLICT (job_id, symbol, dimension) DO UPDATE
321
347
  SET value = EXCLUDED.value
322
348
  RETURNING 1
323
349
  )
324
350
  SELECT value as new_value FROM multiply_result
325
351
  `;
326
- params.push(key, field, value, 'udata', replayId, (0, utils_1.deriveType)(replayId));
352
+ params.push(key, fieldSym, fieldDim, value, 'udata', replaySym, replayDim, (0, utils_1.deriveType)(replayId));
327
353
  }
328
354
  else {
329
355
  sql = `
330
356
  WITH valid_job AS (
331
357
  SELECT id FROM ${tableName} WHERE key = $1 AND is_live
332
358
  )
333
- INSERT INTO ${tableName}_attributes (job_id, field, value, type)
334
- SELECT id, $2, ln($3::double precision)::text, $4::${schemaName}.type_enum
359
+ INSERT INTO ${tableName}_attributes (job_id, symbol, dimension, value, type)
360
+ SELECT id, $2, $3, ln($4::double precision)::text, $5::${schemaName}.type_enum
335
361
  FROM valid_job
336
- ON CONFLICT (job_id, field) DO UPDATE
337
- SET value = (COALESCE(${tableName}_attributes.value::double precision, 0) + ln($3::double precision))::text
362
+ ON CONFLICT (job_id, symbol, dimension) DO UPDATE
363
+ SET value = (COALESCE(${tableName}_attributes.value::double precision, 0) + ln($4::double precision))::text
338
364
  RETURNING value as new_value
339
365
  `;
340
- params.push(key, field, value, 'udata');
366
+ params.push(key, fieldSym, fieldDim, value, 'udata');
341
367
  }
342
368
  return { sql, params };
343
369
  }
@@ -347,34 +373,35 @@ function createUdataOperations(context) {
347
373
  const params = [];
348
374
  let sql = '';
349
375
  if (replayId) {
376
+ const { symbol: replaySym, dimension: replayDim } = (0, utils_1.splitField)(replayId);
350
377
  sql = `
351
378
  WITH field_data AS (
352
- SELECT jsonb_object_agg(a.field, a.value) as field_values
379
+ SELECT jsonb_object_agg(a.symbol, a.value) as field_values
353
380
  FROM ${tableName} j
354
- LEFT JOIN ${tableName}_attributes a ON j.id = a.job_id
381
+ LEFT JOIN ${tableName}_attributes a ON j.id = a.job_id
355
382
  WHERE j.key = $1 AND j.is_live
356
- AND a.type = 'udata' AND a.field LIKE '\\_%'
383
+ AND a.type = 'udata' AND a.symbol LIKE '\\_%'
357
384
  ),
358
385
  replay_insert AS (
359
- INSERT INTO ${tableName}_attributes (job_id, field, value, type)
360
- SELECT j.id, $2, field_values::text, $3
386
+ INSERT INTO ${tableName}_attributes (job_id, symbol, dimension, value, type)
387
+ SELECT j.id, $2, $3, field_values::text, $4
361
388
  FROM ${tableName} j, field_data
362
389
  WHERE j.key = $1 AND j.is_live
363
- ON CONFLICT (job_id, field) DO UPDATE
390
+ ON CONFLICT (job_id, symbol, dimension) DO UPDATE
364
391
  SET value = EXCLUDED.value
365
392
  RETURNING 1
366
393
  )
367
394
  SELECT field_values as new_value FROM field_data
368
395
  `;
369
- params.push(key, replayId, (0, utils_1.deriveType)(replayId));
396
+ params.push(key, replaySym, replayDim, (0, utils_1.deriveType)(replayId));
370
397
  }
371
398
  else {
372
399
  sql = `
373
- SELECT jsonb_object_agg(a.field, a.value) as new_value
400
+ SELECT jsonb_object_agg(a.symbol, a.value) as new_value
374
401
  FROM ${tableName} j
375
- LEFT JOIN ${tableName}_attributes a ON j.id = a.job_id
402
+ LEFT JOIN ${tableName}_attributes a ON j.id = a.job_id
376
403
  WHERE j.key = $1 AND j.is_live
377
- AND a.type = 'udata' AND a.field LIKE '\\_%'
404
+ AND a.type = 'udata' AND a.symbol LIKE '\\_%'
378
405
  `;
379
406
  params.push(key);
380
407
  }
@@ -3,6 +3,22 @@ import { PostgresJobEnumType } from './types';
3
3
  * Determines if a table name represents a jobs table
4
4
  */
5
5
  export declare function isJobsTable(tableName: string): boolean;
6
+ /**
7
+ * Splits a merged field string into its symbol and dimension components.
8
+ * The first comma separates symbol from dimension.
9
+ * If no comma is present, dimension is empty string.
10
+ *
11
+ * Examples:
12
+ * 'ab,0,1,0' → { symbol: 'ab', dimension: ',0,1,0' }
13
+ * '-proxy,0,0' → { symbol: '-proxy', dimension: ',0,0' }
14
+ * '_email' → { symbol: '_email', dimension: '' }
15
+ * 'jid' → { symbol: 'jid', dimension: '' }
16
+ * ':' → { symbol: ':', dimension: '' }
17
+ */
18
+ export declare function splitField(field: string): {
19
+ symbol: string;
20
+ dimension: string;
21
+ };
6
22
  /**
7
23
  * Derives the enumerated `type` value based on the field name when
8
24
  * setting a field in a jobs table (a 'jobshash' table type).
@@ -1,6 +1,6 @@
1
1
  "use strict";
2
2
  Object.defineProperty(exports, "__esModule", { value: true });
3
- exports.processRegularRows = exports.processJobsRows = exports.deriveType = exports.isJobsTable = void 0;
3
+ exports.processRegularRows = exports.processJobsRows = exports.deriveType = exports.splitField = exports.isJobsTable = void 0;
4
4
  /**
5
5
  * Determines if a table name represents a jobs table
6
6
  */
@@ -8,29 +8,42 @@ function isJobsTable(tableName) {
8
8
  return tableName.endsWith('jobs');
9
9
  }
10
10
  exports.isJobsTable = isJobsTable;
11
+ /**
12
+ * Splits a merged field string into its symbol and dimension components.
13
+ * The first comma separates symbol from dimension.
14
+ * If no comma is present, dimension is empty string.
15
+ *
16
+ * Examples:
17
+ * 'ab,0,1,0' → { symbol: 'ab', dimension: ',0,1,0' }
18
+ * '-proxy,0,0' → { symbol: '-proxy', dimension: ',0,0' }
19
+ * '_email' → { symbol: '_email', dimension: '' }
20
+ * 'jid' → { symbol: 'jid', dimension: '' }
21
+ * ':' → { symbol: ':', dimension: '' }
22
+ */
23
+ function splitField(field) {
24
+ const i = field.indexOf(',');
25
+ if (i === -1)
26
+ return { symbol: field, dimension: '' };
27
+ return { symbol: field.substring(0, i), dimension: field.substring(i) };
28
+ }
29
+ exports.splitField = splitField;
11
30
  /**
12
31
  * Derives the enumerated `type` value based on the field name when
13
32
  * setting a field in a jobs table (a 'jobshash' table type).
14
33
  */
15
34
  function deriveType(fieldName) {
16
- if (fieldName === ':') {
35
+ const { symbol, dimension } = splitField(fieldName);
36
+ if (symbol === ':')
17
37
  return 'status';
18
- }
19
- else if (fieldName.startsWith('_')) {
38
+ if (symbol.startsWith('_'))
20
39
  return 'udata';
21
- }
22
- else if (fieldName.startsWith('-')) {
23
- return fieldName.includes(',') ? 'hmark' : 'jmark';
24
- }
25
- else if (fieldName.length === 3) {
26
- return 'jdata';
27
- }
28
- else if (fieldName.includes(',')) {
40
+ if (symbol.startsWith('-'))
41
+ return dimension ? 'hmark' : 'jmark';
42
+ if (dimension)
29
43
  return 'adata';
30
- }
31
- else {
32
- return 'other';
33
- }
44
+ if (symbol.length === 3)
45
+ return 'jdata';
46
+ return 'other';
34
47
  }
35
48
  exports.deriveType = deriveType;
36
49
  /**
@@ -159,6 +159,7 @@ declare class PostgresStoreService extends StoreService<ProviderClient, Provider
159
159
  findJobs(queryString?: string, limit?: number, batchSize?: number, cursor?: string): Promise<[string, string[]]>;
160
160
  findJobFields(jobId: string, fieldMatchPattern?: string, limit?: number, batchSize?: number, // Unused in SQL provider
161
161
  cursor?: string): Promise<[string, Record<string, string>]>;
162
+ setCancel(jobId: string, appId: string): Promise<void>;
162
163
  setThrottleRate(options: ThrottleOptions): Promise<void>;
163
164
  getThrottleRates(): Promise<StringStringType>;
164
165
  getThrottleRate(topic: string): Promise<number>;
@@ -28,6 +28,7 @@ const errors_1 = require("../../../../modules/errors");
28
28
  const key_1 = require("../../../../modules/key");
29
29
  const serializer_1 = require("../../../serializer");
30
30
  const utils_1 = require("../../../../modules/utils");
31
+ const utils_2 = require("./kvtypes/hash/utils");
31
32
  const enums_1 = require("../../../../modules/enums");
32
33
  const cache_1 = require("../../cache");
33
34
  const __1 = require("../..");
@@ -998,7 +999,7 @@ class PostgresStoreService extends __1.StoreService {
998
999
  paramIndex++;
999
1000
  // Add dimension condition if applicable
1000
1001
  if (dimension) {
1001
- conditions.push(`a.field LIKE $${paramIndex}`);
1002
+ conditions.push(`a.dimension LIKE $${paramIndex}`);
1002
1003
  params.push(`%${dimension}%`);
1003
1004
  paramIndex++;
1004
1005
  }
@@ -1012,7 +1013,7 @@ class PostgresStoreService extends __1.StoreService {
1012
1013
  WITH valid_job AS (
1013
1014
  ${validJobSql}
1014
1015
  )
1015
- SELECT a.field, a.value
1016
+ SELECT a.symbol || a.dimension AS field, a.value
1016
1017
  FROM ${tableName}_attributes a
1017
1018
  JOIN valid_job j ON a.job_id = j.id
1018
1019
  WHERE ${conditions.join(' AND ')}
@@ -1027,6 +1028,13 @@ class PostgresStoreService extends __1.StoreService {
1027
1028
  const nextCursor = res.rows.length < limit ? '0' : String(offset + res.rows.length);
1028
1029
  return [nextCursor, matchingFields];
1029
1030
  }
1031
+ async setCancel(jobId, appId) {
1032
+ const jobKey = this.mintKey(key_1.KeyType.JOB_STATE, { appId, jobId });
1033
+ // Write the cancel marker as a jmark-type field via hset.
1034
+ // The hash module's splitField/deriveType classifies '-cancelled-'
1035
+ // as type='jmark', which findJobFields returns on re-entry.
1036
+ await this.kvsql().hset(jobKey, { '-cancelled-': '1' });
1037
+ }
1030
1038
  async setThrottleRate(options) {
1031
1039
  const key = this.mintKey(key_1.KeyType.THROTTLE_RATE, { appId: this.appId });
1032
1040
  //engine guids are session specific. no need to persist
@@ -1294,7 +1302,8 @@ class PostgresStoreService extends __1.StoreService {
1294
1302
  const schemaName = this.kvsql().safeName(this.appId);
1295
1303
  const sql = GET_ACTIVITY_INPUTS.replace(/{schema}/g, schemaName);
1296
1304
  const jobKeyPattern = `hmsh:${this.appId}:j:-${workflowId}-%`;
1297
- const result = await this.pgClient.query(sql, [jobKeyPattern, symbolField]);
1305
+ const { symbol, dimension } = (0, utils_2.splitField)(symbolField);
1306
+ const result = await this.pgClient.query(sql, [jobKeyPattern, symbol, dimension]);
1298
1307
  const byJobId = new Map();
1299
1308
  const byNameIndex = new Map();
1300
1309
  for (const row of result.rows) {
@@ -1329,7 +1338,8 @@ class PostgresStoreService extends __1.StoreService {
1329
1338
  const { buildChildWorkflowInputsQuery } = await Promise.resolve().then(() => __importStar(require('./exporter-sql')));
1330
1339
  const schemaName = this.kvsql().safeName(this.appId);
1331
1340
  const sql = buildChildWorkflowInputsQuery(childJobKeys.length, schemaName);
1332
- const result = await this.pgClient.query(sql, [...childJobKeys, symbolField]);
1341
+ const { symbol, dimension } = (0, utils_2.splitField)(symbolField);
1342
+ const result = await this.pgClient.query(sql, [...childJobKeys, symbol, dimension]);
1333
1343
  const childInputMap = new Map();
1334
1344
  for (const row of result.rows) {
1335
1345
  const jobKey = row.key;
@@ -3,6 +3,8 @@ import { ProviderClient, ProviderTransaction } from '../../types/provider';
3
3
  import { StreamInitializable } from './providers/stream-initializable';
4
4
  import { StreamService } from './index';
5
5
  declare class StreamServiceFactory {
6
- static init(provider: ProviderClient, storeProvider: ProviderClient, namespace: string, appId: string, logger: ILogger): Promise<StreamService<ProviderClient, ProviderTransaction> & StreamInitializable>;
6
+ static init(provider: ProviderClient, storeProvider: ProviderClient, namespace: string, appId: string, logger: ILogger, options?: {
7
+ securedWorker?: boolean;
8
+ }): Promise<StreamService<ProviderClient, ProviderTransaction> & StreamInitializable>;
7
9
  }
8
10
  export { StreamServiceFactory };
@@ -5,14 +5,14 @@ const utils_1 = require("../../modules/utils");
5
5
  const nats_1 = require("./providers/nats/nats");
6
6
  const postgres_1 = require("./providers/postgres/postgres");
7
7
  class StreamServiceFactory {
8
- static async init(provider, storeProvider, namespace, appId, logger) {
8
+ static async init(provider, storeProvider, namespace, appId, logger, options) {
9
9
  let service;
10
10
  const providerType = (0, utils_1.identifyProvider)(provider);
11
11
  if (providerType === 'nats') {
12
12
  service = new nats_1.NatsStreamService(provider, storeProvider);
13
13
  }
14
14
  else if (providerType === 'postgres') {
15
- service = new postgres_1.PostgresStreamService(provider, storeProvider);
15
+ service = new postgres_1.PostgresStreamService(provider, storeProvider, { securedWorker: options?.securedWorker });
16
16
  } //etc register other providers here
17
17
  await service.init(namespace, appId, logger);
18
18
  return service;
@@ -61,6 +61,7 @@ export declare abstract class StreamService<ClientProvider extends ProviderClien
61
61
  supportsTrimming: boolean;
62
62
  supportsRetry: boolean;
63
63
  supportsNotifications?: boolean;
64
+ supportsParallelProcessing?: boolean;
64
65
  maxMessageSize: number;
65
66
  maxBatchSize: number;
66
67
  };
@@ -53,6 +53,7 @@ declare class NatsStreamService extends StreamService<NatsClientType, NatsPubAck
53
53
  supportsTrimming: boolean;
54
54
  supportsRetry: boolean;
55
55
  supportsNotifications: boolean;
56
+ supportsParallelProcessing: boolean;
56
57
  maxMessageSize: number;
57
58
  maxBatchSize: number;
58
59
  };
@@ -217,6 +217,7 @@ class NatsStreamService extends index_1.StreamService {
217
217
  supportsTrimming: true,
218
218
  supportsRetry: false,
219
219
  supportsNotifications: false,
220
+ supportsParallelProcessing: false,
220
221
  maxMessageSize: 1024 * 1024,
221
222
  maxBatchSize: 256,
222
223
  };