@brandboostinggmbh/observable-workflows 0.10.0 → 0.11.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/index.d.ts CHANGED
@@ -37,7 +37,8 @@ declare function insertWorkflowRecord(options: InternalWorkflowContextOptions, {
37
37
  startTime,
38
38
  endTime,
39
39
  parentInstanceId,
40
- tenantId
40
+ tenantId,
41
+ triggerId
41
42
  }: {
42
43
  instanceId: string;
43
44
  workflowType: string;
@@ -49,6 +50,7 @@ declare function insertWorkflowRecord(options: InternalWorkflowContextOptions, {
49
50
  endTime?: number | null;
50
51
  parentInstanceId?: string | null;
51
52
  tenantId: string;
53
+ triggerId?: string | null;
52
54
  }): Promise<D1Result<Record<string, unknown>>>;
53
55
  declare function insertStepRecordFull(context: StepContextOptions, {
54
56
  instanceId,
@@ -133,6 +135,7 @@ declare function workflowTableRowToWorkflowRun(row: {
133
135
  startTime: number;
134
136
  endTime: number | null;
135
137
  parentInstanceId: string | null;
138
+ triggerId: string | null;
136
139
  }, serializer: Serializer, externalBlobStorage?: ExternalBlobStorage): Promise<WorkflowRun>;
137
140
  declare function updateWorkflowName(context: {
138
141
  D1: D1Database;
@@ -213,6 +216,7 @@ type WorkflowRun = {
213
216
  startTime: number;
214
217
  endTime: number | null;
215
218
  parentInstanceId: string | null;
219
+ triggerId: string | null;
216
220
  steps?: Step[];
217
221
  /** All Logs associated with the workflow. Includes logs that are part of steps */
218
222
  logs?: Log[];
@@ -346,13 +350,7 @@ type WorkflowContextOptions = {
346
350
  };
347
351
  type InternalWorkflowContextOptions = WorkflowContextOptions & Required<Pick<WorkflowContextOptions, 'serializer' | 'idFactory'>>;
348
352
  type WorkflowContextInstance = {
349
- call: <I>(args: {
350
- workflow: WorkflowFunction<I>;
351
- input: I;
352
- workflowName: string;
353
- tenantId: string;
354
- parentInstanceId?: string | undefined;
355
- }) => Promise<void>;
353
+ call: <I>(args: WorkflowCallParams<I>) => Promise<void>;
356
354
  retry: <I>(workflow: WorkflowFunction<I>, retryInstanceId: string, retryOptions?: RetryWorkflowOptions | undefined) => Promise<void>;
357
355
  };
358
356
  type QueueWorkflowContextOptions = {
@@ -391,6 +389,17 @@ type RetryWorkflowOptions = {
391
389
  /** If true the retry will attempt to reuse all results from successful steps. Defaults to True */
392
390
  reuseSuccessfulSteps?: boolean;
393
391
  };
392
+ type WorkflowCallParams<I> = {
393
+ workflow: WorkflowFunction<I>;
394
+ input: I;
395
+ workflowName: string;
396
+ tenantId: string;
397
+ parentInstanceId?: string | undefined;
398
+ /** If true and a parentInstanceId is provided, this run will attempt to reuse all results from successful steps. Defaults to true */
399
+ reuseSuccessfulSteps?: boolean;
400
+ /** Optional trigger identifier for workflow correlation */
401
+ triggerId?: string | null;
402
+ };
394
403
 
395
404
  //#endregion
396
405
  //#region src/observableWorkflows/createLogAccessor.d.ts
@@ -413,6 +422,7 @@ declare const createLogAccessor: (context: {
413
422
  listWorkflows: (limit: number, offset: number, filter?: WorkflowFilter) => Promise<WorkflowRun[]>;
414
423
  getWorkflow: (instanceId: string, populateData?: boolean) => Promise<WorkflowRun | null>;
415
424
  getWorkflowTypesByTenantId: (tenantId: string) => Promise<string[]>;
425
+ getWorkflowByTriggerId: (triggerId: string) => Promise<WorkflowRun | null>;
416
426
  getPropertiesKeys: (instanceId?: string) => Promise<WorkflowPropertyDefinition[]>;
417
427
  };
418
428
 
package/dist/index.js CHANGED
@@ -7,8 +7,10 @@ async function detectSchemaVersion(db) {
7
7
  let workflowTable = "missing";
8
8
  if (workflowTableInfo) {
9
9
  const hasInputRef = workflowTableInfo.sql.includes("inputRef");
10
+ const hasTriggerId = workflowTableInfo.sql.includes("triggerId");
10
11
  const inputHasNotNull = workflowTableInfo.sql.includes("input TEXT NOT NULL");
11
- if (hasInputRef && !inputHasNotNull) workflowTable = "v2+";
12
+ if (hasTriggerId && hasInputRef && !inputHasNotNull) workflowTable = "v4";
13
+ else if (hasInputRef && !inputHasNotNull && !hasTriggerId) workflowTable = "v2";
12
14
  else if (!hasInputRef && inputHasNotNull) workflowTable = "v1";
13
15
  else workflowTable = "v1";
14
16
  }
@@ -17,7 +19,7 @@ async function detectSchemaVersion(db) {
17
19
  if (stepTableInfo) {
18
20
  const hasResultRef = stepTableInfo.sql.includes("resultRef");
19
21
  const hasErrorRef = stepTableInfo.sql.includes("errorRef");
20
- if (hasResultRef && hasErrorRef) stepTable = "v2+";
22
+ if (hasResultRef && hasErrorRef) stepTable = "v2";
21
23
  else stepTable = "v1";
22
24
  }
23
25
  const logTableInfo = await db.prepare(`SELECT sql FROM sqlite_master WHERE type='table' AND name='LogTable'`).first();
@@ -34,6 +36,83 @@ async function detectSchemaVersion(db) {
34
36
  };
35
37
  }
36
38
  /**
39
+ * Migrate WorkflowTable from V1 to V2 schema
40
+ * Adds inputRef column and makes input nullable
41
+ */
42
+ async function migrateWorkflowTableV1ToV2(db) {
43
+ const workflowTableInfo = await db.prepare(`SELECT sql FROM sqlite_master WHERE type='table' AND name='WorkflowTable'`).first();
44
+ const hasInputRef = workflowTableInfo.sql.includes("inputRef");
45
+ const inputHasNotNull = workflowTableInfo.sql.includes("input TEXT NOT NULL");
46
+ if (!hasInputRef || inputHasNotNull) await db.batch([
47
+ db.prepare(
48
+ /* sql */
49
+ `CREATE TABLE WorkflowTable_new (
50
+ instanceId TEXT NOT NULL,
51
+ workflowType TEXT NOT NULL,
52
+ workflowName TEXT NOT NULL,
53
+ workflowMetadata TEXT NOT NULL,
54
+ input TEXT,
55
+ inputRef TEXT,
56
+ tenantId TEXT NOT NULL,
57
+ workflowStatus TEXT NOT NULL,
58
+ startTime INTEGER NOT NULL,
59
+ endTime INTEGER,
60
+ parentInstanceId TEXT,
61
+ PRIMARY KEY (instanceId)
62
+ )`
63
+ ),
64
+ db.prepare(
65
+ /* sql */
66
+ `INSERT INTO WorkflowTable_new
67
+ SELECT instanceId, workflowType, workflowName, workflowMetadata,
68
+ CASE WHEN input = '' THEN NULL ELSE input END as input,
69
+ NULL as inputRef, tenantId, workflowStatus, startTime, endTime, parentInstanceId
70
+ FROM WorkflowTable`
71
+ ),
72
+ db.prepare(`DROP TABLE WorkflowTable`),
73
+ db.prepare(`ALTER TABLE WorkflowTable_new RENAME TO WorkflowTable`)
74
+ ]);
75
+ }
76
+ /**
77
+ * Migrate WorkflowTable from V2/V3 to V4 schema
78
+ * Adds triggerId column with UNIQUE constraint
79
+ */
80
+ async function migrateWorkflowTableV2V3ToV4(db) {
81
+ const workflowTableInfo = await db.prepare(`SELECT sql FROM sqlite_master WHERE type='table' AND name='WorkflowTable'`).first();
82
+ const hasTriggerId = workflowTableInfo.sql.includes("triggerId");
83
+ if (!hasTriggerId) await db.batch([
84
+ db.prepare(
85
+ /* sql */
86
+ `CREATE TABLE WorkflowTable_new (
87
+ instanceId TEXT NOT NULL,
88
+ workflowType TEXT NOT NULL,
89
+ workflowName TEXT NOT NULL,
90
+ workflowMetadata TEXT NOT NULL,
91
+ input TEXT,
92
+ inputRef TEXT,
93
+ tenantId TEXT NOT NULL,
94
+ workflowStatus TEXT NOT NULL,
95
+ startTime INTEGER NOT NULL,
96
+ endTime INTEGER,
97
+ parentInstanceId TEXT,
98
+ triggerId TEXT,
99
+ PRIMARY KEY (instanceId),
100
+ UNIQUE (triggerId)
101
+ )`
102
+ ),
103
+ db.prepare(
104
+ /* sql */
105
+ `INSERT INTO WorkflowTable_new
106
+ SELECT instanceId, workflowType, workflowName, workflowMetadata,
107
+ input, inputRef, tenantId, workflowStatus, startTime, endTime, parentInstanceId,
108
+ NULL as triggerId
109
+ FROM WorkflowTable`
110
+ ),
111
+ db.prepare(`DROP TABLE WorkflowTable`),
112
+ db.prepare(`ALTER TABLE WorkflowTable_new RENAME TO WorkflowTable`)
113
+ ]);
114
+ }
115
+ /**
37
116
  * Create or migrate WorkflowTable to the latest schema
38
117
  */
39
118
  async function migrateWorkflowTable(db, currentVersion) {
@@ -52,45 +131,21 @@ async function migrateWorkflowTable(db, currentVersion) {
52
131
  startTime INTEGER NOT NULL,
53
132
  endTime INTEGER,
54
133
  parentInstanceId TEXT,
55
- PRIMARY KEY (instanceId)
134
+ triggerId TEXT,
135
+ PRIMARY KEY (instanceId),
136
+ UNIQUE (triggerId)
56
137
  )`
57
138
  ).run();
58
139
  return;
59
140
  }
60
141
  if (currentVersion === "v1") {
61
- const workflowTableInfo = await db.prepare(`SELECT sql FROM sqlite_master WHERE type='table' AND name='WorkflowTable'`).first();
62
- const hasInputRef = workflowTableInfo.sql.includes("inputRef");
63
- if (!hasInputRef) await db.prepare(`ALTER TABLE WorkflowTable ADD COLUMN inputRef TEXT`).run();
64
- const inputHasNotNull = workflowTableInfo.sql.includes("input TEXT NOT NULL");
65
- if (inputHasNotNull) await db.batch([
66
- db.prepare(
67
- /* sql */
68
- `CREATE TABLE WorkflowTable_new (
69
- instanceId TEXT NOT NULL,
70
- workflowType TEXT NOT NULL,
71
- workflowName TEXT NOT NULL,
72
- workflowMetadata TEXT NOT NULL,
73
- input TEXT,
74
- inputRef TEXT,
75
- tenantId TEXT NOT NULL,
76
- workflowStatus TEXT NOT NULL,
77
- startTime INTEGER NOT NULL,
78
- endTime INTEGER,
79
- parentInstanceId TEXT,
80
- PRIMARY KEY (instanceId)
81
- )`
82
- ),
83
- db.prepare(
84
- /* sql */
85
- `INSERT INTO WorkflowTable_new
86
- SELECT instanceId, workflowType, workflowName, workflowMetadata,
87
- CASE WHEN input = '' THEN NULL ELSE input END as input,
88
- inputRef, tenantId, workflowStatus, startTime, endTime, parentInstanceId
89
- FROM WorkflowTable`
90
- ),
91
- db.prepare(`DROP TABLE WorkflowTable`),
92
- db.prepare(`ALTER TABLE WorkflowTable_new RENAME TO WorkflowTable`)
93
- ]);
142
+ await migrateWorkflowTableV1ToV2(db);
143
+ await migrateWorkflowTableV2V3ToV4(db);
144
+ return;
145
+ }
146
+ if (currentVersion === "v2") {
147
+ await migrateWorkflowTableV2V3ToV4(db);
148
+ return;
94
149
  }
95
150
  }
96
151
  /**
@@ -173,6 +228,7 @@ async function migrateWorkflowPropertiesTable(db, currentVersion) {
173
228
  */
174
229
  async function createIndexes(db) {
175
230
  await db.prepare(`CREATE INDEX IF NOT EXISTS idx_workflows_parent_instance_id ON WorkflowTable (parentInstanceId)`).run();
231
+ await db.prepare(`CREATE INDEX IF NOT EXISTS idx_workflows_trigger_id ON WorkflowTable (triggerId)`).run();
176
232
  }
177
233
  /**
178
234
  * Main migration function that ensures all tables exist and are up-to-date.
@@ -200,14 +256,14 @@ function finalizeWorkflowRecord(options, { workflowStatus, endTime, instanceId }
200
256
  WHERE instanceId = ?`
201
257
  ).bind(workflowStatus, endTime, instanceId).run();
202
258
  }
203
- async function insertWorkflowRecord(options, { instanceId, workflowType, workflowName, workflowMetadata, input, workflowStatus, startTime, endTime, parentInstanceId, tenantId }) {
259
+ async function insertWorkflowRecord(options, { instanceId, workflowType, workflowName, workflowMetadata, input, workflowStatus, startTime, endTime, parentInstanceId, tenantId, triggerId }) {
204
260
  const { data: inputData, externalRef: inputRef } = await serializeWithExternalStorage(input, options.serializer, options.externalBlobStorage);
205
261
  return options.D1.prepare(
206
262
  /* sql */
207
263
  `INSERT INTO WorkflowTable
208
- (instanceId, workflowType, workflowName, workflowMetadata, input, inputRef, tenantId, workflowStatus, startTime, endTime, parentInstanceId)
209
- VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)`
210
- ).bind(instanceId, workflowType, workflowName, options.serializer.serialize(workflowMetadata), inputData, inputRef, tenantId, workflowStatus, startTime, endTime ?? null, parentInstanceId ?? null).run();
264
+ (instanceId, workflowType, workflowName, workflowMetadata, input, inputRef, tenantId, workflowStatus, startTime, endTime, parentInstanceId, triggerId)
265
+ VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)`
266
+ ).bind(instanceId, workflowType, workflowName, options.serializer.serialize(workflowMetadata), inputData, inputRef, tenantId, workflowStatus, startTime, endTime ?? null, parentInstanceId ?? null, triggerId ?? null).run();
211
267
  }
212
268
  function insertStepRecordFull(context, { instanceId, name, status, metadata, startTime, endTime, result, error, resultRef, errorRef }) {
213
269
  return context.D1.prepare(
@@ -239,13 +295,68 @@ async function getStepRecord(context, stepName, instanceId) {
239
295
  };
240
296
  } else return null;
241
297
  }
298
+ /**
299
+ * FUTURE ENHANCEMENT: External Blob Storage for Logs
300
+ *
301
+ * Currently, log messages are stored directly in the LogTable's `log` TEXT column.
302
+ * For very large log messages, we may want to implement external blob storage similar
303
+ * to how step results and errors are handled.
304
+ *
305
+ * To implement this, the following changes would be needed:
306
+ *
307
+ * 1. Database Schema Migration:
308
+ * - Add `logRef` TEXT column to LogTable
309
+ * - Update migrateLogTable() in migrations.ts to handle schema version upgrade
310
+ *
311
+ * 2. Storage Functions:
312
+ * - Modify pushLogToDB() to use serializeWithExternalStorage() for log messages
313
+ * - Update LogBatcher to handle external storage for large logs
314
+ * - Consider batch operations for external storage efficiency
315
+ *
316
+ * 3. Retrieval Functions:
317
+ * - Update log retrieval in createLogAccessor.ts to use deserializeWithExternalStorage()
318
+ * - Handle both direct and external log data when populating Log objects
319
+ *
320
+ * 4. Configuration:
321
+ * - Consider separate threshold for logs vs step data (logs might need smaller threshold)
322
+ * - Add optional log-specific external storage configuration
323
+ *
324
+ * 5. Migration Strategy:
325
+ * - Decide how to handle existing large logs during schema migration
326
+ * - Consider background migration process for large log datasets
327
+ */
328
+ const MAX_LOG_SIZE = 1024 * 64;
329
+ function truncateLogMessage(message) {
330
+ const encoder = new TextEncoder();
331
+ const messageBytes = encoder.encode(message);
332
+ if (messageBytes.length <= MAX_LOG_SIZE) return message;
333
+ const truncateMarker = "\n... [LOG TRUNCATED - Original size: " + messageBytes.length + " bytes] ...";
334
+ const markerBytes = encoder.encode(truncateMarker);
335
+ const availableBytes = MAX_LOG_SIZE - markerBytes.length;
336
+ let truncatedBytes = messageBytes.slice(0, availableBytes);
337
+ const decoder = new TextDecoder("utf-8", {
338
+ fatal: false,
339
+ ignoreBOM: false
340
+ });
341
+ let truncatedMessage = decoder.decode(truncatedBytes);
342
+ if (truncatedMessage.includes("�")) for (let i = availableBytes - 3; i > availableBytes - 10 && i > 0; i--) {
343
+ truncatedBytes = messageBytes.slice(0, i);
344
+ const testMessage = decoder.decode(truncatedBytes);
345
+ if (!testMessage.includes("�")) {
346
+ truncatedMessage = testMessage;
347
+ break;
348
+ }
349
+ }
350
+ return truncatedMessage + truncateMarker;
351
+ }
242
352
  function pushLogToDB(options, { instanceId, stepName, message, timestamp, type, logOrder, tenantId }) {
353
+ const truncatedMessage = truncateLogMessage(message);
243
354
  return options.D1.prepare(
244
355
  /* sql */
245
356
  `INSERT INTO LogTable
246
357
  (instanceId, stepName, log, timestamp, type, logOrder, tenantId)
247
358
  VALUES (?, ?, ?, ?, ?, ?, ?)`
248
- ).bind(instanceId, stepName, message, timestamp, type, logOrder, tenantId).run();
359
+ ).bind(instanceId, stepName, truncatedMessage, timestamp, type, logOrder, tenantId).run();
249
360
  }
250
361
  var LogBatcher = class {
251
362
  batch = [];
@@ -256,9 +367,16 @@ var LogBatcher = class {
256
367
  this.batchSize = batchSize;
257
368
  this.flushInterval = flushInterval;
258
369
  }
259
- addLog(entry) {
260
- if (this.isDestroyed) return pushLogToDB(this.options, entry);
261
- this.batch.push(entry);
370
+ async addLog(entry) {
371
+ if (this.isDestroyed) {
372
+ await pushLogToDB(this.options, entry);
373
+ return Promise.resolve();
374
+ }
375
+ const truncatedEntry = {
376
+ ...entry,
377
+ message: truncateLogMessage(entry.message)
378
+ };
379
+ this.batch.push(truncatedEntry);
262
380
  if (!this.flushTimer) this.flushTimer = setTimeout(() => {
263
381
  this.flush();
264
382
  }, this.flushInterval);
@@ -398,7 +516,8 @@ async function workflowTableRowToWorkflowRun(row, serializer, externalBlobStorag
398
516
  workflowStatus: row.workflowStatus,
399
517
  startTime: row.startTime,
400
518
  endTime: row.endTime,
401
- parentInstanceId: row.parentInstanceId
519
+ parentInstanceId: row.parentInstanceId,
520
+ triggerId: row.triggerId
402
521
  };
403
522
  }
404
523
  async function updateWorkflowName(context, instanceId, newWorkflowName) {
@@ -643,6 +762,17 @@ const createLogAccessor = (context) => {
643
762
  }
644
763
  return null;
645
764
  };
765
+ const getWorkflowByTriggerId = async (triggerId) => {
766
+ const result = await context.D1.prepare(
767
+ /* sql */
768
+ `SELECT * FROM WorkflowTable WHERE triggerId = ? AND tenantId = ?`
769
+ ).bind(triggerId, context.tenantId).first();
770
+ if (result) {
771
+ const workflow = await workflowTableRowToWorkflowRun(result, internalSerializer, context.externalBlobStorage);
772
+ return workflow;
773
+ }
774
+ return null;
775
+ };
646
776
  const getWorkflowTypesByTenantId = async (tenantId) => {
647
777
  const result = await context.D1.prepare(
648
778
  /* sql */
@@ -761,6 +891,7 @@ const createLogAccessor = (context) => {
761
891
  listWorkflows,
762
892
  getWorkflow,
763
893
  getWorkflowTypesByTenantId,
894
+ getWorkflowByTriggerId,
764
895
  getPropertiesKeys
765
896
  };
766
897
  };
@@ -937,11 +1068,21 @@ function createWorkflowContext(options) {
937
1068
  serializer: options.serializer ?? defaultSerializer,
938
1069
  idFactory: options.idFactory ?? defaultIdFactory
939
1070
  };
940
- const call = async ({ workflow, input, workflowName, tenantId, parentInstanceId, reuseSuccessfulSteps }) => {
1071
+ const call = async ({ workflow, input, workflowName, tenantId, parentInstanceId, reuseSuccessfulSteps, triggerId }) => {
941
1072
  if (!ensuredTables) {
942
1073
  await ensureTables(options.D1);
943
1074
  ensuredTables = true;
944
1075
  }
1076
+ if (triggerId) {
1077
+ const logAccessor = createLogAccessor({
1078
+ D1: internalContext.D1,
1079
+ externalBlobStorage: internalContext.externalBlobStorage,
1080
+ serializer: internalContext.serializer,
1081
+ tenantId
1082
+ });
1083
+ const existingWorkflow = await logAccessor.getWorkflowByTriggerId(triggerId);
1084
+ throw new Error(`Workflow with triggerId ${triggerId} already exists: ${existingWorkflow?.instanceId}`);
1085
+ }
945
1086
  const instanceId = internalContext.idFactory();
946
1087
  const startTime = Date.now();
947
1088
  await insertWorkflowRecord(internalContext, {
@@ -954,7 +1095,8 @@ function createWorkflowContext(options) {
954
1095
  startTime,
955
1096
  endTime: null,
956
1097
  parentInstanceId,
957
- tenantId
1098
+ tenantId,
1099
+ triggerId
958
1100
  });
959
1101
  const logBatcher = new LogBatcher(internalContext);
960
1102
  let logOrder = 0;
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@brandboostinggmbh/observable-workflows",
3
- "version": "0.10.0",
3
+ "version": "0.11.0",
4
4
  "description": "My awesome typescript library",
5
5
  "type": "module",
6
6
  "license": "MIT",