@brandboostinggmbh/observable-workflows 0.21.1 → 0.22.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/CHANGELOG.md ADDED
@@ -0,0 +1,830 @@
1
+ # Changelog
2
+
3
+ All notable changes to this project will be documented in this file.
4
+
5
+ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
6
+ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
7
+
8
+ ## NPM Changelog Conventions
9
+
10
+ For NPM packages, changelogs are conventionally stored in a `CHANGELOG.md` file at the root of the repository. When publishing to NPM:
11
+
12
+ - The changelog is included in the published package as long as the file is not excluded via `.npmignore` or omitted from the `files` allowlist in `package.json`
13
+ - Many tools and package registries (including npm's website) will display the changelog from this file
14
+ - Users can inspect package metadata with `npm view @brandboostinggmbh/observable-workflows`; the changelog itself ships as `CHANGELOG.md` inside the installed package
15
+ - The changelog file is also visible in the GitHub repository for easy reference
16
+
17
+ ## [Unreleased]
18
+
19
+ ### 🚧 Work in Progress
20
+
21
+ Changes that will be included in the next release will be documented here.
22
+
23
+ ---
24
+
25
+ ## [v0.22.1] - 2026-02-19
26
+
27
+ ### 📚 Documentation
28
+
29
+ - Add comprehensive CHANGELOG.md with full project history from v0.1.0 to v0.22.0
30
+ - Include CHANGELOG.md in published NPM package
31
+ - Add changelog reference section to README.md
32
+
33
+ ---
34
+
35
+ ## [v0.22.0] - 2026-02-19
36
+
37
+ ### ✨ Features
38
+
39
+ - **Delayed Workflow Scheduling**: Workflows can now be scheduled with a delay before execution
40
+ - New `delaySeconds` parameter in `enqueueWorkflow()` and `enqueueWorkflowBatch()` for specifying execution delays (0-2592000 seconds / 30 days)
41
+ - New `scheduledFor` timestamp field in `WorkflowQueueMessage` and `WorkflowRun` tracking when delayed workflows should execute
42
+ - Database schema V8 adds `delaySeconds` (INTEGER) and `scheduledFor` (INTEGER) columns to WorkflowTable
43
+ - Automatic migration from V6/V7 to V8 preserves existing data while adding new columns
44
+ - Queue consumer honors delay by using `delaySeconds` in Cloudflare Queue's batch send API
45
+ - Workflows remain in 'scheduled' status until delay expires and execution begins
46
+ - New `getWorkflowsScheduledBefore()` function for querying workflows scheduled to execute before a specific timestamp
47
+ - Enhanced `listWorkflows()` with `scheduledBefore` and `scheduledAfter` filter options for time-based scheduled workflow queries
48
+
49
+ ### 🔧 Improvements
50
+
51
+ - Database schema improvements with new columns for delayed execution tracking
52
+ - Migration system enhanced to support V6→V8 and V7→V8 upgrade paths
53
+ - Queue message validation ensures delaySeconds is within valid range (0-2592000)
54
+
55
+ ## [v0.21.1] - 2026-02-11
56
+
57
+ *Production release with no additional changes from v0.21.1-beta.2*
58
+
59
+ ## [v0.21.1-beta.2] - 2026-02-11
60
+
61
+ ### 💥 Breaking Changes
62
+
63
+ - `enqueueWorkflowBatch()` now enforces a maximum batch size limit of 100 workflows and throws an error if exceeded, preventing potential performance issues and queue system overload
64
+
65
+ ### 🔧 Improvements
66
+
67
+ - Batch size validation added before processing to fail fast with clear error message
68
+ - Improved error messaging for batch size violations
69
+
70
+ ## [v0.21.1-beta.1] - 2026-02-11
71
+
72
+ *Version bump with no user-facing changes*
73
+
74
+ ## [v0.21.0-beta.1] - 2026-02-05
75
+
76
+ ### ✨ Features
77
+
78
+ - **Workflow Trigger Identification**: Support for tracking the origin or cause of workflow execution via `triggerId`
79
+ - New `triggerId` parameter in `enqueueWorkflow()` and `enqueueWorkflowBatch()` for identifying what triggered the workflow execution
80
+ - New `getWorkflowByTriggerId()` function in log accessor for retrieving workflows by their trigger identifier
81
+ - `WorkflowQueueMessage` includes `triggerId` field for queue-based workflow execution tracking
82
+ - `WorkflowRun` type includes `triggerId: string | null` field for trigger correlation in workflow records
83
+
84
+ ### 💥 Breaking Changes
85
+
86
+ - **Function Signature Change**: `enqueueRetryWorkflow()` now accepts an object parameter instead of positional parameters
87
+ - Before: `enqueueRetryWorkflow(instanceId, retryConfig)`
88
+ - After: `enqueueRetryWorkflow({ instanceId, retryConfig, triggerId })`
89
+ - Migration: Wrap existing calls in object syntax, e.g., `enqueueRetryWorkflow({ instanceId, retryConfig })`
90
+
91
+ ### 🔧 Improvements
92
+
93
+ - Database schema V6 adds `triggerId` (TEXT, UNIQUE, nullable) column to WorkflowTable for trigger identification
94
+ - Automatic migration from V5 to V6 preserves existing data while adding trigger tracking capability
95
+ - Enhanced workflow enqueue operations to propagate `triggerId` through the execution pipeline
96
+
97
+ ## [v0.20.3-beta.1] - 2026-01-26
98
+
99
+ ### 🔧 Improvements
100
+
101
+ - **Query Optimization**: `getPropertiesKeys()` split into two separate optimized queries instead of using `(? IS NULL OR instanceId = ?)` pattern
102
+ - When `instanceId` provided: Uses `instanceId` index directly for targeted query
103
+ - When `instanceId` omitted: Uses `tenant_key` composite index for tenant-wide query
104
+ - Eliminates OR pattern that prevented SQLite from using indexes effectively
105
+ - Improves query performance by allowing index usage in both code paths
106
+
107
+ ## [v0.20.2-beta.1] - 2026-01-26
108
+
109
+ ### 🔧 Improvements
110
+
111
+ - **Query Optimization**: `listWorkflows()` now only uses `DISTINCT` when JOIN operations are present (property filters)
112
+ - Removes unnecessary `DISTINCT` overhead when querying workflows without property filters
113
+ - Significantly improves query performance for simple workflow listings by avoiding full result set processing
114
+ - Maintains `DISTINCT` when needed (with JOINs) to prevent duplicate results
115
+ - Added performance tracking with D1 metadata logging for query analysis
116
+
117
+ ## [v0.20.1-beta.1] - 2026-01-20
118
+
119
+ ### ✨ Features
120
+
121
+ - **Cleanup Manager Enhancement**: Add optional support for cleaning up scheduled and waiting workflows
122
+ - New `scheduledWorkflows` option (defaults to `false`) in `DeleteConfig` for including workflows in 'scheduled' status
123
+ - New `waitingWorkflows` option (defaults to `false`) in `DeleteConfig` for including workflows in 'waiting' status
124
+ - Enables selective cleanup of workflows in pre-execution states when explicitly opted in
125
+ - Maintains backward compatibility by keeping these statuses excluded by default
126
+
127
+ ### 🔧 Improvements
128
+
129
+ - Comprehensive test coverage for scheduled and waiting workflow cleanup scenarios
130
+ - Test coverage for multi-status cleanup operations with all flags enabled
131
+
132
+ ## [v0.20.0-beta.6] - 2026-01-19
133
+
134
+ ### 🔧 Improvements
135
+
136
+ - **Batch Performance Optimization**: `enqueueWorkflowBatch()` now uses `prepareWorkflowInsertStatements()` helper for efficient batch workflow insertion
137
+ - Pre-prepares all workflow insert statements before executing batch operation
138
+ - Reduces code duplication between single and batch workflow insertion logic
139
+ - Maintains consistent workflow record creation across enqueue operations
140
+
141
+ ## [v0.20.0-beta.5] - 2026-01-19
142
+
143
+ ### ✨ Features
144
+
145
+ - **Customizable D1 Retry Configuration**: Users can now override the default D1 retry behavior
146
+ - New `retryConfig` optional parameter in `createWorkflowContext()`, `createQueueWorkflowContext()`, and `createLogAccessor()`
147
+ - Allows customizing `maxRetries`, `initialDelayMs`, and `maxDelayMs` for D1 operations
148
+ - Configuration is propagated to all internal operations that use D1 retry logic
149
+ - Defaults to existing behavior when not specified (5 retries, 100ms initial delay, 5000ms max delay)
150
+
151
+ ### 🔧 Improvements
152
+
153
+ - Export `RetryConfig` type from library for users to define custom retry configurations
154
+ - Retry configuration is now threaded through all context creation functions for consistent retry behavior
155
+
156
+ ## [v0.20.0-beta.4] - 2026-01-19
157
+
158
+ *Version bump with no user-facing changes*
159
+
160
+ ## [v0.20.0-beta.3] - 2026-01-19
161
+
162
+ *Version bump with no user-facing changes*
163
+
164
+ ## [v0.20.0-beta.2] - 2026-01-19
165
+
166
+ *Version bump with no user-facing changes*
167
+
168
+ ## [v0.20.0-beta.1] - 2026-01-19
169
+
170
+ ### ✨ Features
171
+
172
+ - **Comprehensive D1 Retry Logic**: All D1 database operations now include automatic retry with exponential backoff and jitter
173
+ - New `retryD1Operation()` helper function wraps all D1 queries with configurable retry logic
174
+ - Handles transient network errors (SQLITE_BUSY, connection timeouts, network failures)
175
+ - Default configuration: 5 max retries, 100ms initial delay, 5s max delay, exponential backoff with jitter
176
+ - Applied to all database operations: workflow CRUD, step CRUD, property CRUD, cleanup operations, statistics queries
177
+ - `RetryConfig` type allows customization of retry behavior (maxRetries, initialDelayMs, maxDelayMs)
178
+
179
+ ### 🔧 Improvements
180
+
181
+ - Improved resilience for all database operations across workflow lifecycle (creation, execution, logging, cleanup)
182
+ - Retry logic added to workflow insertion, step insertion, log insertion, property operations, and query operations
183
+ - Enhanced error handling for transient D1 failures with automatic recovery
184
+ - Consistent retry behavior across all database interaction points in the library
185
+
186
+ ## [v0.19.0-beta.4] - 2025-11-04
187
+
188
+ ### 🔧 Improvements
189
+
190
+ - Add composite database index on WorkflowTable (parentInstanceId, tenantId) to optimize queries filtering by parent workflows within a tenant, improving performance for hierarchical workflow queries
191
+
192
+ ## [v0.19.0-beta.3] - 2025-11-04
193
+
194
+ ### 🔧 Improvements
195
+
196
+ - Add database indexes on WorkflowTable for improved query performance:
197
+ - Index on `tenantId` column for tenant-scoped queries
198
+ - Index on `workflowStatus` column for status-based filtering
199
+ - Index on `parentInstanceId` column for parent-child workflow relationship queries
200
+ - Add `release:beta` npm script for publishing beta releases to NPM
201
+
202
+ ## [v0.19.0-beta.2] - 2025-10-23
203
+
204
+ *Version bump with no user-facing changes*
205
+
206
+ ## [v0.19.0-beta.1] - 2025-10-20
207
+
208
+ ### ✨ Features
209
+
210
+ - **Workflow Dependencies System**: Workflows can now depend on other workflows with full dependency management support
211
+ - New `createWorkflowDependency()` function to create a single dependency relationship between workflows
212
+ - New `createWorkflowDependencies()` function to create multiple dependency relationships atomically with workflow creation
213
+ - New `deleteWorkflowDependency()` function to remove dependency relationships between workflows
214
+ - New `WorkflowDependencies` database table with composite primary key and tenant-scoped queries
215
+ - Workflows in 'waiting' status hold execution until all dependencies complete
216
+ - New `populateDependencies` and `populateDependents` options in `getWorkflow()` and `listWorkflows()` for fetching dependency relationships
217
+ - **Scheduled Workflow Support**: Workflows can now be pre-created and scheduled for later execution
218
+ - New `scheduledInstanceId` parameter in `WorkflowQueueMessage` for pre-creating workflow records before execution
219
+ - Workflows created in 'scheduled' status when pre-defined, or 'waiting' status when dependencies exist
220
+ - `enqueueWorkflow()` and `enqueueWorkflowBatch()` now return `ScheduledWorkflowExecutionStub` with `instanceId` and `workflowType`
221
+ - Enables workflow scheduling patterns where workflow records are created ahead of time
222
+ - **Workflow Dependency Polling**: New `handlePollWaitingWorkflows()` function for automatic dependency resolution
223
+ - Polls waiting workflows from the database
224
+ - Automatically enqueues workflows when all their dependencies complete
225
+ - Transitions 'waiting' → 'scheduled' status when dependencies are satisfied
226
+ - Supports cross-tenant waiting workflow discovery with `ignoreTenant` option
227
+ - **Enhanced listWorkflows Options**:
228
+ - New `populateResult` option to include workflow results in responses
229
+ - New `populateDependencies` option to fetch and attach all dependency relationships
230
+ - New `populateDependents` option to fetch and attach all dependent relationships
231
+ - New `ignoreTenant` option for cross-tenant workflow queries (useful for dependency polling)
232
+ - **New WorkflowStatus Types**: Added 'scheduled' and 'waiting' workflow status values to support pre-execution workflow states
233
+ - **New Exports**:
234
+ - `createCleanupManager` function for workflow data retention management
235
+ - `WorkflowStatus` type for type-safe status usage
236
+ - `WorkflowDependency` and `WorkflowDependencyRelation` types for dependency handling
237
+ - `updateWorkflow()` function for updating workflow records after creation
238
+ - `workflowTableRowToWorkflowRun()` helper for converting database rows to workflow objects
239
+ - Workflow dependency helper functions: `prepareWorkflowDependencyStatement()`, `createWorkflowDependency()`, `createWorkflowDependencies()`, `deleteWorkflowDependency()`
240
+
241
+ ### 🔧 Improvements
242
+
243
+ - **Database Schema Enhancement**: New WorkflowDependencies table (V7) with efficient many-to-many relationship tracking
244
+ - Composite primary key on (dependencyWorkflowId, dependentWorkflowId) ensures unique relationships
245
+ - Foreign key constraints with CASCADE DELETE for referential integrity
246
+ - Optimized indexes on dependencyWorkflowId, dependentWorkflowId, and tenantId for fast queries
247
+ - createdAt timestamp for audit trail and relationship history
248
+ - **Enhanced insertWorkflowRecord()**: Now supports atomic creation of workflows with their dependencies
249
+ - Accepts optional dependencies array
250
+ - Automatically creates appropriate status based on dependency presence
251
+ - Returns success metadata for both workflow and dependency inserts
252
+ - **Improved getWorkflow()**: Enhanced with dependency fetching capabilities
253
+ - New `populateDependencies` option to fetch all workflows this workflow depends on
254
+ - New `populateDependents` option to fetch all workflows that depend on this workflow
255
+ - Maintains clean separation between data fetching and population options
256
+ - **TypeScript Type Improvements**: Enhanced type system for workflow dependencies
257
+ - New `ExtractDependencyResult<W>` type for typed dependency extraction
258
+ - New `WorkflowDependencyRun<I, O, TYPE>` type for typed dependency relationships
259
+ - Typed `getDependencies()` in workflow context for strong type safety across dependency workflows
260
+ - **Batch Enqueue Performance**: `enqueueWorkflowBatch()` maintains order while efficiently handling dependencies and scheduling
261
+
262
+ ## [v0.18.2] - 2025-10-07
263
+
264
+ ### 🐛 Bug Fixes
265
+
266
+ - Fix `getPropertiesKeys()` function to use `deserializeWorkflowPropertyValue()` instead of `tryDeserializeObj()` for consistent property value deserialization across workflow properties
267
+
268
+ ## [v0.18.1] - 2025-10-07
269
+
270
+ ### ✨ Features
271
+
272
+ - Export new helper functions `serializeWorkflowPropertyValue()` and `deserializeWorkflowPropertyValue()` for handling workflow property serialization and deserialization
273
+
274
+ ### 🔧 Improvements
275
+
276
+ - Improve property deserialization in `listWorkflows()` by using `deserializeWorkflowPropertyValue()` instead of `tryDeserializeObj()` for proper type-specific deserialization
277
+ - Enhance property value deserialization to handle different value types (string, number, boolean, object) with appropriate type conversion and JSON parsing with fallback error handling
278
+
279
+ ## [v0.18.0] - 2025-10-06
280
+
281
+ ### 🔧 Improvements
282
+
283
+ - Index creation is now idempotent - checks for existing indexes before attempting to create them, eliminating redundant creation attempts and preventing potential conflicts when re-running migrations
284
+ - Optimized index creation performance by batching index creation statements together for execution, reducing the number of database round-trips
285
+ - Fine-tuned query batch size for stability by reducing property loading batch size from 100 to 80 to provide additional safety margin below SQLite's variable limit
286
+
287
+ ## [v0.17.7] - 2025-10-05
288
+
289
+ *Version bump with no user-facing changes*
290
+
291
+ ## [v0.17.6] - 2025-10-05
292
+
293
+ ### 🔧 Improvements
294
+
295
+ - Property loading in `listWorkflows` now processes in batches of 100 instanceIds at a time to avoid hitting SQLite's 999 variable limit when populating properties for large result sets
296
+
297
+ ## [v0.17.5] - 2025-10-05
298
+
299
+ *Version bump with no user-facing changes*
300
+
301
+ ## [v0.17.4] - 2025-10-04
302
+
303
+ ### ✨ Features
304
+
305
+ - Add `populateProperties` option to `listWorkflows()` method for efficient workflow property fetching - when enabled, properties are fetched and attached to each workflow in a single additional query
306
+
307
+ ### 🔧 Improvements
308
+
309
+ - Efficient property fetching using IN clause with all instanceIds in one query, then grouped by instanceId and attached to workflows, improving performance for bulk workflow retrieval with properties
310
+
311
+ ## [v0.17.3] - 2025-09-16
312
+
313
+ ### 🔧 Improvements
314
+
315
+ - Add composite database index `idx_workflows_tenant_starttime` on WorkflowTable (tenantId, startTime DESC) to optimize tenant and time-based workflow queries
316
+
317
+ ## [v0.17.2] - 2025-09-15
318
+
319
+ ### 🔧 Improvements
320
+
321
+ - Add `debugLogs` option to `listWorkflows()` function for SQL query debugging - when enabled, logs the SQL query being executed, the bindings used, and a message when the query completes
322
+
323
+ ## [v0.17.1] - 2025-09-14
324
+
325
+ ### 🔧 Improvements
326
+
327
+ - Add console logging to cleanup manager for better visibility during deletion operations, including workflow deletion progress, associated steps deletion, external storage key deletion, database deletion phase, and messages when no workflows match deletion criteria
328
+
329
+ ## [v0.17.0] - 2025-09-10
330
+
331
+ ### 💥 Breaking Changes
332
+
333
+ - `queueIdentifier` is now a required field in `QueueWorkflowContextOptions` - must be provided when creating queue workflow contexts
334
+
335
+ ### ✨ Features
336
+
337
+ - Queue handlers now validate and track workflow queue routing with `queueIdentifier` support in `WorkflowQueueMessage` - a warning is logged if a workflow message is processed by the incorrect queue handler
338
+
339
+ ## [v0.16.4] - 2025-09-09
340
+
341
+ ### 🔧 Improvements
342
+
343
+ - Optimize `listSteps` query performance by refactoring SQL query construction to use dynamic WHERE clause building instead of null-checking pattern
344
+ - Add composite database indexes for improved query performance: `idx_steps_instance_starttime` on StepTable (instanceId, startTime) and `idx_workflow_properties_tenant_key` on WorkflowProperties (tenantId, key, valueType)
345
+
346
+ ## [v0.16.3] - 2025-09-06
347
+
348
+ ### ✨ Features
349
+
350
+ - Add `workflowAverageWallTimeMilis` property to workflow statistics for tracking average workflow execution time in aggregated results
351
+
352
+ ## [v0.16.2] - 2025-09-05
353
+
354
+ *Version bump with no user-facing changes*
355
+
356
+ ## [v0.16.1] - 2025-09-05
357
+
358
+ ### ✨ Features
359
+
360
+ - Add `successfulWorkflowCount` and `failedWorkflowCount` fields to workflow statistics query results for better insights into workflow completion status
361
+
362
+ ## [v0.16.0] - 2025-09-04
363
+
364
+ ### ✨ Features
365
+
366
+ - Add `getWorkflowTypesStatistics()` function to retrieve aggregated statistics for workflow types including counts, wall time totals, and incomplete workflow counts with optional filtering by tenant ID and time range
367
+
368
+ ## [v0.15.6] - 2025-09-01
369
+
370
+ ### 🔧 Improvements
371
+
372
+ - Add database index on StepTable startTime column for improved query performance when filtering or sorting steps by start time
373
+
374
+ ## [v0.15.5] - 2025-09-01
375
+
376
+ ### 🔧 Improvements
377
+
378
+ - Optimize SQL query for orphaned log deletion by querying StepTable directly instead of using NOT EXISTS subquery with WorkflowTable, improving performance for cleanup operations
379
+
380
+ ## [v0.15.4] - 2025-08-31
381
+
382
+ ### ✨ Features
383
+
384
+ - Add `deleteOrphanedSteps()` function to cleanup manager for removing steps that reference non-existent workflows
385
+ - Add `deleteOrphanedLogs()` function to cleanup manager for removing logs that reference non-existent steps or workflows
386
+
387
+ ## [v0.15.3] - 2025-08-29
388
+
389
+ ### 🔧 Improvements
390
+
391
+ - Add database indexes on foreign key columns (StepTable.instanceId and LogTable.instanceId) for improved query performance, particularly for cascade deletion and JOIN operations
392
+
393
+ ## [v0.15.1] - 2025-08-28
394
+
395
+ ### 🔧 Improvements
396
+
397
+ - Add specific index on WorkflowTable for status and time-based filtering to improve query performance when filtering by workflow status with time ranges
398
+ - Optimize workflow deletion SQL query in cleanup manager to use explicit filter predicate instead of subquery for better performance
399
+
400
+ ## [v0.15.0] - 2025-08-28
401
+
402
+ ### ✨ Features
403
+
404
+ - Add `deleteRefsFromExternalStorage` configuration option to cleanup manager for selective external storage cleanup during workflow deletion
405
+ - Add `delete()` method to `ExternalBlobStorage` interface for batch deletion of external storage keys
406
+ - Implement bulk delete capability in R2 external blob storage with fallback to individual deletes for resilience
407
+
408
+ ### 🔧 Improvements
409
+
410
+ - Enhance `countAffectedWorkflows()` to return detailed information including affected steps array and count of external storage keys to be deleted
411
+ - Enhance `deleteOldWorkflows()` to return deleted steps array and count of external storage keys actually deleted, enabling better tracking of cleanup operations
412
+ - Improve error handling in external storage deletion - errors are logged but do not fail the entire cleanup operation, allowing partial deletions to succeed
413
+ - Refactor cleanup manager with new helper functions `getAffectedWorkflows()`, `getAffectedSteps()`, and `collectExternalStorageKeys()` for better code organization and maintainability
414
+ - Add `limit` parameter to cleanup functions to prevent out-of-memory errors when processing large numbers of workflows
415
+
416
+ ### 💥 Breaking Changes
417
+
418
+ - `ExternalBlobStorage` interface now requires implementing the `delete(...keys: string[]): Promise<number>` method. Existing implementations must add this method.
419
+ - `countAffectedWorkflows()` now accepts a `limit` parameter as the second argument
420
+ - `deleteOldWorkflows()` now accepts a `limit` parameter as the second argument
421
+
422
+ ---
423
+
424
+ ## [v0.14.0] - 2025-08-27
425
+
426
+ ### ✨ Features
427
+
428
+ - Add Cleanup Manager system with `createCleanupManager()` for automated workflow data retention management
429
+ - Add `countAffectedWorkflows()` function to preview workflows matching cleanup criteria before deletion
430
+ - Add `deleteOldWorkflows()` function to delete workflows matching time-based, count-based, or type-based criteria
431
+ - Add database schema V5 with CASCADE DELETE constraints to automatically remove related steps and logs when workflows are deleted
432
+
433
+ ### 🔧 Improvements
434
+
435
+ - Improve data consistency with foreign key constraints ensuring referential integrity between workflows, steps, and logs
436
+ - Add migration system supporting upgrade from schema V2 and V3 to V5
437
+
438
+ ## [v0.13.3] - 2025-08-23
439
+
440
+ *Internal debugging release with temporary diagnostic logging*
441
+
442
+ ## [v0.13.2] - 2025-08-23
443
+
444
+ ### 🐛 Bug Fixes
445
+
446
+ - Fix incorrect table name 'WorkflowProperties' to correct table name 'WorkflowProperty' in deleteWorkflowProperties query
447
+
448
+ ## [v0.13.1] - 2025-08-22
449
+
450
+ ### 💥 Breaking Changes
451
+
452
+ - Move `populateInput` parameter from function signature to filter options object in `listWorkflows()` for consistent parameter structure
453
+
454
+ ## [v0.13.0] - 2025-08-22
455
+
456
+ ### ✨ Features
457
+
458
+ - Add `populateInput` parameter to `listWorkflows()` to control whether workflow inputs are deserialized and loaded (defaults to `false` to reduce memory usage when listing many workflows)
459
+
460
+ ## [v0.12.2] - 2025-08-20
461
+
462
+ ### 💥 Breaking Changes
463
+
464
+ - Remove `WorkflowBatchItem` type alias export - users should directly type workflow parameters with required `workflow`, `input`, `workflowName`, and optional `triggerId` fields
465
+
466
+ ## [v0.12.1] - 2025-08-20
467
+
468
+ ### 🔧 Improvements
469
+
470
+ - Export `WorkflowBatchItem` type alias for improved type safety when using `enqueueWorkflowBatch`
471
+
472
+ ## [v0.12.0] - 2025-08-19
473
+
474
+ ### ✨ Features
475
+
476
+ - Add `reuseSuccessfulSteps` option to `enqueueRetryWorkflow()` for controlling whether retry operations reuse successful step results from the parent workflow instance (defaults to `true`)
477
+
478
+ ---
479
+
480
+ ## [v0.15.2] - 2026-02-19
481
+
482
+ ### 🐛 Bug Fixes
483
+
484
+ - Fix workflow batch deletion to process deletes in batches of 100 to prevent hitting D1's variable binding limit when deleting large numbers of workflows
485
+
486
+ ## [v0.11.5] - 2026-02-19
487
+
488
+ ### 🐛 Bug Fixes
489
+
490
+ - Remove debug console.log statements from listWorkflows method to reduce noise in logs
491
+
492
+ ## [v0.11.4] - 2025-08-06
493
+
494
+ ### 🔧 Improvements
495
+
496
+ - Add database index on WorkflowTable startTime column for improved query performance
497
+
498
+ ## [v0.11.3] - 2026-02-19
499
+
500
+ ### 🔧 Improvements
501
+
502
+ - Enhanced debug logging in `listWorkflows` to include the SQL query being executed
503
+
504
+ ## [v0.11.2] - 2025-08-06
505
+
506
+ ### 🔧 Improvements
507
+
508
+ - Added debug logging to `listWorkflows` function to help track query execution and result processing
509
+
510
+ ## [v0.11.1] - 2026-02-19
511
+
512
+ ### ✨ Features
513
+
514
+ - Export `HandleWorkflowQueueMessageParams` type for better type safety when handling queue messages
515
+ - Add support for `triggerId` parameter in workflow retry operations to improve workflow traceability
516
+
517
+ ### 🔧 Improvements
518
+
519
+ - Refactor `handleWorkflowQueueMessage` function to use object destructuring for cleaner API
520
+ - Refactor `retry` function to use object parameters for better maintainability
521
+ - Improve error handling to check for existing workflows before throwing error messages
522
+ - Add `triggerId` generation for workflow runs and retries to enable better request tracking
523
+
524
+ ## [v0.11.0] - 2025-07-25
525
+
526
+ ### ✨ Features
527
+
528
+ - Add workflow trigger identification support via new `triggerId` parameter in `WorkflowCallParams`
529
+ - Add `getWorkflowByTriggerId()` function to log accessor for retrieving workflows by trigger ID
530
+
531
+ ### 💥 Breaking Changes
532
+
533
+ - `WorkflowRun` type now includes `triggerId: string | null` field for trigger correlation
534
+
535
+ ### 🔧 Improvements
536
+
537
+ - Add duplicate workflow prevention by checking for existing workflows with same `triggerId` before execution
538
+ - Enhance database schema with V4 migration to support `triggerId` with UNIQUE constraint on WorkflowTable
539
+ - Add comprehensive migration documentation explaining versioning system and table evolution strategy
540
+ - Improve database migration system with specific migration functions `migrateWorkflowTableV1ToV2()` and `migrateWorkflowTableV2V3ToV4()`
541
+
542
+ ## [v0.10.1] - 2026-02-19
543
+
544
+ ### 🐛 Bug Fixes
545
+
546
+ - Add log message truncation to prevent database storage errors for oversized logs. Messages exceeding 64KB are truncated with a UTF-8 safe truncation point and an indicator appended.
547
+
548
+ ## [v0.10.0] - 2026-02-19
549
+
550
+ ### ✨ Features
551
+
552
+ - Add `enqueueWorkflowBatch` function to batch-enqueue multiple workflows at once, improving efficiency when processing multiple workflow requests
553
+
554
+ ## [v0.9.0] - 2025-02-19
555
+
556
+ ### ✨ Features
557
+
558
+ - Added `LogBatcher` class for efficient batch processing of log entries with automatic periodic flushing and size-based triggers
559
+ - Added `populateData` parameter to `listSteps()` function to control whether result/error fields are populated from external storage
560
+
561
+ ### 🔧 Improvements
562
+
563
+ - Improved performance of log operations by batching database writes using D1's batch API instead of individual pushes
564
+ - Optimized memory usage by defaulting to not populating result/error fields in `listSteps()` - can be enabled via `populateData` option when needed
565
+ - Updated `getWorkflow()` to support `populateData` parameter for consistent data retrieval behavior
566
+
567
+ ## [v0.8.6] - 2025-07-04
568
+
569
+ ### 🔧 Improvements
570
+
571
+ - `listSteps()` now returns `null` for result and error fields to optimize memory usage. Results and errors are not populated in this function and should be retrieved separately using `getStep()` if needed.
572
+
573
+ ### 🐛 Bug Fixes
574
+
575
+ - Fixed workflow retry functionality to properly deserialize workflow input from external blob storage when retrying with `reuseSuccessfulSteps` option
576
+
577
+ ## [v0.8.5] - 2025-07-04
578
+
579
+ ### 🐛 Bug Fixes
580
+
581
+ - Removed debug logging from deserializeWithExternalStorage that was cluttering console output
582
+
583
+ ## [v0.8.4] - 2025-07-04
584
+
585
+ ### 🐛 Bug Fixes
586
+
587
+ - Added debug logging to external storage deserialization to help diagnose data loading issues
588
+
589
+ ## [v0.8.3] - 2025-07-04
590
+
591
+ ### 🐛 Bug Fixes
592
+
593
+ - Fixed step result retrieval to properly check external blob storage when accessing step data through `getStep()` method
594
+
595
+ ## [v0.8.2] - 2025-07-04
596
+
597
+ ### 🐛 Bug Fixes
598
+
599
+ - Simplified error logging to improve clarity and reduce noise in error reports
600
+
601
+ ## [v0.8.1] - 2025-07-04
602
+
603
+ ### 🐛 Bug Fixes
604
+
605
+ - Improved error logging for serialization/deserialization failures with structured error details including message, cause, and stack trace
606
+
607
+ ## [v0.8.0] - 2025-07-03
608
+
609
+ ### ✨ Features
610
+
611
+ - Enhanced `listSteps()` function with flexible API - now supports both object-based parameters (`listSteps({ limit, offset, instanceId })`) and maintains backward compatibility with the original positional parameter signature. The new API allows omitting limit/offset parameters to retrieve all steps without pagination
612
+
613
+ ## [v0.7.3] - 2025-07-03
614
+
615
+ ### ✨ Features
616
+
617
+ - Added comprehensive database schema migration system with automatic version detection and idempotent migrations that preserve existing data
618
+
619
+ ## [v0.7.2] - 2025-07-02
620
+
621
+ *Internal refactoring release with no user-facing changes*
622
+
623
+ ## [v0.7.1] - 2025-07-02
624
+
625
+ ### 🐛 Bug Fixes
626
+
627
+ - Remove console output during database table initialization to reduce unnecessary logging noise
628
+
629
+ ## [v0.7.0] - 2025-07-02
630
+
631
+ ### ✨ Features
632
+
633
+ - **External Blob Storage Support**: Added external storage capability for large workflow data (step results, errors, and inputs) that exceeds D1 database size limits
634
+ - New `ExternalBlobStorage` interface for pluggable storage backends
635
+ - New `createR2ExternalBlobStorage()` function for Cloudflare R2 integration with configurable threshold, key prefix, and custom ID generation
636
+ - New helper functions `serializeWithExternalStorage()` and `deserializeWithExternalStorage()` for transparent external storage handling
637
+ - Automatic fallback to direct database storage when external storage fails
638
+ - Optional `externalBlobStorage` parameter in `createWorkflowContext()` and `createLogAccessor()`
639
+ - **Comprehensive Documentation**: Complete README.md rewrite with extensive usage examples, API reference, best practices, and quick start guide
640
+
641
+ ### 🔧 Improvements
642
+
643
+ - **Database Schema Migration**: Enhanced `ensureTables()` function with automatic schema migration support to add external blob storage columns (`inputRef`, `resultRef`, `errorRef`) to existing tables
644
+ - **Data Retrieval**: Updated `createLogAccessor()` to automatically handle external blob storage references when retrieving workflow data, steps, and logs
645
+
646
+ ### 📚 Exports
647
+
648
+ - `ExternalBlobStorage` (type)
649
+ - `R2ExternalBlobStorageOptions` (type)
650
+ - `createR2ExternalBlobStorage()` (function)
651
+ - `serializeWithExternalStorage()` (function)
652
+ - `deserializeWithExternalStorage()` (function)
653
+
654
+ ## [v0.6.0] - 2025-06-05
655
+
656
+ ### ✨ Features
657
+
658
+ - Add comprehensive filtering capabilities to `listWorkflows()` function with support for:
659
+ - Text search across workflow names and types
660
+ - Status and type filtering with single or multiple values
661
+ - Date range filtering for start and end times (with `gte`, `lte`, `gt`, `lt` operators)
662
+ - Custom property filtering with comparison operators (`equals`, `contains`, `gt`, `gte`, `lt`, `lte`, `in`)
663
+ - Combined multi-criteria filtering
664
+ - Export new filter types: `WorkflowFilter`, `DateRangeFilter`, `StringFilter`, and `PropertyFilter`
665
+
666
+ ### 🔧 Development
667
+
668
+ - Add Cloudflare Workers testing environment with `@cloudflare/vitest-pool-workers` and `wrangler` for realistic D1 database testing
669
+ - Add comprehensive test coverage for filtering functionality and logging behavior
670
+ - Temporarily disable lint job in CI workflow
671
+
672
+ ## [v0.5.0] - 2025-06-04
673
+
674
+ ### ✨ Features
675
+
676
+ - Console methods (log, info, error, warn) now accept multiple arguments of any type, matching the standard JavaScript console API
677
+ - Console methods automatically serialize objects, arrays, and other non-string values to strings for logging
678
+ - Added support for circular reference detection in logged objects to prevent serialization errors
679
+
680
+ ## [v0.4.8] - 2025-05-22
681
+
682
+ ### 🐛 Bug Fixes
683
+
684
+ - Removed temporary debug logging from error handler that was cluttering console output
685
+
686
+ ## [v0.4.7] - 2025-05-22
687
+
688
+ ### 🐛 Bug Fixes
689
+
690
+ - Added temporary debug logging to error handler in step context to diagnose error serialization issues
691
+
692
+ ## [v0.4.6] - 2025-05-21
693
+
694
+ ### 🐛 Bug Fixes
695
+
696
+ - Fixed step result retrieval to return raw cached results without unnecessary deserialization
697
+
698
+ ## [v0.4.5] - 2025-05-21
699
+
700
+ ### 🐛 Bug Fixes
701
+
702
+ - Fixed serialization of step execution results and errors in workflow history to ensure consistent data format when persisting to storage
703
+
704
+ ## [v0.4.4] - 2025-05-21
705
+
706
+ ### 🐛 Bug Fixes
707
+
708
+ - Fixed step reuse feature to correctly deserialize metadata and results when reusing successful steps from previous workflow runs
709
+
710
+ ## [v0.4.3] - 2025-05-21
711
+
712
+ ### 🐛 Bug Fixes
713
+
714
+ - Added debug logging to step context creation to help diagnose workflow step reuse behavior
715
+
716
+ ## [v0.4.2] - 2025-05-21
717
+
718
+ ### 🐛 Bug Fixes
719
+
720
+ - Added temporary debug logging to step reuse mechanism to help diagnose issues with workflow step caching
721
+
722
+ ## [v0.4.1] - 2025-05-21
723
+
724
+ ### ✨ Features
725
+
726
+ - Added `reuseSuccessfulSteps` option to control whether workflow retries reuse results from successful steps in the parent instance (defaults to `true`)
727
+ - Added `RetryWorkflowOptions` parameter to the `retry()` method, allowing granular control over retry behavior per workflow
728
+
729
+ ### 🐛 Bug Fixes
730
+
731
+ - Fixed missing `parentInstanceId` being passed to step context during workflow execution
732
+
733
+ ## [v0.4.0] - 2025-05-20
734
+
735
+ ### 🐛 Bug Fixes
736
+
737
+ - Fix input deserialization in workflow retry to use configured serializer instead of JSON.parse
738
+
739
+ ## [v0.3.8] - 2025-05-20
740
+
741
+ ### ✨ Features
742
+
743
+ - Add `WorkflowContextInstance` type export for typed workflow context references
744
+ - Update `createWorkflowContext()` to return `WorkflowContextInstance` for improved type safety
745
+
746
+ ### 💥 Breaking Changes
747
+
748
+ - **QueueWorkflowContextOptions:** Replace `serializer` and `idFactory` options with `workflowContext` instance parameter - queue contexts now require a pre-configured workflow context instead of individual configuration options
749
+
750
+ ## [v0.3.7] - 2025-05-20
751
+
752
+ *Version bump with no functional changes*
753
+
754
+ ## [v0.3.6] - 2025-05-20
755
+
756
+ ### ⚠️ BREAKING CHANGES
757
+
758
+ - Remove `valueType` parameter from `setWorkflowProperty()` - type is now auto-detected from value
759
+
760
+ ## [v0.3.5] - 2025-05-19
761
+
762
+ ### 🐛 Bug Fixes
763
+
764
+ - Remove unnecessary generic type parameter from `handleWorkflowQueueMessage` to fix type inference issues with workflow resolver
765
+
766
+ ## [v0.3.4] - 2025-05-19
767
+
768
+ *Maintenance release with no user-facing changes*
769
+
770
+ ## [v0.3.3] - 2025-05-19
771
+
772
+ ### ✨ Features
773
+
774
+ - Add `tenantId` to workflow context for multi-tenant support
775
+
776
+ ## [v0.3.2] - 2025-05-19
777
+
778
+ ### ✨ Features
779
+
780
+ - Add simplified step syntax: `step('stepName', callback)` in addition to `step({ name, metadata }, callback)`
781
+
782
+ ## [v0.3.1] - 2025-05-19
783
+
784
+ ### ⚠️ BREAKING CHANGES
785
+
786
+ - Change `createWorkflowContext()` from async to sync function (tables now created lazily on first execution)
787
+
788
+ ## [v0.3.0] - 2025-05-19
789
+
790
+ ### ✨ Features
791
+
792
+ - Add default serializer and ID factory implementations
793
+ - Make serializer parameter optional in `createLogAccessor()` with automatic fallback to defaults
794
+
795
+ ## [v0.2.2] - 2025-05-19
796
+
797
+ *Minor release with internal improvements*
798
+
799
+ ## [v0.2.1] - 2025-05-19
800
+
801
+ *Minor release with internal improvements*
802
+
803
+ ## [v0.2.0] - 2025-05-16
804
+
805
+ ### ✨ Features
806
+
807
+ - Add parent-child workflow relationship support with `parentInstanceId` parameter
808
+
809
+ ## [v0.1.0] - 2025-05-16
810
+
811
+ ### ✨ Features
812
+
813
+ - Add `defineWorkflow()` to define typed workflow functions with metadata
814
+ - Add `createWorkflowContext()` for direct workflow execution with step tracking, logging, and retry capabilities
815
+ - Add `createQueueWorkflowContext()` for queue-based workflow execution with batch enqueueing and dependency management
816
+ - Add `createStepContext()` for granular step-level execution, logging, and result reuse
817
+ - Add `createLogAccessor()` with comprehensive query methods: `listWorkflows()`, `getWorkflow()`, `listSteps()`, `getStep()`, and `getWorkflowTypesStatistics()`
818
+ - Add workflow filtering with advanced query capabilities including property filters, date ranges, and text search
819
+ - Add workflow dependency system for coordinating workflows that must complete before others start
820
+ - Add workflow retry functionality with optional step result reuse
821
+ - Add workflow properties system for attaching custom metadata to workflows
822
+ - Add external blob storage support for large data that exceeds D1 size limits
823
+ - Add retry configuration for D1 operations with exponential backoff and jitter
824
+ - Add batch workflow enqueueing for efficient bulk operations (up to 100 workflows per batch)
825
+ - Add `handleWorkflowQueueMessage()` for processing queued workflow execution messages
826
+ - Add `handlePollWaitingWorkflows()` to transition waiting workflows to scheduled state when dependencies complete
827
+
828
+ ## [v0.0.2] - 2025-05-15
829
+
830
+ *Minor release with internal improvements*
package/README.md CHANGED
@@ -429,6 +429,10 @@ const typedWorkflow = defineWorkflow<MyInput>(
429
429
  )
430
430
  ```
431
431
 
432
+ ## Changelog
433
+
434
+ See [CHANGELOG.md](./CHANGELOG.md) for a detailed history of changes to this project.
435
+
432
436
  ## Contributing
433
437
 
434
438
  We welcome contributions! Please see our [Contributing Guidelines](CONTRIBUTING.md) for details.
package/dist/index.d.ts CHANGED
@@ -91,6 +91,16 @@ interface PrepareWorkflowInsertParams {
91
91
  * When provided, dependency insert statements are included in the returned statements array.
92
92
  */
93
93
  dependencies?: Array<WorkflowDependency>;
94
+ /**
95
+ * Optional delay in seconds before the queue message is delivered.
96
+ * Stored in the DB for transparency so the original delay intent is preserved.
97
+ */
98
+ delaySeconds?: number | null;
99
+ /**
100
+ * Absolute epoch-millisecond timestamp indicating the earliest time the queue message
101
+ * will be delivered. `null` or absent when no delay was requested.
102
+ */
103
+ scheduledFor?: number | null;
94
104
  }
95
105
  /** Result from preparing workflow insert statements */
96
106
 
@@ -214,6 +224,8 @@ declare function workflowTableRowToWorkflowRun({
214
224
  endTime: number | null;
215
225
  parentInstanceId: string | null;
216
226
  triggerId: string | null;
227
+ delaySeconds?: number | null;
228
+ scheduledFor?: number | null;
217
229
  };
218
230
  serializer: Serializer;
219
231
  externalBlobStorage?: ExternalBlobStorage;
@@ -259,6 +271,8 @@ declare function updateWorkflow(context: InternalWorkflowContextOptions, instanc
259
271
  startTime?: number;
260
272
  parentInstanceId?: string | null;
261
273
  triggerId?: string | null;
274
+ delaySeconds?: number | null;
275
+ scheduledFor?: number | null;
262
276
  }): Promise<D1Result<Record<string, unknown>> | {
263
277
  success: boolean;
264
278
  meta: {
@@ -484,6 +498,22 @@ type WorkflowRun = {
484
498
  isRetryOf?: WorkflowRun | null;
485
499
  retries?: WorkflowRun[] | null;
486
500
  properties?: WorkflowProperty[];
501
+ /**
502
+ * Optional queue delivery delay in seconds. Stored for transparency so the original
503
+ * delay intent is preserved. For waiting workflows, this value is applied when the
504
+ * workflow transitions from 'waiting' to 'scheduled'.
505
+ */
506
+ delaySeconds?: number | null;
507
+ /**
508
+ * Absolute epoch-millisecond timestamp indicating the earliest time the queue message
509
+ * will be delivered. Computed as `enqueueTime + delaySeconds * 1000` at the moment the
510
+ * message is actually sent to the queue.
511
+ *
512
+ * - For directly-scheduled workflows: set at enqueue time.
513
+ * - For waiting workflows: set when dependencies finish and the workflow transitions to 'scheduled'.
514
+ * - `null` or absent when no delay was requested.
515
+ */
516
+ scheduledFor?: number | null;
487
517
  /** Workflows that this workflow depends on (must complete before this workflow can start) */
488
518
  dependencies?: WorkflowDependencyRelation[];
489
519
  /** Workflows that depend on this workflow (will start after this workflow completes) */
@@ -694,6 +724,12 @@ type WorkflowEnqueueBatchItem<I, O, TYPE extends string = string> = {
694
724
  dependencies?: WorkflowDependency[];
695
725
  /** Optional trigger identifier for workflow correlation. If not provided, one will be auto-generated. */
696
726
  triggerId?: string | null;
727
+ /**
728
+ * Optional delay in seconds before the queue message is delivered.
729
+ * Maps directly to the Cloudflare Queue `delaySeconds` option.
730
+ * The message will not be delivered to a consumer until at least this many seconds have passed.
731
+ */
732
+ delaySeconds?: number;
697
733
  };
698
734
  type WorkflowQueueMessage = {
699
735
  type: 'workflow-run';
@@ -704,6 +740,8 @@ type WorkflowQueueMessage = {
704
740
  triggerId?: string | null;
705
741
  queueIdentifier?: string;
706
742
  scheduledInstanceId?: string | undefined;
743
+ /** Queue delivery delay in seconds (passed through to Cloudflare Queue) */
744
+ delaySeconds?: number;
707
745
  } | {
708
746
  type: 'workflow-retry';
709
747
  workflowType: string;
@@ -713,6 +751,7 @@ type WorkflowQueueMessage = {
713
751
  /** If true the workflow will attempt to reuse all results from successful steps. Defaults to True */
714
752
  reuseSuccessfulSteps?: boolean;
715
753
  queueIdentifier?: string;
754
+ scheduledInstanceId?: string | undefined;
716
755
  };
717
756
  type RetryWorkflowOptions = {
718
757
  /** If true the retry will attempt to reuse all results from successful steps. Defaults to `true` */
@@ -723,6 +762,8 @@ type RetryWorkflowParams<I, O> = {
723
762
  retryInstanceId: string;
724
763
  triggerId?: string | null;
725
764
  retryOptions?: RetryWorkflowOptions;
765
+ /** Optional: Provide an existing workflow instance ID. When provided, retry will update the existing instance instead of creating a new one. */
766
+ scheduledInstanceId?: string | undefined;
726
767
  };
727
768
  type WorkflowCallParams<I, O> = {
728
769
  workflow: WorkflowFunction<I, O>;
@@ -878,7 +919,7 @@ declare const createLogAccessor: (context: {
878
919
  declare function createQueueWorkflowContext(options: QueueWorkflowContextOptions): {
879
920
  enqueueWorkflow: <I, O, TYPE extends string>(params: WorkflowEnqueueBatchItem<I, O, TYPE>) => Promise<ScheduledWorkflowExecutionStub<O, TYPE>>;
880
921
  enqueueWorkflowBatch: <I, O>(workflows: Array<WorkflowEnqueueBatchItem<I, O>>) => Promise<Array<ScheduledWorkflowExecutionStub<O, string>>>;
881
- enqueueRetryWorkflow: <I, O>(workflow: WorkflowFunction<I, O>, tenantId: string, oldInstanceId: string, reuseSuccessfulSteps?: boolean) => Promise<void>;
922
+ enqueueRetryWorkflow: <I, O, TYPE extends string>(workflow: WorkflowFunction<I, O, TYPE>, tenantId: string, oldInstanceId: string, reuseSuccessfulSteps?: boolean, delaySeconds?: number) => Promise<ScheduledWorkflowExecutionStub<O, TYPE>>;
882
923
  handleWorkflowQueueMessage: ({
883
924
  message,
884
925
  env,
package/dist/index.js CHANGED
@@ -9,8 +9,10 @@ async function detectSchemaVersion(db, retryConfig) {
9
9
  const hasInputRef = workflowTableInfo.sql.includes("inputRef");
10
10
  const hasTriggerId = workflowTableInfo.sql.includes("triggerId");
11
11
  const hasResultRef = workflowTableInfo.sql.includes("resultRef");
12
+ const hasDelaySeconds = workflowTableInfo.sql.includes("delaySeconds");
12
13
  const inputHasNotNull = workflowTableInfo.sql.includes("input TEXT NOT NULL");
13
- if (hasResultRef && hasTriggerId && hasInputRef && !inputHasNotNull) workflowTable = "v6";
14
+ if (hasDelaySeconds && hasResultRef && hasTriggerId && hasInputRef && !inputHasNotNull) workflowTable = "v8";
15
+ else if (hasResultRef && hasTriggerId && hasInputRef && !inputHasNotNull) workflowTable = "v6";
14
16
  else if (hasTriggerId && hasInputRef && !inputHasNotNull && !hasResultRef) workflowTable = "v4";
15
17
  else if (hasInputRef && !inputHasNotNull && !hasTriggerId) workflowTable = "v2";
16
18
  else if (!hasInputRef && inputHasNotNull) workflowTable = "v1";
@@ -123,6 +125,23 @@ async function migrateWorkflowTableV4ToV6(db, retryConfig) {
123
125
  if (!hasResultRef) await retryD1Operation(() => db.batch([db.prepare(`ALTER TABLE WorkflowTable ADD COLUMN result TEXT`), db.prepare(`ALTER TABLE WorkflowTable ADD COLUMN resultRef TEXT`)]), retryConfig);
124
126
  }
125
127
  /**
128
+ * Migrate WorkflowTable from V6 to V8 schema
129
+ * Adds delaySeconds and scheduledFor columns for delayed workflow scheduling.
130
+ *
131
+ * - `delaySeconds` (INTEGER, nullable): The original delay intent in seconds. Preserved so that
132
+ * waiting workflows can apply the delay when they transition to 'scheduled'.
133
+ * - `scheduledFor` (INTEGER, nullable): Absolute epoch-millisecond timestamp indicating the
134
+ * earliest time the queue message will be delivered. For directly-scheduled workflows this is
135
+ * computed at enqueue time (`startTime + delaySeconds * 1000`). For waiting workflows it is
136
+ * computed at transition time (`transitionTime + delaySeconds * 1000`). NULL when no delay
137
+ * was requested.
138
+ */
139
+ async function migrateWorkflowTableV6ToV8(db, retryConfig) {
140
+ const workflowTableInfo = await retryD1Operation(() => db.prepare(`SELECT sql FROM sqlite_master WHERE type='table' AND name='WorkflowTable'`).first(), retryConfig);
141
+ const hasDelaySeconds = workflowTableInfo.sql.includes("delaySeconds");
142
+ if (!hasDelaySeconds) await retryD1Operation(() => db.batch([db.prepare(`ALTER TABLE WorkflowTable ADD COLUMN delaySeconds INTEGER`), db.prepare(`ALTER TABLE WorkflowTable ADD COLUMN scheduledFor INTEGER`)]), retryConfig);
143
+ }
144
+ /**
126
145
  * Create or migrate WorkflowTable to the latest schema
127
146
  */
128
147
  async function migrateWorkflowTable(db, currentVersion, retryConfig) {
@@ -142,6 +161,8 @@ async function migrateWorkflowTable(db, currentVersion, retryConfig) {
142
161
  endTime INTEGER,
143
162
  parentInstanceId TEXT,
144
163
  triggerId TEXT,
164
+ delaySeconds INTEGER,
165
+ scheduledFor INTEGER,
145
166
  PRIMARY KEY (instanceId),
146
167
  UNIQUE (triggerId)
147
168
  )`).run(), retryConfig);
@@ -151,15 +172,22 @@ async function migrateWorkflowTable(db, currentVersion, retryConfig) {
151
172
  await migrateWorkflowTableV1ToV2(db, retryConfig);
152
173
  await migrateWorkflowTableV2V3ToV4(db, retryConfig);
153
174
  await migrateWorkflowTableV4ToV6(db, retryConfig);
175
+ await migrateWorkflowTableV6ToV8(db, retryConfig);
154
176
  return;
155
177
  }
156
178
  if (currentVersion === "v2") {
157
179
  await migrateWorkflowTableV2V3ToV4(db, retryConfig);
158
180
  await migrateWorkflowTableV4ToV6(db, retryConfig);
181
+ await migrateWorkflowTableV6ToV8(db, retryConfig);
159
182
  return;
160
183
  }
161
184
  if (currentVersion === "v4") {
162
185
  await migrateWorkflowTableV4ToV6(db, retryConfig);
186
+ await migrateWorkflowTableV6ToV8(db, retryConfig);
187
+ return;
188
+ }
189
+ if (currentVersion === "v6") {
190
+ await migrateWorkflowTableV6ToV8(db, retryConfig);
163
191
  return;
164
192
  }
165
193
  }
@@ -468,11 +496,11 @@ async function finalizeWorkflowRecord(options, { workflowStatus, endTime, instan
468
496
  *
469
497
  * @internal
470
498
  */
471
- async function prepareWorkflowInsertStatements(options, { instanceId, workflowType, workflowName, workflowMetadata, input, workflowStatus, startTime, endTime, parentInstanceId, tenantId, triggerId, dependencies }) {
499
+ async function prepareWorkflowInsertStatements(options, { instanceId, workflowType, workflowName, workflowMetadata, input, workflowStatus, startTime, endTime, parentInstanceId, tenantId, triggerId, dependencies, delaySeconds, scheduledFor }) {
472
500
  const { data: inputData, externalRef: inputRef } = await serializeWithExternalStorage(input, options.serializer, options.externalBlobStorage);
473
501
  const insertWorkflowStatement = options.D1.prepare(`INSERT INTO WorkflowTable
474
- (instanceId, workflowType, workflowName, workflowMetadata, input, inputRef, tenantId, workflowStatus, startTime, endTime, parentInstanceId, triggerId)
475
- VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)`).bind(instanceId, workflowType, workflowName, options.serializer.serialize(workflowMetadata), inputData, inputRef, tenantId, workflowStatus, startTime, endTime ?? null, parentInstanceId ?? null, triggerId ?? null);
502
+ (instanceId, workflowType, workflowName, workflowMetadata, input, inputRef, tenantId, workflowStatus, startTime, endTime, parentInstanceId, triggerId, delaySeconds, scheduledFor)
503
+ VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)`).bind(instanceId, workflowType, workflowName, options.serializer.serialize(workflowMetadata), inputData, inputRef, tenantId, workflowStatus, startTime, endTime ?? null, parentInstanceId ?? null, triggerId ?? null, delaySeconds ?? null, scheduledFor ?? null);
476
504
  if (!dependencies || dependencies.length === 0) return {
477
505
  statements: [insertWorkflowStatement],
478
506
  instanceId,
@@ -777,7 +805,9 @@ async function workflowTableRowToWorkflowRun({ row, serializer, externalBlobStor
777
805
  startTime: row.startTime,
778
806
  endTime: row.endTime,
779
807
  parentInstanceId: row.parentInstanceId,
780
- triggerId: row.triggerId
808
+ triggerId: row.triggerId,
809
+ delaySeconds: row.delaySeconds ?? null,
810
+ scheduledFor: row.scheduledFor ?? null
781
811
  };
782
812
  }
783
813
  async function updateWorkflowName(context, instanceId, newWorkflowName) {
@@ -840,6 +870,14 @@ async function updateWorkflow(context, instanceId, updates) {
840
870
  setClauses.push("triggerId = ?");
841
871
  bindings.push(updates.triggerId);
842
872
  }
873
+ if (updates.delaySeconds !== void 0) {
874
+ setClauses.push("delaySeconds = ?");
875
+ bindings.push(updates.delaySeconds);
876
+ }
877
+ if (updates.scheduledFor !== void 0) {
878
+ setClauses.push("scheduledFor = ?");
879
+ bindings.push(updates.scheduledFor);
880
+ }
843
881
  if (updates.workflowMetadata !== void 0) {
844
882
  setClauses.push("workflowMetadata = ?");
845
883
  bindings.push(context.serializer.serialize(updates.workflowMetadata));
@@ -1785,7 +1823,7 @@ function createQueueWorkflowContext(options) {
1785
1823
  };
1786
1824
  const idFactory = internalContext.idFactory;
1787
1825
  const enqueueWorkflow = async (params) => {
1788
- const { workflow, tenantId, input, initialName, dependencies, triggerId: providedTriggerId } = params;
1826
+ const { workflow, tenantId, input, initialName, dependencies, triggerId: providedTriggerId, delaySeconds } = params;
1789
1827
  const triggerId = providedTriggerId ?? idFactory();
1790
1828
  const instanceId = idFactory();
1791
1829
  const startTime = Date.now();
@@ -1801,9 +1839,12 @@ function createQueueWorkflowContext(options) {
1801
1839
  parentInstanceId: void 0,
1802
1840
  tenantId,
1803
1841
  triggerId,
1804
- dependencies
1842
+ dependencies,
1843
+ delaySeconds: delaySeconds ?? null,
1844
+ scheduledFor: null
1805
1845
  });
1806
1846
  else {
1847
+ const scheduledFor = delaySeconds != null ? startTime + delaySeconds * 1e3 : null;
1807
1848
  await insertWorkflowRecord(internalContext, {
1808
1849
  instanceId,
1809
1850
  workflowType: workflow.workflowType,
@@ -1815,7 +1856,9 @@ function createQueueWorkflowContext(options) {
1815
1856
  endTime: null,
1816
1857
  parentInstanceId: void 0,
1817
1858
  tenantId,
1818
- triggerId
1859
+ triggerId,
1860
+ delaySeconds: delaySeconds ?? null,
1861
+ scheduledFor
1819
1862
  });
1820
1863
  await options.QUEUE.send({
1821
1864
  type: "workflow-run",
@@ -1826,15 +1869,42 @@ function createQueueWorkflowContext(options) {
1826
1869
  triggerId,
1827
1870
  queueIdentifier: options.queueIdentifier,
1828
1871
  scheduledInstanceId: instanceId
1829
- });
1872
+ }, delaySeconds != null ? { delaySeconds } : void 0);
1830
1873
  }
1831
1874
  return {
1832
1875
  instanceId,
1833
1876
  workflowType: workflow.workflowType
1834
1877
  };
1835
1878
  };
1836
- const enqueueRetryWorkflow = async (workflow, tenantId, oldInstanceId, reuseSuccessfulSteps) => {
1879
+ const enqueueRetryWorkflow = async (workflow, tenantId, oldInstanceId, reuseSuccessfulSteps, delaySeconds) => {
1837
1880
  const triggerId = idFactory();
1881
+ const instanceId = idFactory();
1882
+ const startTime = Date.now();
1883
+ const scheduledFor = delaySeconds != null ? startTime + delaySeconds * 1e3 : null;
1884
+ const logAccessor = createLogAccessor({
1885
+ D1: options.D1,
1886
+ externalBlobStorage: options.externalBlobStorage,
1887
+ serializer: internalContext.serializer,
1888
+ tenantId,
1889
+ retryConfig: options.retryConfig
1890
+ });
1891
+ const oldRun = await logAccessor.getWorkflowShallow(oldInstanceId, { populateInput: true });
1892
+ if (!oldRun) throw new Error(`Cannot retry: no workflow found for instanceId ${oldInstanceId}`);
1893
+ await insertWorkflowRecord(internalContext, {
1894
+ instanceId,
1895
+ workflowType: workflow.workflowType,
1896
+ workflowName: oldRun.workflowName ?? "unknown",
1897
+ workflowMetadata: workflow.metadata,
1898
+ input: oldRun.input,
1899
+ workflowStatus: "scheduled",
1900
+ startTime,
1901
+ endTime: null,
1902
+ parentInstanceId: oldInstanceId,
1903
+ tenantId,
1904
+ triggerId,
1905
+ delaySeconds: delaySeconds ?? null,
1906
+ scheduledFor
1907
+ });
1838
1908
  await options.QUEUE.send({
1839
1909
  type: "workflow-retry",
1840
1910
  workflowType: workflow.workflowType,
@@ -1842,8 +1912,13 @@ function createQueueWorkflowContext(options) {
1842
1912
  tenantId,
1843
1913
  triggerId,
1844
1914
  reuseSuccessfulSteps: reuseSuccessfulSteps ?? true,
1845
- queueIdentifier: options.queueIdentifier
1846
- });
1915
+ queueIdentifier: options.queueIdentifier,
1916
+ scheduledInstanceId: instanceId
1917
+ }, delaySeconds != null ? { delaySeconds } : void 0);
1918
+ return {
1919
+ instanceId,
1920
+ workflowType: workflow.workflowType
1921
+ };
1847
1922
  };
1848
1923
  /**
1849
1924
  * Enqueue multiple workflows in a single batch operation.
@@ -1866,10 +1941,11 @@ function createQueueWorkflowContext(options) {
1866
1941
  if (workflows.length > 100) throw new Error(`enqueueWorkflowBatch: Cannot enqueue more than 100 workflows in a single batch (received ${workflows.length}). Split into smaller chunks.`);
1867
1942
  const startTime = Date.now();
1868
1943
  console.log(`enqueueWorkflowBatch: Starting batch of ${workflows.length} workflows`);
1869
- const preparedWorkflows = await Promise.all(workflows.map(async ({ workflow, tenantId, input, initialName, dependencies, triggerId: providedTriggerId }) => {
1944
+ const preparedWorkflows = await Promise.all(workflows.map(async ({ workflow, tenantId, input, initialName, dependencies, triggerId: providedTriggerId, delaySeconds }) => {
1870
1945
  const instanceId = idFactory();
1871
1946
  const triggerId = providedTriggerId ?? idFactory();
1872
1947
  const hasDependencies = dependencies && dependencies.length > 0;
1948
+ const scheduledFor = !hasDependencies && delaySeconds != null ? startTime + delaySeconds * 1e3 : null;
1873
1949
  const prepared = await prepareWorkflowInsertStatements(internalContext, {
1874
1950
  instanceId,
1875
1951
  workflowType: workflow.workflowType,
@@ -1882,14 +1958,17 @@ function createQueueWorkflowContext(options) {
1882
1958
  parentInstanceId: void 0,
1883
1959
  tenantId,
1884
1960
  triggerId,
1885
- dependencies
1961
+ dependencies,
1962
+ delaySeconds: delaySeconds ?? null,
1963
+ scheduledFor
1886
1964
  });
1887
1965
  return {
1888
1966
  ...prepared,
1889
1967
  tenantId,
1890
1968
  triggerId,
1891
1969
  initialName,
1892
- shouldQueue: !hasDependencies
1970
+ shouldQueue: !hasDependencies,
1971
+ delaySeconds
1893
1972
  };
1894
1973
  }));
1895
1974
  const workflowsWithDeps = preparedWorkflows.filter((p) => !p.shouldQueue).length;
@@ -1900,16 +1979,19 @@ function createQueueWorkflowContext(options) {
1900
1979
  console.log(`enqueueWorkflowBatch: Executing D1 batch with ${allStatements.length} statements`);
1901
1980
  await retryD1Operation(() => options.D1.batch(allStatements), options.retryConfig);
1902
1981
  }
1903
- const messagesToQueue = preparedWorkflows.filter((p) => p.shouldQueue).map((p) => ({ body: {
1904
- type: "workflow-run",
1905
- workflowType: p.workflowType,
1906
- workflowName: p.initialName,
1907
- input: workflows.find((w) => w.workflow.workflowType === p.workflowType && w.initialName === p.initialName)?.input,
1908
- tenantId: p.tenantId,
1909
- triggerId: p.triggerId,
1910
- queueIdentifier: options.queueIdentifier,
1911
- scheduledInstanceId: p.instanceId
1912
- } }));
1982
+ const messagesToQueue = preparedWorkflows.filter((p) => p.shouldQueue).map((p) => ({
1983
+ body: {
1984
+ type: "workflow-run",
1985
+ workflowType: p.workflowType,
1986
+ workflowName: p.initialName,
1987
+ input: workflows.find((w) => w.workflow.workflowType === p.workflowType && w.initialName === p.initialName)?.input,
1988
+ tenantId: p.tenantId,
1989
+ triggerId: p.triggerId,
1990
+ queueIdentifier: options.queueIdentifier,
1991
+ scheduledInstanceId: p.instanceId
1992
+ },
1993
+ ...p.delaySeconds != null ? { delaySeconds: p.delaySeconds } : {}
1994
+ }));
1913
1995
  if (messagesToQueue.length > 0) {
1914
1996
  console.log(`enqueueWorkflowBatch: Sending ${messagesToQueue.length} messages to queue`);
1915
1997
  await options.QUEUE.sendBatch(messagesToQueue);
@@ -1942,7 +2024,8 @@ function createQueueWorkflowContext(options) {
1942
2024
  workflow: workflowFunction,
1943
2025
  retryInstanceId: message.retryInstanceId,
1944
2026
  triggerId: message.triggerId,
1945
- retryOptions: { reuseSuccessfulSteps: message.reuseSuccessfulSteps ?? true }
2027
+ retryOptions: { reuseSuccessfulSteps: message.reuseSuccessfulSteps ?? true },
2028
+ scheduledInstanceId: message.scheduledInstanceId
1946
2029
  });
1947
2030
  }
1948
2031
  };
@@ -1968,7 +2051,12 @@ function createQueueWorkflowContext(options) {
1968
2051
  const allDepsFinished = dependencies.every((dep) => dep.workflow.workflowStatus === "completed" || dep.workflow.workflowStatus === "failed");
1969
2052
  if (allDepsFinished) {
1970
2053
  console.log(`Enqueuing waiting workflow ${wf.instanceId} as all dependencies are finished`);
1971
- await updateWorkflow(internalContext, wf.instanceId, { workflowStatus: "scheduled" });
2054
+ const now = Date.now();
2055
+ const scheduledFor = wf.delaySeconds != null ? now + wf.delaySeconds * 1e3 : null;
2056
+ await updateWorkflow(internalContext, wf.instanceId, {
2057
+ workflowStatus: "scheduled",
2058
+ scheduledFor
2059
+ });
1972
2060
  await options.QUEUE.send({
1973
2061
  type: "workflow-run",
1974
2062
  workflowType: wf.workflowType,
@@ -1978,7 +2066,7 @@ function createQueueWorkflowContext(options) {
1978
2066
  triggerId: wf.triggerId,
1979
2067
  queueIdentifier: options.queueIdentifier,
1980
2068
  scheduledInstanceId: wf.instanceId
1981
- });
2069
+ }, wf.delaySeconds != null ? { delaySeconds: wf.delaySeconds } : void 0);
1982
2070
  } else {
1983
2071
  const unfinishedDeps = dependencies.filter((dep) => dep.workflow.workflowStatus !== "completed" && dep.workflow.workflowStatus !== "failed");
1984
2072
  console.log(`Workflow ${wf.instanceId} is still waiting on dependencies. Unfinished dependencies: ${unfinishedDeps.map((d) => d.workflow.instanceId + ": " + d.workflow.workflowStatus).join(", ")}`);
@@ -2244,7 +2332,7 @@ function createWorkflowContext(options) {
2244
2332
  throw error;
2245
2333
  }
2246
2334
  };
2247
- const retry = async ({ workflow, retryInstanceId, triggerId, retryOptions }) => {
2335
+ const retry = async ({ workflow, retryInstanceId, triggerId, retryOptions, scheduledInstanceId }) => {
2248
2336
  if (!ensuredTables) {
2249
2337
  await ensureTables(options.D1, options.retryConfig);
2250
2338
  ensuredTables = true;
@@ -2264,7 +2352,8 @@ function createWorkflowContext(options) {
2264
2352
  parentInstanceId: retryInstanceId,
2265
2353
  reuseSuccessfulSteps: retryOptions?.reuseSuccessfulSteps,
2266
2354
  tenantId,
2267
- triggerId
2355
+ triggerId,
2356
+ scheduledInstanceId
2268
2357
  });
2269
2358
  };
2270
2359
  return {
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@brandboostinggmbh/observable-workflows",
3
- "version": "0.21.1",
3
+ "version": "0.22.1",
4
4
  "description": "My awesome typescript library",
5
5
  "type": "module",
6
6
  "license": "MIT",
@@ -14,7 +14,8 @@
14
14
  },
15
15
  "author": "Tim <tim.stepanov@brand-boosting.de>",
16
16
  "files": [
17
- "dist"
17
+ "dist",
18
+ "CHANGELOG.md"
18
19
  ],
19
20
  "main": "./dist/index.js",
20
21
  "module": "./dist/index.js",