@cascade-flow/backend-postgres 0.2.13 → 0.2.14

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/index.d.ts CHANGED
@@ -84,7 +84,7 @@ export declare class PostgresBackend extends Backend {
  attemptNumber: number;
  terminal: boolean;
  nextRetryAt?: number;
- failureReason: "exhausted-retries" | "worker-crash" | "timeout" | "cancelled" | "execution-error";
+ failureReason: "exhausted-retries" | "worker-crash" | "timeout" | "cancelled" | "execution-error" | "step-removed";
  }): Promise<void>;
  saveStepFailedAndScheduleRetry(workflowSlug: string, runId: string, stepId: string, error: StepError, failureMetadata: {
  duration: number;
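
The only declaration change in this release widens the failureReason union accepted by saveStepFailed with a new "step-removed" member. Consumers that switch exhaustively over the reason need one more branch. A minimal TypeScript sketch; the FailureReason alias and onStepFailure handler are hypothetical illustrations, not exports of this package:

// Mirrors the failureReason parameter type of saveStepFailed as of 0.2.14.
type FailureReason =
  | "exhausted-retries"
  | "worker-crash"
  | "timeout"
  | "cancelled"
  | "execution-error"
  | "step-removed"; // new in 0.2.14

// Hypothetical consumer-side handler; exhaustive so the compiler flags any
// future widening of the union.
function onStepFailure(reason: FailureReason): string {
  switch (reason) {
    case "exhausted-retries":
      return "all retry policies exhausted";
    case "worker-crash":
      return "worker died mid-step";
    case "timeout":
      return "step exceeded its timeout";
    case "cancelled":
      return "run was cancelled";
    case "execution-error":
      return "step threw during execution";
    case "step-removed":
      // New in 0.2.14; presumably emitted when a scheduled step no longer
      // exists in the current workflow definition.
      return "step no longer present in workflow";
    default: {
      // Exhaustiveness check: fails to compile if the union widens again.
      const exhaustive: never = reason;
      return exhaustive;
    }
  }
}
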
package/dist/index.d.ts.map CHANGED
@@ -1 +1 @@
- {"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../src/index.ts"],"names":[],"mappings":"<base64-VLQ mappings for 0.2.13 elided>"}
+ {"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../src/index.ts"],"names":[],"mappings":"<base64-VLQ mappings for 0.2.14 elided; regenerated for the widened failureReason union>"}
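
The index.d.ts.map change above is mechanical: the compiler regenerates the base64-VLQ mappings string whenever positions in the declaration file shift, as they do once the failureReason line grows. To confirm that a mappings diff is only a regeneration, the source-map package can decode it. A minimal sketch, assuming source-map ^0.7 is installed and rawMap holds the parsed .d.ts.map JSON:

import { SourceMapConsumer, type RawSourceMap } from "source-map";

// Print every generated-to-original position pair the mappings encode.
async function dumpMappings(rawMap: RawSourceMap): Promise<void> {
  await SourceMapConsumer.with(rawMap, null, (consumer) => {
    consumer.eachMapping((m) => {
      console.log(
        `d.ts ${m.generatedLine}:${m.generatedColumn} -> ` +
          `${m.source}:${m.originalLine}:${m.originalColumn}`
      );
    });
  });
}
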
package/dist/index.js.map CHANGED
@@ -42,7 +42,7 @@
  "'use strict'\n\nconst EventEmitter = require('events').EventEmitter\nconst util = require('util')\nconst utils = require('../utils')\n\nconst NativeQuery = (module.exports = function (config, values, callback) {\n EventEmitter.call(this)\n config = utils.normalizeQueryConfig(config, values, callback)\n this.text = config.text\n this.values = config.values\n this.name = config.name\n this.queryMode = config.queryMode\n this.callback = config.callback\n this.state = 'new'\n this._arrayMode = config.rowMode === 'array'\n\n // if the 'row' event is listened for\n // then emit them as they come in\n // without setting singleRowMode to true\n // this has almost no meaning because libpq\n // reads all rows into memory before returning any\n this._emitRowEvents = false\n this.on(\n 'newListener',\n function (event) {\n if (event === 'row') this._emitRowEvents = true\n }.bind(this)\n )\n})\n\nutil.inherits(NativeQuery, EventEmitter)\n\nconst errorFieldMap = {\n sqlState: 'code',\n statementPosition: 'position',\n messagePrimary: 'message',\n context: 'where',\n schemaName: 'schema',\n tableName: 'table',\n columnName: 'column',\n dataTypeName: 'dataType',\n constraintName: 'constraint',\n sourceFile: 'file',\n sourceLine: 'line',\n sourceFunction: 'routine',\n}\n\nNativeQuery.prototype.handleError = function (err) {\n // copy pq error fields into the error object\n const fields = this.native.pq.resultErrorFields()\n if (fields) {\n for (const key in fields) {\n const normalizedFieldName = errorFieldMap[key] || key\n err[normalizedFieldName] = fields[key]\n }\n }\n if (this.callback) {\n this.callback(err)\n } else {\n this.emit('error', err)\n }\n this.state = 'error'\n}\n\nNativeQuery.prototype.then = function (onSuccess, onFailure) {\n return this._getPromise().then(onSuccess, onFailure)\n}\n\nNativeQuery.prototype.catch = function (callback) {\n return this._getPromise().catch(callback)\n}\n\nNativeQuery.prototype._getPromise = function () {\n if (this._promise) return this._promise\n this._promise = new Promise(\n function (resolve, reject) {\n this._once('end', resolve)\n this._once('error', reject)\n }.bind(this)\n )\n return this._promise\n}\n\nNativeQuery.prototype.submit = function (client) {\n this.state = 'running'\n const self = this\n this.native = client.native\n client.native.arrayMode = this._arrayMode\n\n let after = function (err, rows, results) {\n client.native.arrayMode = false\n setImmediate(function () {\n self.emit('_done')\n })\n\n // handle possible query error\n if (err) {\n return self.handleError(err)\n }\n\n // emit row events for each row in the result\n if (self._emitRowEvents) {\n if (results.length > 1) {\n rows.forEach((rowOfRows, i) => {\n rowOfRows.forEach((row) => {\n self.emit('row', row, results[i])\n })\n })\n } else {\n rows.forEach(function (row) {\n self.emit('row', row, results)\n })\n }\n }\n\n // handle successful result\n self.state = 'end'\n self.emit('end', results)\n if (self.callback) {\n self.callback(null, results)\n }\n }\n\n if (process.domain) {\n after = process.domain.bind(after)\n }\n\n // named query\n if (this.name) {\n if (this.name.length > 63) {\n console.error('Warning! 
Postgres only supports 63 characters for query names.')\n console.error('You supplied %s (%s)', this.name, this.name.length)\n console.error('This can cause conflicts and silent errors executing queries')\n }\n const values = (this.values || []).map(utils.prepareValue)\n\n // check if the client has already executed this named query\n // if so...just execute it again - skip the planning phase\n if (client.namedQueries[this.name]) {\n if (this.text && client.namedQueries[this.name] !== this.text) {\n const err = new Error(`Prepared statements must be unique - '${this.name}' was used for a different statement`)\n return after(err)\n }\n return client.native.execute(this.name, values, after)\n }\n // plan the named query the first time, then execute it\n return client.native.prepare(this.name, this.text, values.length, function (err) {\n if (err) return after(err)\n client.namedQueries[self.name] = self.text\n return self.native.execute(self.name, values, after)\n })\n } else if (this.values) {\n if (!Array.isArray(this.values)) {\n const err = new Error('Query values must be an array')\n return after(err)\n }\n const vals = this.values.map(utils.prepareValue)\n client.native.query(this.text, vals, after)\n } else if (this.queryMode === 'extended') {\n client.native.query(this.text, [], after)\n } else {\n client.native.query(this.text, after)\n }\n}\n",
  "'use strict'\n\n// eslint-disable-next-line\nvar Native\n// eslint-disable-next-line no-useless-catch\ntry {\n // Wrap this `require()` in a try-catch to avoid upstream bundlers from complaining that this might not be available since it is an optional import\n Native = require('pg-native')\n} catch (e) {\n throw e\n}\nconst TypeOverrides = require('../type-overrides')\nconst EventEmitter = require('events').EventEmitter\nconst util = require('util')\nconst ConnectionParameters = require('../connection-parameters')\n\nconst NativeQuery = require('./query')\n\nconst Client = (module.exports = function (config) {\n EventEmitter.call(this)\n config = config || {}\n\n this._Promise = config.Promise || global.Promise\n this._types = new TypeOverrides(config.types)\n\n this.native = new Native({\n types: this._types,\n })\n\n this._queryQueue = []\n this._ending = false\n this._connecting = false\n this._connected = false\n this._queryable = true\n\n // keep these on the object for legacy reasons\n // for the time being. TODO: deprecate all this jazz\n const cp = (this.connectionParameters = new ConnectionParameters(config))\n if (config.nativeConnectionString) cp.nativeConnectionString = config.nativeConnectionString\n this.user = cp.user\n\n // \"hiding\" the password so it doesn't show up in stack traces\n // or if the client is console.logged\n Object.defineProperty(this, 'password', {\n configurable: true,\n enumerable: false,\n writable: true,\n value: cp.password,\n })\n this.database = cp.database\n this.host = cp.host\n this.port = cp.port\n\n // a hash to hold named queries\n this.namedQueries = {}\n})\n\nClient.Query = NativeQuery\n\nutil.inherits(Client, EventEmitter)\n\nClient.prototype._errorAllQueries = function (err) {\n const enqueueError = (query) => {\n process.nextTick(() => {\n query.native = this.native\n query.handleError(err)\n })\n }\n\n if (this._hasActiveQuery()) {\n enqueueError(this._activeQuery)\n this._activeQuery = null\n }\n\n this._queryQueue.forEach(enqueueError)\n this._queryQueue.length = 0\n}\n\n// connect to the backend\n// pass an optional callback to be called once connected\n// or with an error if there was a connection error\nClient.prototype._connect = function (cb) {\n const self = this\n\n if (this._connecting) {\n process.nextTick(() => cb(new Error('Client has already been connected. 
You cannot reuse a client.')))\n return\n }\n\n this._connecting = true\n\n this.connectionParameters.getLibpqConnectionString(function (err, conString) {\n if (self.connectionParameters.nativeConnectionString) conString = self.connectionParameters.nativeConnectionString\n if (err) return cb(err)\n self.native.connect(conString, function (err) {\n if (err) {\n self.native.end()\n return cb(err)\n }\n\n // set internal states to connected\n self._connected = true\n\n // handle connection errors from the native layer\n self.native.on('error', function (err) {\n self._queryable = false\n self._errorAllQueries(err)\n self.emit('error', err)\n })\n\n self.native.on('notification', function (msg) {\n self.emit('notification', {\n channel: msg.relname,\n payload: msg.extra,\n })\n })\n\n // signal we are connected now\n self.emit('connect')\n self._pulseQueryQueue(true)\n\n cb()\n })\n })\n}\n\nClient.prototype.connect = function (callback) {\n if (callback) {\n this._connect(callback)\n return\n }\n\n return new this._Promise((resolve, reject) => {\n this._connect((error) => {\n if (error) {\n reject(error)\n } else {\n resolve()\n }\n })\n })\n}\n\n// send a query to the server\n// this method is highly overloaded to take\n// 1) string query, optional array of parameters, optional function callback\n// 2) object query with {\n// string query\n// optional array values,\n// optional function callback instead of as a separate parameter\n// optional string name to name & cache the query plan\n// optional string rowMode = 'array' for an array of results\n// }\nClient.prototype.query = function (config, values, callback) {\n let query\n let result\n let readTimeout\n let readTimeoutTimer\n let queryCallback\n\n if (config === null || config === undefined) {\n throw new TypeError('Client was passed a null or undefined query')\n } else if (typeof config.submit === 'function') {\n readTimeout = config.query_timeout || this.connectionParameters.query_timeout\n result = query = config\n // accept query(new Query(...), (err, res) => { }) style\n if (typeof values === 'function') {\n config.callback = values\n }\n } else {\n readTimeout = config.query_timeout || this.connectionParameters.query_timeout\n query = new NativeQuery(config, values, callback)\n if (!query.callback) {\n let resolveOut, rejectOut\n result = new this._Promise((resolve, reject) => {\n resolveOut = resolve\n rejectOut = reject\n }).catch((err) => {\n Error.captureStackTrace(err)\n throw err\n })\n query.callback = (err, res) => (err ? 
rejectOut(err) : resolveOut(res))\n }\n }\n\n if (readTimeout) {\n queryCallback = query.callback\n\n readTimeoutTimer = setTimeout(() => {\n const error = new Error('Query read timeout')\n\n process.nextTick(() => {\n query.handleError(error, this.connection)\n })\n\n queryCallback(error)\n\n // we already returned an error,\n // just do nothing if query completes\n query.callback = () => {}\n\n // Remove from queue\n const index = this._queryQueue.indexOf(query)\n if (index > -1) {\n this._queryQueue.splice(index, 1)\n }\n\n this._pulseQueryQueue()\n }, readTimeout)\n\n query.callback = (err, res) => {\n clearTimeout(readTimeoutTimer)\n queryCallback(err, res)\n }\n }\n\n if (!this._queryable) {\n query.native = this.native\n process.nextTick(() => {\n query.handleError(new Error('Client has encountered a connection error and is not queryable'))\n })\n return result\n }\n\n if (this._ending) {\n query.native = this.native\n process.nextTick(() => {\n query.handleError(new Error('Client was closed and is not queryable'))\n })\n return result\n }\n\n this._queryQueue.push(query)\n this._pulseQueryQueue()\n return result\n}\n\n// disconnect from the backend server\nClient.prototype.end = function (cb) {\n const self = this\n\n this._ending = true\n\n if (!this._connected) {\n this.once('connect', this.end.bind(this, cb))\n }\n let result\n if (!cb) {\n result = new this._Promise(function (resolve, reject) {\n cb = (err) => (err ? reject(err) : resolve())\n })\n }\n this.native.end(function () {\n self._errorAllQueries(new Error('Connection terminated'))\n\n process.nextTick(() => {\n self.emit('end')\n if (cb) cb()\n })\n })\n return result\n}\n\nClient.prototype._hasActiveQuery = function () {\n return this._activeQuery && this._activeQuery.state !== 'error' && this._activeQuery.state !== 'end'\n}\n\nClient.prototype._pulseQueryQueue = function (initialConnection) {\n if (!this._connected) {\n return\n }\n if (this._hasActiveQuery()) {\n return\n }\n const query = this._queryQueue.shift()\n if (!query) {\n if (!initialConnection) {\n this.emit('drain')\n }\n return\n }\n this._activeQuery = query\n query.submit(this)\n const self = this\n query.once('_done', function () {\n self._pulseQueryQueue()\n })\n}\n\n// attempt to cancel an in-progress query\nClient.prototype.cancel = function (query) {\n if (this._activeQuery === query) {\n this.native.cancel(function () {})\n } else if (this._queryQueue.indexOf(query) !== -1) {\n this._queryQueue.splice(this._queryQueue.indexOf(query), 1)\n }\n}\n\nClient.prototype.ref = function () {}\nClient.prototype.unref = function () {}\n\nClient.prototype.setTypeParser = function (oid, format, parseFn) {\n return this._types.setTypeParser(oid, format, parseFn)\n}\n\nClient.prototype.getTypeParser = function (oid, format) {\n return this._types.getTypeParser(oid, format)\n}\n",
  "'use strict'\n\nconst Client = require('./client')\nconst defaults = require('./defaults')\nconst Connection = require('./connection')\nconst Result = require('./result')\nconst utils = require('./utils')\nconst Pool = require('pg-pool')\nconst TypeOverrides = require('./type-overrides')\nconst { DatabaseError } = require('pg-protocol')\nconst { escapeIdentifier, escapeLiteral } = require('./utils')\n\nconst poolFactory = (Client) => {\n return class BoundPool extends Pool {\n constructor(options) {\n super(options, Client)\n }\n }\n}\n\nconst PG = function (clientConstructor) {\n this.defaults = defaults\n this.Client = clientConstructor\n this.Query = this.Client.Query\n this.Pool = poolFactory(this.Client)\n this._pools = []\n this.Connection = Connection\n this.types = require('pg-types')\n this.DatabaseError = DatabaseError\n this.TypeOverrides = TypeOverrides\n this.escapeIdentifier = escapeIdentifier\n this.escapeLiteral = escapeLiteral\n this.Result = Result\n this.utils = utils\n}\n\nif (typeof process.env.NODE_PG_FORCE_NATIVE !== 'undefined') {\n module.exports = new PG(require('./native'))\n} else {\n module.exports = new PG(Client)\n\n // lazy require native module...the native module may not have installed\n Object.defineProperty(module.exports, 'native', {\n configurable: true,\n enumerable: false,\n get() {\n let native = null\n try {\n native = new PG(require('./native'))\n } catch (err) {\n if (err.code !== 'MODULE_NOT_FOUND') {\n throw err\n }\n }\n\n // overwrite module.exports.native so that getter is never called again\n Object.defineProperty(module.exports, 'native', {\n value: native,\n })\n\n return native\n },\n })\n}\n",
- "import { createHash } from \"node:crypto\";\nimport {\n Backend,\n type StepStartMetadata,\n type StepCompleteMetadata,\n type StepError,\n type StepRecord,\n type StepState,\n type LogEntry,\n type Event,\n type StepEvent,\n type WorkflowEvent,\n type StepStartedEvent,\n type StepCompletedEvent,\n type StepFailedEvent,\n type StepSkippedEvent,\n type StepScheduledEvent,\n type StepHeartbeatEvent,\n type StepReclaimedEvent,\n type LogEntryEvent,\n type StepRetryingEvent,\n type WorkflowStartedEvent,\n type WorkflowInputValidationEvent,\n type WorkflowCompletedEvent,\n type WorkflowFailedEvent,\n type WorkflowResumedEvent,\n type WorkflowCancelledEvent,\n type WorkflowRetryStartedEvent,\n type RunSubmittedEvent,\n type RunSubmission,\n type RunState,\n type WorkflowMetadata,\n type WorkflowRegistration,\n type WorkflowVersion,\n type StepDefinition,\n type AnalyticsOptions,\n type ErrorAnalysis,\n type RetryAnalysis,\n type SchedulingLatency,\n type StepDuration,\n type WorkflowDuration,\n type WorkerStability,\n type Throughput,\n type QueueDepth,\n type QueueDepthByWorkflow,\n type SuccessRate,\n type AnalyticsSummary,\n safeSerialize,\n eventSchema,\n projectStepRecord,\n projectStepState,\n projectRunStateFromEvents,\n extractLogsFromEvents,\n getCurrentAttemptNumber,\n getVersionIdFromEvents,\n getMicrosecondTimestamp,\n computeErrorAnalysis,\n computeRetryAnalysis,\n computeSchedulingLatency,\n computeStepDuration,\n computeWorkflowDuration,\n computeErrorFingerprints,\n computeWorkerStability,\n computeThroughput,\n computeSuccessRate,\n} from \"@cascade-flow/backend-interface\";\n\nimport { DatabaseClient, createPool } from \"./db.js\";\nimport { runMigrations } from \"./migrations.js\";\nimport type { Pool, PoolClient } from \"pg\";\n\n/**\n * PostgreSQL backend implementation\n * Stores workflow execution state in PostgreSQL using event sourcing\n *\n * Features:\n * - Event sourcing with immutable append-only events\n * - Automatic schema initialization via idempotent migrations\n * - Race-safe step claiming using SELECT FOR UPDATE SKIP LOCKED\n * - Events-as-queue pattern (no separate queue table)\n * - JSONB storage for flexible schema evolution\n */\nexport class PostgresBackend extends Backend {\n private db: DatabaseClient;\n private initialized: boolean = false;\n\n /**\n * Create a new Postgres backend\n *\n * @param connectionString - PostgreSQL connection string (e.g., postgres://user:pass@host/db)\n * @param schema - PostgreSQL schema name (default: 'cascadeflow'). 
Isolated from public schema to avoid conflicts.\n */\n constructor(connectionString: string, schema: string = 'cascadeflow') {\n super();\n const pool = createPool(connectionString);\n this.db = new DatabaseClient(pool, schema);\n }\n\n /**\n * Initialize the backend by running database migrations\n * This should be called by the worker on startup to ensure the database schema is ready.\n * Other components (submit CLI, UI) can skip this and assume the schema already exists.\n */\n async initialize(): Promise<void> {\n if (this.initialized) {\n return;\n }\n\n await runMigrations(this.db.getPool(), this.db.getSchema());\n this.initialized = true;\n }\n\n async backendReady(): Promise<boolean> {\n try {\n const client = await this.db.getPool().connect();\n try {\n await client.query('SELECT 1');\n return true;\n } finally {\n client.release();\n }\n } catch (error) {\n console.error('Backend health check failed:', error);\n return false;\n }\n }\n\n /**\n * Generate a unique run ID\n */\n private generateRunId(): string {\n return `run_${Date.now()}_${Math.random().toString(36).substring(2, 15)}`;\n }\n\n /**\n * Hash an idempotency key\n */\n private hashIdempotencyKey(key: string): string {\n return createHash(\"sha256\").update(key).digest(\"hex\");\n }\n\n /**\n * Generate a unique event ID using microsecond timestamp\n */\n private generateEventId(timestamp?: number): string {\n const ts = timestamp ?? getMicrosecondTimestamp();\n return `${ts}`;\n }\n\n // ============================================================================\n // Run Management\n // ============================================================================\n\n async initializeRun(workflowSlug: string, runId: string): Promise<void> {\n // No explicit initialization needed - events create the run implicitly\n }\n\n async runExists(workflowSlug: string, runId: string): Promise<boolean> {\n return this.db.runExists(workflowSlug, runId);\n }\n\n async loadRun(workflowSlug: string, runId: string): Promise<StepRecord[]> {\n const events = await this.db.loadAllRunEvents(workflowSlug, runId);\n\n // Group events by step\n const stepEvents = new Map<string, Event[]>();\n for (const event of events) {\n if (event.category === \"step\") {\n const stepEvent = event as StepEvent;\n if (!stepEvents.has(stepEvent.stepId)) {\n stepEvents.set(stepEvent.stepId, []);\n }\n stepEvents.get(stepEvent.stepId)!.push(event);\n }\n }\n\n // Project each step's events into a StepRecord\n const records: StepRecord[] = [];\n for (const [stepId, events] of stepEvents) {\n const record = projectStepRecord(events);\n records.push(record);\n }\n\n return records;\n }\n\n getStepOutputPath(workflowSlug: string, runId: string, stepId: string, attemptNumber: number): string {\n // Return a logical path (not used for actual storage in Postgres)\n return `postgres://${workflowSlug}/${runId}/${stepId}/attempt-${attemptNumber}`;\n }\n\n // ============================================================================\n // Event Sourcing\n // ============================================================================\n\n async appendEvent(workflowSlug: string, runId: string, event: Event): Promise<void> {\n\n // Ensure event has ID and timestamp\n if (!event.eventId) {\n (event as any).eventId = this.generateEventId();\n }\n if (!event.timestampUs) {\n (event as any).timestampUs = getMicrosecondTimestamp();\n }\n\n // Validate event\n eventSchema.parse(event);\n\n // Append to appropriate table\n const table = event.category === \"workflow\" ? 
\"workflow_events\" : \"step_events\";\n await this.db.appendEvent(table, event);\n }\n\n async loadEvents(\n workflowSlug: string,\n runId: string,\n options: { category: \"step\"; stepId?: string }\n ): Promise<StepEvent[]>;\n async loadEvents(\n workflowSlug: string,\n runId: string,\n options: { category: \"workflow\" }\n ): Promise<WorkflowEvent[]>;\n async loadEvents(\n workflowSlug: string,\n runId: string,\n options?: { category?: \"workflow\" | \"step\"; stepId?: string }\n ): Promise<Event[]>;\n async loadEvents(\n workflowSlug: string,\n runId: string,\n options?: { category?: \"workflow\" | \"step\"; stepId?: string }\n ): Promise<Event[]> {\n\n if (options?.category === \"workflow\") {\n return this.db.loadEvents(\"workflow_events\", {\n workflowSlug,\n runId,\n });\n } else if (options?.category === \"step\") {\n return this.db.loadEvents(\"step_events\", {\n workflowSlug,\n runId,\n stepId: options.stepId,\n });\n } else {\n // Load all events\n return this.db.loadAllRunEvents(workflowSlug, runId);\n }\n }\n\n async loadStepEventsForProjection(\n workflowSlug: string,\n runId: string\n ): Promise<Map<string, StepEvent[]>> {\n return this.db.loadStepEventsForProjection(workflowSlug, runId);\n }\n\n async copyEntireRun(\n workflowSlug: string,\n sourceRunId: string,\n targetRunId: string\n ): Promise<void> {\n await this.copyEntireRunWithClient(\n workflowSlug,\n sourceRunId,\n targetRunId,\n this.db.getPool()\n );\n }\n\n private async copyEntireRunWithClient(\n workflowSlug: string,\n sourceRunId: string,\n targetRunId: string,\n client: Pool | PoolClient\n ): Promise<void> {\n // Use bulk SQL operations for efficiency\n const schema = this.db.getSchema();\n\n // Copy workflow events - copy all fields except id (auto-generated) and those that need to change\n await client.query(\n `INSERT INTO ${schema}.workflow_events (\n event_id, workflow_slug, run_id, timestamp_us, category, type, event_data, created_at,\n workflow_attempt_number, available_at_us, priority, timeout_us, idempotency_key, version_id\n )\n SELECT\n gen_random_uuid()::text,\n workflow_slug,\n $2,\n timestamp_us,\n category,\n type,\n jsonb_set(event_data, '{runId}', to_jsonb($2::text)),\n created_at,\n workflow_attempt_number,\n available_at_us,\n priority,\n timeout_us,\n idempotency_key,\n version_id\n FROM ${schema}.workflow_events\n WHERE workflow_slug = $1 AND run_id = $3`,\n [workflowSlug, targetRunId, sourceRunId]\n );\n\n // Copy step events - copy all fields except id (auto-generated) and those that need to change\n await client.query(\n `INSERT INTO ${schema}.step_events (\n event_id, workflow_slug, run_id, step_id, timestamp_us, category, type, event_data, created_at,\n worker_id, attempt_number, available_at_us, export_output,\n error_name_hash, error_message_hash, error_stack_exact_hash, error_stack_normalized_hash, error_stack_portable_hash,\n version_id\n )\n SELECT\n gen_random_uuid()::text,\n workflow_slug,\n $2,\n step_id,\n timestamp_us,\n category,\n type,\n jsonb_set(event_data, '{runId}', to_jsonb($2::text)),\n created_at,\n worker_id,\n attempt_number,\n available_at_us,\n export_output,\n error_name_hash,\n error_message_hash,\n error_stack_exact_hash,\n error_stack_normalized_hash,\n error_stack_portable_hash,\n version_id\n FROM ${schema}.step_events\n WHERE workflow_slug = $1 AND run_id = $3`,\n [workflowSlug, targetRunId, sourceRunId]\n );\n }\n\n async deleteStepEvents(\n workflowSlug: string,\n runId: string,\n stepIds: Set<string>\n ): Promise<void> {\n await 
this.deleteStepEventsWithClient(\n workflowSlug,\n runId,\n stepIds,\n this.db.getPool()\n );\n }\n\n private async deleteStepEventsWithClient(\n workflowSlug: string,\n runId: string,\n stepIds: Set<string>,\n client: Pool | PoolClient\n ): Promise<void> {\n if (stepIds.size === 0) return;\n\n const schema = this.db.getSchema();\n const stepIdsArray = Array.from(stepIds);\n\n await client.query(\n `DELETE FROM ${schema}.step_events\n WHERE workflow_slug = $1 AND run_id = $2 AND step_id = ANY($3)`,\n [workflowSlug, runId, stepIdsArray]\n );\n }\n\n async deleteWorkflowTerminalEvents(\n workflowSlug: string,\n runId: string\n ): Promise<void> {\n await this.deleteWorkflowTerminalEventsWithClient(\n workflowSlug,\n runId,\n this.db.getPool()\n );\n }\n\n private async deleteWorkflowTerminalEventsWithClient(\n workflowSlug: string,\n runId: string,\n client: Pool | PoolClient\n ): Promise<void> {\n const schema = this.db.getSchema();\n\n // Delete terminal workflow events to reset workflow state\n await client.query(\n `DELETE FROM ${schema}.workflow_events\n WHERE workflow_slug = $1\n AND run_id = $2\n AND type IN ('WorkflowCompleted', 'WorkflowFailed', 'WorkflowCancelled')`,\n [workflowSlug, runId]\n );\n }\n\n async copyWorkflowEvents(\n workflowSlug: string,\n sourceRunId: string,\n targetRunId: string,\n excludeTerminal: boolean\n ): Promise<void> {\n const pool = this.db.getPool();\n const schema = this.db.getSchema();\n\n // Build WHERE clause for terminal events filter\n const terminalFilter = excludeTerminal\n ? `AND type NOT IN ('WorkflowCompleted', 'WorkflowFailed', 'WorkflowCancelled')`\n : '';\n\n // Copy workflow events with updated run_id and new event_id\n await pool.query(\n `INSERT INTO ${schema}.workflow_events (\n event_id, workflow_slug, run_id, timestamp_us, category, type, event_data, created_at,\n workflow_attempt_number, available_at_us, priority, timeout_us, idempotency_key, version_id\n )\n SELECT\n gen_random_uuid()::text,\n workflow_slug,\n $3,\n timestamp_us,\n category,\n type,\n jsonb_set(event_data, '{runId}', to_jsonb($3::text)),\n created_at,\n workflow_attempt_number,\n available_at_us,\n priority,\n timeout_us,\n idempotency_key,\n version_id\n FROM ${schema}.workflow_events\n WHERE workflow_slug = $1\n AND run_id = $2\n ${terminalFilter}`,\n [workflowSlug, sourceRunId, targetRunId]\n );\n }\n\n async copyStepEvents(\n workflowSlug: string,\n sourceRunId: string,\n targetRunId: string,\n includeStepIds: Set<string>\n ): Promise<void> {\n const pool = this.db.getPool();\n const schema = this.db.getSchema();\n const stepIdsArray = Array.from(includeStepIds);\n\n if (stepIdsArray.length === 0) return;\n\n // Copy step events for specified steps with updated run_id and new event_id\n await pool.query(\n `INSERT INTO ${schema}.step_events (\n event_id, workflow_slug, run_id, step_id, timestamp_us, category, type, event_data, created_at,\n worker_id, attempt_number, available_at_us, export_output,\n error_name_hash, error_message_hash, error_stack_exact_hash, error_stack_normalized_hash, error_stack_portable_hash,\n version_id\n )\n SELECT\n gen_random_uuid()::text,\n workflow_slug,\n $3,\n step_id,\n timestamp_us,\n category,\n type,\n jsonb_set(event_data, '{runId}', to_jsonb($3::text)),\n created_at,\n worker_id,\n attempt_number,\n available_at_us,\n export_output,\n error_name_hash,\n error_message_hash,\n error_stack_exact_hash,\n error_stack_normalized_hash,\n error_stack_portable_hash,\n version_id\n FROM ${schema}.step_events\n WHERE workflow_slug = 
$1\n AND run_id = $2\n AND step_id = ANY($4)`,\n [workflowSlug, sourceRunId, targetRunId, stepIdsArray]\n );\n }\n\n async rerunFrom(params: {\n parentRunId: string;\n fromStepId: string;\n input?: unknown;\n }): Promise<{\n runId: string;\n }> {\n // Get parent run to extract workflow slug\n const parentRun = await this.getRun(params.parentRunId);\n if (!parentRun) {\n throw new Error(`Parent run \"${params.parentRunId}\" not found`);\n }\n\n // Calculate excluded steps from workflow metadata (authoritative source)\n const dependents = await this.calculateDependents(\n parentRun.workflowSlug,\n params.fromStepId\n );\n const rerunStepIds = new Set<string>([params.fromStepId, ...dependents]);\n\n // Generate new run ID\n const newRunId = getMicrosecondTimestamp().toString();\n\n // Use transaction for atomic copy + delete + event emission\n const pool = this.db.getPool();\n const client = await pool.connect();\n\n try {\n await client.query(\"BEGIN\");\n\n // Copy entire run using bulk operations\n await this.copyEntireRunWithClient(\n parentRun.workflowSlug,\n params.parentRunId,\n newRunId,\n client\n );\n\n // Delete events for steps that will be re-executed\n await this.deleteStepEventsWithClient(\n parentRun.workflowSlug,\n newRunId,\n rerunStepIds,\n client\n );\n\n // Delete terminal workflow events to reset workflow to running state\n // This allows workers to pick up the scheduled steps\n await this.deleteWorkflowTerminalEventsWithClient(\n parentRun.workflowSlug,\n newRunId,\n client\n );\n\n await client.query(\"COMMIT\");\n } catch (error) {\n await client.query(\"ROLLBACK\");\n throw error;\n } finally {\n client.release();\n }\n\n // Get current workflow version for the rerun\n const currentVersion = await this.getCurrentWorkflowVersion(parentRun.workflowSlug);\n if (!currentVersion) {\n throw new Error(`Workflow ${parentRun.workflowSlug} not registered. 
Please ensure the worker has started and registered workflows.`);\n }\n\n // Get parent run's version from its WorkflowStarted event\n const parentWorkflowEvents = await this.loadEvents(parentRun.workflowSlug, params.parentRunId, {\n category: \"workflow\",\n }) as WorkflowEvent[];\n const parentVersionId = getVersionIdFromEvents(parentWorkflowEvents);\n\n // Emit WorkflowRerunFromStep event to track rerun metadata\n const timestamp = getMicrosecondTimestamp();\n await this.appendEvent(parentRun.workflowSlug, newRunId, {\n category: \"workflow\",\n type: \"WorkflowRerunFromStep\",\n eventId: this.generateEventId(timestamp),\n timestampUs: timestamp,\n workflowSlug: parentRun.workflowSlug,\n runId: newRunId,\n parentRunId: params.parentRunId,\n rerunFromStepId: params.fromStepId,\n rerunStepIds: Array.from(rerunStepIds),\n versionId: currentVersion.versionId,\n parentVersionId,\n });\n\n // Submit to queue for worker execution (no workflow discovery needed!)\n // The worker will see the copied events and resume from where parent left off\n await this.submitRun({\n workflowSlug: parentRun.workflowSlug,\n runId: newRunId,\n input: params.input,\n });\n\n return {\n runId: newRunId,\n };\n }\n\n // ============================================================================\n // Step Event Methods\n // ============================================================================\n\n async saveStepScheduled(\n workflowSlug: string,\n runId: string,\n stepId: string,\n metadata: {\n availableAt: number;\n reason: \"initial\" | \"retry\" | \"dependency-satisfied\";\n attemptNumber: number;\n retryDelayMs?: number;\n }\n ): Promise<void> {\n\n const now = getMicrosecondTimestamp();\n\n const event: StepScheduledEvent = {\n category: \"step\",\n type: \"StepScheduled\",\n eventId: this.generateEventId(now),\n timestampUs: now,\n workflowSlug,\n runId,\n stepId,\n availableAtUs: metadata.availableAt,\n reason: metadata.reason,\n attemptNumber: metadata.attemptNumber,\n retryDelayMs: metadata.retryDelayMs,\n };\n\n await this.db.appendEvent(\"step_events\", event);\n }\n\n async saveStepStart(\n workflowSlug: string,\n runId: string,\n stepId: string,\n workerId: string,\n metadata: StepStartMetadata\n ): Promise<void> {\n\n // Get current attempt number by counting previous StepStarted events\n const events = await this.loadEvents(workflowSlug, runId, { category: \"step\", stepId });\n const attemptNumber = getCurrentAttemptNumber(events) + 1;\n\n const timestamp = getMicrosecondTimestamp();\n\n const event: StepStartedEvent = {\n category: \"step\",\n type: \"StepStarted\",\n eventId: this.generateEventId(timestamp),\n timestampUs: timestamp,\n workflowSlug,\n runId,\n stepId,\n attemptNumber,\n workerId,\n dependencies: metadata.dependencies,\n };\n\n await this.db.appendEvent(\"step_events\", event);\n }\n\n async saveStepComplete(\n workflowSlug: string,\n runId: string,\n stepId: string,\n output: unknown,\n metadata: StepCompleteMetadata,\n exportOutput: boolean = false\n ): Promise<void> {\n\n // Use the attempt number from the caller (worker knows the correct attempt)\n const attemptNumber = metadata.attemptNumber;\n\n if (attemptNumber === 0) {\n throw new Error(`Cannot complete step that hasn't started: ${stepId}`);\n }\n\n // Append log events first (if any)\n if (metadata.logs && metadata.logs.length > 0) {\n for (const log of metadata.logs) {\n const logTimestamp = log.timestamp;\n const logEvent: LogEntryEvent = {\n category: \"step\",\n eventId: this.generateEventId(logTimestamp),\n 
timestampUs: logTimestamp,\n workflowSlug,\n runId,\n stepId,\n type: \"LogEntry\",\n stream: log.stream,\n message: log.message,\n attemptNumber,\n };\n await this.db.appendEvent(\"step_events\", logEvent);\n }\n }\n\n // Serialize output safely to handle complex values\n const serialized = safeSerialize(output);\n const outputString = serialized.success ? serialized.data : serialized.fallback;\n\n // Use high-resolution timestamp\n const timestamp = getMicrosecondTimestamp();\n\n const event: StepCompletedEvent = {\n category: \"step\",\n type: \"StepCompleted\",\n eventId: this.generateEventId(timestamp),\n timestampUs: timestamp,\n workflowSlug,\n runId,\n stepId,\n output: outputString,\n durationUs: metadata.duration,\n attemptNumber,\n exportOutput,\n };\n\n await this.db.appendEvent(\"step_events\", event);\n }\n\n async saveStepFailed(\n workflowSlug: string,\n runId: string,\n stepId: string,\n error: StepError,\n metadata: {\n duration: number;\n attemptNumber: number;\n terminal: boolean;\n nextRetryAt?: number;\n failureReason: \"exhausted-retries\" | \"worker-crash\" | \"timeout\" | \"cancelled\" | \"execution-error\";\n }\n ): Promise<void> {\n\n const now = getMicrosecondTimestamp();\n\n const event: StepFailedEvent = {\n category: \"step\",\n type: \"StepFailed\",\n eventId: this.generateEventId(now),\n timestampUs: now,\n workflowSlug,\n runId,\n stepId,\n error,\n errorFingerprints: computeErrorFingerprints(error, error.stack),\n durationUs: metadata.duration,\n attemptNumber: metadata.attemptNumber,\n terminal: metadata.terminal,\n nextRetryAtUs: metadata.nextRetryAt,\n failureReason: metadata.failureReason,\n };\n\n await this.db.appendEvent(\"step_events\", event);\n }\n\n async saveStepFailedAndScheduleRetry(\n workflowSlug: string,\n runId: string,\n stepId: string,\n error: StepError,\n failureMetadata: {\n duration: number;\n attemptNumber: number;\n nextRetryAt: number;\n failureReason: \"execution-error\" | \"timeout\";\n policyIndex?: number;\n attemptInPolicy?: number;\n },\n scheduleMetadata: {\n availableAt: number;\n nextAttemptNumber: number;\n retryDelayMs: number;\n maxRetries?: number;\n totalPolicies?: number;\n policyIndex?: number;\n attemptInPolicy?: number;\n }\n ): Promise<void> {\n const client = await this.db.getPool().connect();\n try {\n await client.query(\"BEGIN\");\n\n // Generate timestamps for all three events\n // We need distinct timestamps to ensure proper event ordering\n const failedTimestamp = getMicrosecondTimestamp();\n const retryingTimestamp = failedTimestamp + 1;\n const scheduledTimestamp = failedTimestamp + 2;\n\n // Event 1: StepFailed (terminal: false)\n const failedEvent: StepFailedEvent = {\n category: \"step\",\n type: \"StepFailed\",\n eventId: this.generateEventId(failedTimestamp),\n timestampUs: failedTimestamp,\n workflowSlug,\n runId,\n stepId,\n error,\n errorFingerprints: computeErrorFingerprints(error, error.stack),\n durationUs: failureMetadata.duration,\n attemptNumber: failureMetadata.attemptNumber,\n terminal: false,\n nextRetryAtUs: failureMetadata.nextRetryAt,\n failureReason: failureMetadata.failureReason,\n policyIndex: failureMetadata.policyIndex,\n attemptInPolicy: failureMetadata.attemptInPolicy,\n };\n\n await this.db.appendEventWithClient(client, \"step_events\", failedEvent);\n\n // Event 2: StepRetrying (informational)\n const retryingEvent: StepRetryingEvent = {\n category: \"step\",\n type: \"StepRetrying\",\n eventId: this.generateEventId(retryingTimestamp),\n timestampUs: retryingTimestamp,\n 
workflowSlug,\n runId,\n stepId,\n attemptNumber: failureMetadata.attemptNumber,\n nextAttempt: scheduleMetadata.nextAttemptNumber,\n maxRetries: scheduleMetadata.maxRetries,\n totalPolicies: scheduleMetadata.totalPolicies,\n policyIndex: scheduleMetadata.policyIndex,\n error,\n };\n\n await this.db.appendEventWithClient(client, \"step_events\", retryingEvent);\n\n // Event 3: StepScheduled (for retry)\n const scheduledEvent: StepScheduledEvent = {\n category: \"step\",\n type: \"StepScheduled\",\n eventId: this.generateEventId(scheduledTimestamp),\n timestampUs: scheduledTimestamp,\n workflowSlug,\n runId,\n stepId,\n availableAtUs: scheduleMetadata.availableAt,\n reason: \"retry\",\n attemptNumber: scheduleMetadata.nextAttemptNumber,\n retryDelayMs: scheduleMetadata.retryDelayMs,\n policyIndex: scheduleMetadata.policyIndex,\n attemptInPolicy: scheduleMetadata.attemptInPolicy,\n };\n\n await this.db.appendEventWithClient(client, \"step_events\", scheduledEvent);\n\n await client.query(\"COMMIT\");\n } catch (error) {\n await client.query(\"ROLLBACK\");\n throw error;\n } finally {\n client.release();\n }\n }\n\n async saveStepSkipped(\n workflowSlug: string,\n runId: string,\n stepId: string,\n metadata: {\n skipType: \"primary\" | \"cascade\";\n reason: string;\n metadata?: Record<string, any>;\n duration: number;\n attemptNumber: number;\n cascadedFrom?: string;\n }\n ): Promise<void> {\n\n const now = getMicrosecondTimestamp();\n\n const event: StepSkippedEvent = {\n category: \"step\",\n type: \"StepSkipped\",\n eventId: this.generateEventId(now),\n timestampUs: now,\n workflowSlug,\n runId,\n stepId,\n skipType: metadata.skipType,\n reason: metadata.reason,\n metadata: metadata.metadata,\n durationUs: metadata.duration,\n attemptNumber: metadata.attemptNumber,\n cascadedFrom: metadata.cascadedFrom,\n };\n\n await this.db.appendEvent(\"step_events\", event);\n }\n\n async saveStepCheckpoint(\n workflowSlug: string,\n runId: string,\n stepId: string,\n checkpoint: {\n name: string;\n sequenceNumber: number;\n attemptNumber: number;\n data: string;\n label?: string;\n parentCheckpoint?: { name: string; sequenceNumber: number };\n }\n ): Promise<void> {\n const now = getMicrosecondTimestamp();\n\n const event = {\n category: \"step\" as const,\n type: \"StepCheckpoint\" as const,\n eventId: this.generateEventId(now),\n timestampUs: now,\n workflowSlug,\n runId,\n stepId,\n name: checkpoint.name,\n sequenceNumber: checkpoint.sequenceNumber,\n attemptNumber: checkpoint.attemptNumber,\n data: checkpoint.data,\n ...(checkpoint.label && { label: checkpoint.label }),\n ...(checkpoint.parentCheckpoint && {\n parentCheckpoint: checkpoint.parentCheckpoint,\n }),\n };\n\n await this.db.appendEvent(\"step_events\", event);\n }\n\n async saveStepCheckpointFailed(\n workflowSlug: string,\n runId: string,\n stepId: string,\n checkpoint: {\n name: string;\n sequenceNumber: number;\n attemptNumber: number;\n error: StepError;\n }\n ): Promise<void> {\n const now = getMicrosecondTimestamp();\n\n const event = {\n category: \"step\" as const,\n type: \"StepCheckpointFailed\" as const,\n eventId: this.generateEventId(now),\n timestampUs: now,\n workflowSlug,\n runId,\n stepId,\n name: checkpoint.name,\n sequenceNumber: checkpoint.sequenceNumber,\n attemptNumber: checkpoint.attemptNumber,\n error: checkpoint.error,\n };\n\n await this.db.appendEvent(\"step_events\", event);\n }\n\n async saveStepHeartbeat(\n workflowSlug: string,\n runId: string,\n stepId: string,\n workerId: string,\n attemptNumber: 
number\n ): Promise<void> {\n\n const now = getMicrosecondTimestamp();\n\n const event: StepHeartbeatEvent = {\n category: \"step\",\n type: \"StepHeartbeat\",\n eventId: this.generateEventId(now),\n timestampUs: now,\n workflowSlug,\n runId,\n stepId,\n workerId,\n attemptNumber,\n };\n\n await this.db.appendEvent(\"step_events\", event);\n }\n\n async saveStepReclaimed(\n workflowSlug: string,\n runId: string,\n stepId: string,\n metadata: {\n originalWorkerId: string;\n reclaimedBy: string;\n lastHeartbeat: number;\n staleThreshold: number;\n staleDuration: number;\n attemptNumber: number;\n }\n ): Promise<void> {\n\n const now = getMicrosecondTimestamp();\n\n const event: StepReclaimedEvent = {\n category: \"step\",\n type: \"StepReclaimed\",\n eventId: this.generateEventId(now),\n timestampUs: now,\n workflowSlug,\n runId,\n stepId,\n originalWorkerId: metadata.originalWorkerId,\n reclaimedBy: metadata.reclaimedBy,\n lastHeartbeatUs: metadata.lastHeartbeat,\n staleThresholdUs: metadata.staleThreshold,\n staleDurationUs: metadata.staleDuration,\n attemptNumber: metadata.attemptNumber,\n };\n\n await this.db.appendEvent(\"step_events\", event);\n }\n\n async saveStepLogs(\n workflowSlug: string,\n runId: string,\n stepId: string,\n logs: LogEntry[]\n ): Promise<void> {\n\n // Logs are now saved as LogEntry events during saveStepComplete\n // This method is kept for backward compatibility but is a no-op\n // The logs are already appended as events in saveStepComplete\n }\n\n async loadStepLogs(\n workflowSlug: string,\n runId: string,\n stepId: string,\n attemptNumber?: number\n ): Promise<LogEntry[] | null> {\n\n // Load logs from events\n const events = await this.loadEvents(workflowSlug, runId, { category: \"step\", stepId });\n let logs = extractLogsFromEvents(events);\n\n // Filter by attempt number if specified\n if (attemptNumber !== undefined) {\n logs = logs.filter(log => log.attemptNumber === attemptNumber);\n }\n\n return logs.length > 0 ? 
logs : null;\n }\n\n // ============================================================================\n // Workflow Event Methods\n // ============================================================================\n\n async saveWorkflowStart(\n workflowSlug: string,\n runId: string,\n metadata: { versionId: string; workflowAttemptNumber: number; hasInputSchema: boolean; hasInput: boolean }\n ): Promise<void> {\n\n const timestamp = getMicrosecondTimestamp();\n\n const event: WorkflowStartedEvent = {\n category: \"workflow\",\n type: \"WorkflowStarted\",\n eventId: this.generateEventId(timestamp),\n timestampUs: timestamp,\n workflowSlug,\n runId,\n versionId: metadata.versionId,\n workflowAttemptNumber: metadata.workflowAttemptNumber,\n hasInputSchema: metadata.hasInputSchema,\n hasInput: metadata.hasInput,\n };\n\n await this.db.appendEvent(\"workflow_events\", event);\n }\n\n async saveWorkflowInputValidation(\n workflowSlug: string,\n runId: string,\n result: {\n workflowAttemptNumber: number;\n hasSchema: boolean;\n success: boolean;\n error?: StepError;\n validationErrors?: Array<{ path: string; message: string }>;\n }\n ): Promise<void> {\n\n const timestamp = getMicrosecondTimestamp();\n\n const event: WorkflowInputValidationEvent = {\n category: \"workflow\",\n type: \"WorkflowInputValidation\",\n eventId: this.generateEventId(timestamp),\n timestampUs: timestamp,\n workflowSlug,\n runId,\n ...result,\n };\n\n await this.db.appendEvent(\"workflow_events\", event);\n }\n\n async saveWorkflowComplete(\n workflowSlug: string,\n runId: string,\n output: unknown,\n metadata: { workflowAttemptNumber: number; timestamp: number; duration: number; totalSteps: number }\n ): Promise<void> {\n\n // Use high-resolution timestamp\n const timestamp = getMicrosecondTimestamp();\n\n const event: WorkflowCompletedEvent = {\n category: \"workflow\",\n type: \"WorkflowCompleted\",\n eventId: this.generateEventId(timestamp),\n timestampUs: timestamp,\n workflowSlug,\n runId,\n workflowAttemptNumber: metadata.workflowAttemptNumber,\n output: JSON.stringify(output),\n durationUs: metadata.duration,\n totalSteps: metadata.totalSteps,\n };\n\n await this.db.appendEvent(\"workflow_events\", event);\n }\n\n async saveWorkflowFailed(\n workflowSlug: string,\n runId: string,\n error: StepError,\n metadata: { workflowAttemptNumber: number; duration: number; completedSteps: number; failedStep?: string },\n failureReason: \"step-failed\" | \"worker-crash\" | \"timeout\" | \"cancelled\"\n ): Promise<void> {\n\n const timestamp = getMicrosecondTimestamp();\n\n const event: WorkflowFailedEvent = {\n category: \"workflow\",\n type: \"WorkflowFailed\",\n eventId: this.generateEventId(timestamp),\n timestampUs: timestamp,\n workflowSlug,\n runId,\n workflowAttemptNumber: metadata.workflowAttemptNumber,\n error,\n durationUs: metadata.duration,\n completedSteps: metadata.completedSteps,\n failedStep: metadata.failedStep,\n failureReason,\n };\n\n await this.db.appendEvent(\"workflow_events\", event);\n }\n\n async saveWorkflowResumed(\n workflowSlug: string,\n runId: string,\n metadata: { versionId: string; originalRunId: string; resumedSteps: number; pendingSteps: number }\n ): Promise<void> {\n\n const timestamp = getMicrosecondTimestamp();\n\n const event: WorkflowResumedEvent = {\n category: \"workflow\",\n type: \"WorkflowResumed\",\n eventId: this.generateEventId(timestamp),\n timestampUs: timestamp,\n workflowSlug,\n runId,\n versionId: metadata.versionId,\n originalRunId: metadata.originalRunId,\n resumedSteps: 
metadata.resumedSteps,\n pendingSteps: metadata.pendingSteps,\n };\n\n await this.db.appendEvent(\"workflow_events\", event);\n }\n\n async saveWorkflowCancelled(\n workflowSlug: string,\n runId: string,\n metadata: { workflowAttemptNumber: number; reason?: string; duration: number; completedSteps: number }\n ): Promise<void> {\n\n const timestamp = getMicrosecondTimestamp();\n\n const event: WorkflowCancelledEvent = {\n category: \"workflow\",\n type: \"WorkflowCancelled\",\n eventId: this.generateEventId(timestamp),\n timestampUs: timestamp,\n workflowSlug,\n runId,\n workflowAttemptNumber: metadata.workflowAttemptNumber,\n reason: metadata.reason,\n durationUs: metadata.duration,\n completedSteps: metadata.completedSteps,\n };\n\n await this.db.appendEvent(\"workflow_events\", event);\n }\n\n async saveWorkflowRetryStarted(\n workflowSlug: string,\n runId: string,\n metadata: {\n workflowAttemptNumber: number;\n previousAttemptNumber: number;\n retriedSteps: string[];\n reason?: string;\n }\n ): Promise<void> {\n\n const timestamp = getMicrosecondTimestamp();\n\n const event: WorkflowRetryStartedEvent = {\n category: \"workflow\",\n type: \"WorkflowRetryStarted\",\n eventId: this.generateEventId(timestamp),\n timestampUs: timestamp,\n workflowSlug,\n runId,\n workflowAttemptNumber: metadata.workflowAttemptNumber,\n previousAttemptNumber: metadata.previousAttemptNumber,\n retriedSteps: metadata.retriedSteps,\n reason: metadata.reason,\n };\n\n await this.db.appendEvent(\"workflow_events\", event);\n }\n\n async saveRunSubmitted(\n workflowSlug: string,\n runId: string,\n metadata: {\n versionId: string;\n availableAt: number;\n priority: number;\n input?: string;\n hasInputSchema: boolean;\n timeout?: number;\n idempotencyKey?: string;\n metadata?: Record<string, unknown>;\n tags?: string[];\n }\n ): Promise<void> {\n\n const timestamp = getMicrosecondTimestamp();\n\n const event: RunSubmittedEvent = {\n category: \"workflow\",\n type: \"RunSubmitted\",\n eventId: this.generateEventId(timestamp),\n timestampUs: timestamp,\n workflowSlug,\n runId,\n versionId: metadata.versionId,\n availableAtUs: metadata.availableAt,\n priority: metadata.priority,\n input: metadata.input,\n hasInputSchema: metadata.hasInputSchema,\n timeoutUs: metadata.timeout,\n idempotencyKey: metadata.idempotencyKey,\n metadata: metadata.metadata,\n tags: metadata.tags,\n };\n\n await this.db.appendEvent(\"workflow_events\", event);\n }\n\n // ============================================================================\n // Queue Management\n // ============================================================================\n\n async submitRun(submission: RunSubmission): Promise<{ runId: string; isNew: boolean }> {\n\n // Check idempotency first\n if (submission.idempotencyKey) {\n const hash = this.hashIdempotencyKey(submission.idempotencyKey);\n const proposedRunId = submission.runId || this.generateRunId();\n const existingRunId = await this.db.saveIdempotencyKey(hash, proposedRunId);\n\n // If key already exists, return existing run ID\n if (existingRunId !== proposedRunId) {\n return { runId: existingRunId, isNew: false };\n }\n }\n\n // Generate runId if not provided\n const runId = submission.runId || this.generateRunId();\n const now = getMicrosecondTimestamp();\n const availableAt = submission.availableAt || now;\n const priority = submission.priority || 0;\n\n // Initialize run directory\n await this.initializeRun(submission.workflowSlug, runId);\n\n // Get workflow metadata to determine if input schema 
exists\n const workflowMetadata = await this.getWorkflowMetadata(submission.workflowSlug);\n const hasInputSchema = !!(workflowMetadata?.inputSchemaJSON);\n\n // Get current workflow version (auto-captured at submission time)\n const currentVersion = await this.getCurrentWorkflowVersion(submission.workflowSlug);\n if (!currentVersion) {\n throw new Error(`Workflow ${submission.workflowSlug} not registered. Please ensure the worker has started and registered workflows.`);\n }\n\n // Emit RunSubmitted event\n await this.saveRunSubmitted(submission.workflowSlug, runId, {\n versionId: currentVersion.versionId,\n availableAt,\n priority,\n input: submission.input !== undefined ? JSON.stringify(submission.input) : undefined,\n hasInputSchema,\n timeout: submission.timeout,\n idempotencyKey: submission.idempotencyKey,\n metadata: submission.metadata,\n tags: submission.tags,\n });\n\n return { runId, isNew: true };\n }\n\n async listRuns(options?: {\n workflowSlug?: string;\n status?: RunState[\"status\"][];\n tags?: string[];\n limit?: number;\n }): Promise<RunState[]> {\n // Step 1: Get filtered run identifiers with SQL (single query)\n // Note: We fetch more than limit to account for tag filtering done in JS\n const filteredRuns = await this.db.listRunsFiltered({\n workflowSlug: options?.workflowSlug,\n status: options?.status,\n // Fetch extra if tag filtering needed (tags are filtered in JS)\n limit: options?.tags?.length ? undefined : options?.limit,\n });\n\n // Apply tag filter if specified (AND logic - must have all)\n let runsToLoad = filteredRuns;\n if (options?.tags && options.tags.length > 0) {\n runsToLoad = filteredRuns.filter((run) => {\n const runTags = run.tags || [];\n return options.tags!.every((tag) => runTags.includes(tag));\n });\n // Apply limit after tag filtering\n if (options?.limit) {\n runsToLoad = runsToLoad.slice(0, options.limit);\n }\n }\n\n if (runsToLoad.length === 0) {\n return [];\n }\n\n // Step 2: Load full workflow events for matched runs (single query)\n const eventsByRun = await this.db.loadWorkflowEventsForRuns(\n runsToLoad.map((r) => ({ workflowSlug: r.workflowSlug, runId: r.runId }))\n );\n\n // Step 3: Project each run's events to full RunState\n const allRuns: RunState[] = [];\n for (const run of runsToLoad) {\n const key = `${run.workflowSlug}:${run.runId}`;\n const events = eventsByRun.get(key);\n if (!events || events.length === 0) continue;\n\n try {\n const state = projectRunStateFromEvents(events, run.workflowSlug);\n allRuns.push(state);\n } catch {\n // Skip runs that fail to project (corrupted/incomplete)\n continue;\n }\n }\n\n // Results are already sorted by createdAt DESC from SQL\n return allRuns;\n }\n\n async cancelRun(runId: string, reason?: string): Promise<void> {\n\n // Find the workflow slug for this run\n const allWorkflows = await this.db.listActiveWorkflows();\n\n for (const workflowSlug of allWorkflows) {\n const runIds = await this.db.listRunIds(workflowSlug);\n if (runIds.includes(runId)) {\n // Load events and get created time\n const events = await this.loadEvents(workflowSlug, runId, { category: \"workflow\" });\n if (events.length === 0) continue;\n\n const state = projectRunStateFromEvents(events, workflowSlug);\n\n // Calculate duration\n const duration = getMicrosecondTimestamp() - state.createdAt;\n\n // Count completed steps\n const stepRecords = await this.loadRun(workflowSlug, runId);\n const completedSteps = stepRecords.filter((r) => r.status === \"completed\").length;\n\n // Emit WorkflowCancelled event\n 
await this.saveWorkflowCancelled(workflowSlug, runId, {\n workflowAttemptNumber: state.workflowAttemptNumber || 1,\n reason,\n duration,\n completedSteps,\n });\n\n return;\n }\n }\n\n throw new Error(`Run ${runId} not found`);\n }\n\n async getRun(runId: string): Promise<RunState | null> {\n\n // Find the workflow slug for this run\n const allWorkflows = await this.db.listActiveWorkflows();\n\n for (const workflowSlug of allWorkflows) {\n const runIds = await this.db.listRunIds(workflowSlug);\n if (runIds.includes(runId)) {\n const workflowEvents = await this.loadEvents(workflowSlug, runId, { category: \"workflow\" });\n return projectRunStateFromEvents(workflowEvents, workflowSlug);\n }\n }\n\n return null;\n }\n\n async getFailedSteps(\n workflowSlug: string,\n runId: string\n ): Promise<\n Array<{\n stepId: string;\n error: StepError;\n attemptNumber: number;\n }>\n > {\n\n // Load all step events for this run\n const events = await this.loadEvents(workflowSlug, runId, { category: \"step\" });\n\n if (events.length === 0 || !(\"category\" in events[0]!)) {\n return [];\n }\n\n // Group events by step ID\n const eventsByStep = new Map<string, StepEvent[]>();\n for (const event of events) {\n if (event.category === \"step\") {\n const stepEvents = eventsByStep.get(event.stepId) || [];\n stepEvents.push(event as StepEvent);\n eventsByStep.set(event.stepId, stepEvents);\n }\n }\n\n // Project each step's state and filter for failed steps\n const failedSteps: Array<{\n stepId: string;\n error: StepError;\n attemptNumber: number;\n }> = [];\n\n for (const [stepId, stepEvents] of eventsByStep) {\n const state = projectStepState(stepEvents, workflowSlug);\n if (state.status === \"failed\" && state.terminal && state.error) {\n failedSteps.push({\n stepId,\n error: state.error,\n attemptNumber: state.attemptNumber,\n });\n }\n }\n\n return failedSteps;\n }\n\n // ============================================================================\n // Step-Level Distribution\n // ============================================================================\n\n async listActiveWorkflows(): Promise<string[]> {\n return this.db.listActiveWorkflows();\n }\n\n async listScheduledSteps(options?: {\n availableBefore?: number;\n workflowSlug?: string;\n limit?: number;\n }): Promise<Array<{ workflowSlug: string; runId: string; stepId: string }>> {\n\n // Convert single workflowSlug to array for db layer\n const dbOptions = {\n workflowSlugs: options?.workflowSlug ? 
[options.workflowSlug] : undefined,\n limit: options?.limit,\n };\n\n // Note: availableBefore filtering would need to be implemented in db layer\n // For now, we just pass through the other options\n return this.db.listScheduledSteps(dbOptions);\n }\n\n async isStepClaimable(workflowSlug: string, runId: string, stepId: string): Promise<boolean> {\n\n const events = await this.db.loadEvents(\"step_events\", {\n workflowSlug,\n runId,\n stepId,\n });\n\n if (events.length === 0) {\n return false;\n }\n\n // Get the latest event\n const latestEvent = events[events.length - 1];\n\n // Step is claimable if latest event is StepScheduled, StepReclaimed, or StepRetrying\n return !!(\n latestEvent &&\n (latestEvent.type === \"StepScheduled\" ||\n latestEvent.type === \"StepReclaimed\" ||\n latestEvent.type === \"StepRetrying\")\n );\n }\n\n async claimScheduledStep(\n workflowSlug: string,\n runId: string,\n stepId: string,\n workerId: string,\n metadata: StepStartMetadata\n ): Promise<{ attemptNumber: number } | null> {\n\n const initialEvents = await this.loadEvents(workflowSlug, runId, { category: \"step\", stepId });\n\n if (initialEvents.length === 0) {\n return null;\n }\n\n const now = getMicrosecondTimestamp();\n const initialState = projectStepState(initialEvents as StepEvent[], workflowSlug);\n\n if (\n initialState.status !== \"scheduled\" ||\n initialState.availableAt === undefined ||\n initialState.availableAt > now\n ) {\n return null;\n }\n\n const attemptNumber = initialState.attemptNumber;\n const timestamp = getMicrosecondTimestamp();\n\n const event: StepStartedEvent = {\n category: \"step\",\n type: \"StepStarted\",\n eventId: this.generateEventId(timestamp),\n timestampUs: timestamp,\n workflowSlug,\n runId,\n stepId,\n workerId,\n dependencies: metadata.dependencies,\n attemptNumber,\n };\n\n // Attempt atomic claim via db layer\n const claimed = await this.db.claimScheduledStep(workflowSlug, runId, stepId, workerId, event);\n\n return claimed ? 
{ attemptNumber } : null;\n }\n\n async reclaimStaleSteps(\n staleThreshold: number,\n reclaimedBy: string\n ): Promise<Array<{ workflowSlug: string; runId: string; stepId: string }>> {\n\n const reclaimed: Array<{ workflowSlug: string; runId: string; stepId: string }> = [];\n const now = getMicrosecondTimestamp();\n\n const staleSteps = await this.db.findStaleSteps(staleThreshold);\n\n for (const step of staleSteps) {\n const events = await this.loadEvents(step.workflowSlug, step.runId, { category: \"step\", stepId: step.stepId });\n\n if (events.length === 0) continue;\n\n const state = projectStepState(events as StepEvent[], step.workflowSlug);\n\n // Only reclaim running steps\n if (state.status !== \"running\") continue;\n\n // Check if heartbeat is stale\n const lastHeartbeat = state.lastHeartbeat || state.startTime || 0;\n const staleDuration = now - lastHeartbeat;\n\n if (staleDuration > staleThreshold) {\n // Emit StepReclaimed event\n await this.saveStepReclaimed(step.workflowSlug, step.runId, step.stepId, {\n originalWorkerId: state.claimedBy || \"unknown\",\n reclaimedBy,\n lastHeartbeat,\n staleThreshold,\n staleDuration,\n attemptNumber: state.attemptNumber,\n });\n\n // Re-schedule the step with incremented attempt number\n await this.saveStepScheduled(step.workflowSlug, step.runId, step.stepId, {\n availableAt: now, // Immediately available\n reason: \"retry\",\n attemptNumber: state.attemptNumber + 1, // Increment attempt - reclamation is a new attempt\n retryDelayMs: 0,\n });\n\n reclaimed.push({ workflowSlug: step.workflowSlug, runId: step.runId, stepId: step.stepId });\n }\n }\n\n return reclaimed;\n }\n\n // ============================================================================\n // Workflow Registration\n // ============================================================================\n\n async registerWorkflow(registration: WorkflowRegistration): Promise<void> {\n\n // Save metadata\n await this.db.upsertWorkflowMetadata(\n registration.slug,\n registration.name,\n registration.location,\n registration.inputSchemaJSON\n );\n\n // Save step definitions\n for (const step of registration.steps) {\n await this.db.upsertStepDefinition(registration.slug, step);\n }\n }\n\n async getWorkflowMetadata(slug: string): Promise<WorkflowMetadata | null> {\n return this.db.getWorkflowMetadata(slug);\n }\n\n async listWorkflowMetadata(): Promise<WorkflowMetadata[]> {\n return this.db.listWorkflowMetadata();\n }\n\n async getWorkflowSteps(slug: string): Promise<StepDefinition[]> {\n return this.db.getWorkflowSteps(slug);\n }\n\n async listRunIds(workflowSlug: string): Promise<string[]> {\n return this.db.listRunIds(workflowSlug);\n }\n\n // ============================================================================\n // Workflow Version Management Methods\n // ============================================================================\n\n async createWorkflowVersion(version: Omit<WorkflowVersion, 'versionNumber'>): Promise<void> {\n await this.db.getPool().query(`\n INSERT INTO ${this.db.getSchema()}.workflow_versions\n (workflow_slug, version_id, created_at, step_manifest, total_steps, git_commit, git_dirty, git_branch)\n VALUES ($1, $2, to_timestamp($3 / 1000000.0), $4, $5, $6, $7, $8)\n ON CONFLICT (workflow_slug, version_id) DO NOTHING\n `, [\n version.workflowSlug,\n version.versionId,\n version.createdAt,\n version.stepManifest,\n version.totalSteps,\n version.git?.commit,\n version.git?.dirty,\n version.git?.branch,\n ]);\n }\n\n async getWorkflowVersion(workflowSlug: 
string, versionId: string): Promise<WorkflowVersion | null> {\n const row = await this.db.getWorkflowVersion(workflowSlug, versionId);\n\n if (!row) return null;\n\n return {\n workflowSlug: row.workflow_slug,\n versionId: row.version_id,\n versionNumber: parseInt(row.version_number.toString(), 10),\n createdAt: Math.floor(new Date(row.created_at).getTime() * 1000), // Convert to μs\n stepManifest: row.step_manifest,\n totalSteps: row.total_steps,\n git: row.git_commit && row.git_dirty !== null && row.git_branch !== null ? {\n commit: row.git_commit,\n dirty: row.git_dirty,\n branch: row.git_branch,\n } : undefined,\n };\n }\n\n async getCurrentWorkflowVersion(workflowSlug: string): Promise<WorkflowVersion | null> {\n const row = await this.db.getCurrentWorkflowVersion(workflowSlug);\n\n if (!row) return null;\n\n return {\n workflowSlug: row.workflow_slug,\n versionId: row.version_id,\n versionNumber: parseInt(row.version_number.toString(), 10),\n createdAt: Math.floor(new Date(row.created_at).getTime() * 1000),\n stepManifest: row.step_manifest,\n totalSteps: row.total_steps,\n git: row.git_commit && row.git_dirty !== null && row.git_branch !== null ? {\n commit: row.git_commit,\n dirty: row.git_dirty,\n branch: row.git_branch,\n } : undefined,\n };\n }\n\n async listWorkflowVersions(workflowSlug: string, options?: { limit?: number }): Promise<WorkflowVersion[]> {\n const limit = options?.limit ?? 100;\n const rows = await this.db.listWorkflowVersions(workflowSlug, limit);\n\n return rows.map((row) => ({\n workflowSlug: row.workflow_slug,\n versionId: row.version_id,\n versionNumber: parseInt(row.version_number.toString(), 10),\n createdAt: Math.floor(new Date(row.created_at).getTime() * 1000),\n stepManifest: row.step_manifest,\n totalSteps: row.total_steps,\n git: row.git_commit && row.git_dirty !== null && row.git_branch !== null ? {\n commit: row.git_commit,\n dirty: row.git_dirty,\n branch: row.git_branch,\n } : undefined,\n }));\n }\n\n /**\n * Close the database connection pool\n * Call this when shutting down the application\n */\n async close(): Promise<void> {\n await this.db.getPool().end();\n }\n\n // ============================================================================\n // Analytics Methods\n // ============================================================================\n\n /**\n * Helper to load all events within a time range and optional filters\n * Uses raw SQL queries for better performance with large datasets\n */\n private async loadEventsForAnalytics(\n options?: AnalyticsOptions\n ): Promise<{ stepEvents: StepEvent[]; workflowEvents: WorkflowEvent[] }> {\n const now = getMicrosecondTimestamp();\n const startUs = options?.startUs ?? now - 24 * 60 * 60 * 1000 * 1000; // Default: 24 hours ago\n const endUs = options?.endUs ?? 
now;\n\n // Build step events query\n let stepQuery = `\n SELECT event_data, event_id\n FROM ${this.db.getSchema()}.step_events\n WHERE timestamp_us >= $1 AND timestamp_us <= $2\n `;\n const stepParams: any[] = [startUs, endUs];\n let paramIndex = 3;\n\n if (options?.workflowSlug) {\n stepQuery += ` AND workflow_slug = $${paramIndex}`;\n stepParams.push(options.workflowSlug);\n paramIndex++;\n }\n\n if (options?.stepId) {\n stepQuery += ` AND step_id = $${paramIndex}`;\n stepParams.push(options.stepId);\n paramIndex++;\n }\n\n if (options?.runIds && options.runIds.length > 0) {\n const runIdsPlaceholder = `$${paramIndex}`;\n stepQuery += ` AND run_id = ANY(${runIdsPlaceholder})`;\n stepParams.push(options.runIds);\n paramIndex++;\n }\n\n stepQuery += ` ORDER BY timestamp_us ASC`;\n\n // Build workflow events query\n let workflowQuery = `\n SELECT event_data, event_id\n FROM ${this.db.getSchema()}.workflow_events\n WHERE timestamp_us >= $1 AND timestamp_us <= $2\n `;\n const workflowParams: any[] = [startUs, endUs];\n paramIndex = 3;\n\n if (options?.workflowSlug) {\n workflowQuery += ` AND workflow_slug = $${paramIndex}`;\n workflowParams.push(options.workflowSlug);\n paramIndex++;\n }\n\n if (options?.runIds && options.runIds.length > 0) {\n const runIdsPlaceholder = `$${paramIndex}`;\n workflowQuery += ` AND run_id = ANY(${runIdsPlaceholder})`;\n workflowParams.push(options.runIds);\n paramIndex++;\n }\n\n workflowQuery += ` ORDER BY timestamp_us ASC`;\n\n // Execute queries in parallel\n const [stepResult, workflowResult] = await Promise.all([\n this.db.getPool().query(stepQuery, stepParams),\n options?.stepId\n ? Promise.resolve({ rows: [] }) // Skip workflow events if filtering by stepId\n : this.db.getPool().query(workflowQuery, workflowParams),\n ]);\n\n // Inject event_id from column into event object (single source of truth)\n const stepEvents = stepResult.rows.map((row) => ({ ...row.event_data, eventId: row.event_id }) satisfies StepEvent);\n const workflowEvents = workflowResult.rows.map(\n (row) => ({ ...row.event_data, eventId: row.event_id }) satisfies WorkflowEvent\n );\n\n return { stepEvents, workflowEvents };\n }\n\n async getErrorAnalysis(options?: AnalyticsOptions): Promise<ErrorAnalysis> {\n const { stepEvents } = await this.loadEventsForAnalytics(options);\n return computeErrorAnalysis(\n stepEvents,\n options?.workflowSlug,\n options?.stepId\n );\n }\n\n async getErrorsList(options?: {\n timeRange?: { start: number; end: number };\n workflowSlug?: string;\n groupingStrategy?: 'exact' | 'normalized' | 'portable';\n limit?: number;\n offset?: number;\n }): Promise<{\n errors: Array<{\n fingerprint: string;\n errorMessage: string;\n errorName: string;\n sampleStack: string;\n count: number;\n affectedRuns: number;\n firstSeen: number;\n lastSeen: number;\n }>;\n total: number;\n }> {\n const strategy = options?.groupingStrategy || 'exact';\n const limit = options?.limit || 50;\n const offset = options?.offset || 0;\n\n // Choose which stack hash column to use based on strategy\n const stackHashColumn =\n strategy === 'exact'\n ? 'error_stack_exact_hash'\n : strategy === 'normalized'\n ? 
'error_stack_normalized_hash'\n : 'error_stack_portable_hash';\n\n // Build WHERE clause\n const conditions: string[] = [\"type = 'StepFailed'\"];\n const params: any[] = [];\n let paramIndex = 1;\n\n if (options?.timeRange) {\n conditions.push(`timestamp_us >= $${paramIndex}`);\n params.push(options.timeRange.start);\n paramIndex++;\n\n conditions.push(`timestamp_us <= $${paramIndex}`);\n params.push(options.timeRange.end);\n paramIndex++;\n }\n\n if (options?.workflowSlug) {\n conditions.push(`workflow_slug = $${paramIndex}`);\n params.push(options?.workflowSlug);\n paramIndex++;\n }\n\n const whereClause = conditions.length > 0 ? `WHERE ${conditions.join(' AND ')}` : '';\n\n // Get total count (without pagination)\n const countQuery = `\n SELECT COUNT(DISTINCT CONCAT(error_name_hash, ':', error_message_hash, ':', ${stackHashColumn})) as total\n FROM ${this.db.getSchema()}.step_events\n ${whereClause}\n `;\n\n const countResult = await this.db.getPool().query(countQuery, params);\n const total = parseInt(countResult.rows[0]?.total || '0', 10);\n\n // Get paginated errors with aggregations\n const query = `\n SELECT\n CONCAT(error_name_hash, ':', error_message_hash, ':', ${stackHashColumn}) as fingerprint,\n MIN((event_data->'error'->>'message')) as error_message,\n MIN((event_data->'error'->>'name')) as error_name,\n MIN((event_data->'error'->>'stack')) as sample_stack,\n COUNT(*) as count,\n COUNT(DISTINCT run_id) as affected_runs,\n MIN(timestamp_us) as first_seen,\n MAX(timestamp_us) as last_seen\n FROM ${this.db.getSchema()}.step_events\n ${whereClause}\n GROUP BY\n error_name_hash,\n error_message_hash,\n ${stackHashColumn}\n ORDER BY count DESC\n LIMIT $${paramIndex} OFFSET $${paramIndex + 1}\n `;\n\n const result = await this.db.getPool().query(query, [...params, limit, offset]);\n\n const errors = result.rows.map((row) => ({\n fingerprint: row.fingerprint,\n errorMessage: row.error_message || '',\n errorName: row.error_name || 'Error',\n sampleStack: row.sample_stack || '',\n count: parseInt(row.count, 10),\n affectedRuns: parseInt(row.affected_runs, 10),\n firstSeen: parseInt(row.first_seen, 10),\n lastSeen: parseInt(row.last_seen, 10),\n }));\n\n return { errors, total };\n }\n\n async getErrorDetail(\n fingerprint: string,\n groupingStrategy: 'exact' | 'normalized' | 'portable',\n options?: {\n timeRange?: { start: number; end: number };\n limit?: number;\n offset?: number;\n }\n ): Promise<{\n fingerprint: string;\n errorMessage: string;\n errorName: string;\n sampleStack: string;\n totalCount: number;\n affectedRuns: number;\n firstSeen: number;\n lastSeen: number;\n occurrences: Array<{\n workflowSlug: string;\n runId: string;\n stepId: string;\n attemptNumber: number;\n timestampUs: number;\n }>;\n total: number;\n }> {\n // Parse fingerprint into components\n const parts = fingerprint.split(':');\n if (parts.length !== 3) {\n throw new Error(`Invalid fingerprint format: ${fingerprint}`);\n }\n\n const [nameHash, messageHash, stackHash] = parts;\n\n // Choose which stack hash column to use\n const stackHashColumn =\n groupingStrategy === 'exact'\n ? 'error_stack_exact_hash'\n : groupingStrategy === 'normalized'\n ? 
'error_stack_normalized_hash'\n : 'error_stack_portable_hash';\n\n const limit = options?.limit || 100;\n const offset = options?.offset || 0;\n\n // Build WHERE clause\n const conditions: string[] = [\n \"type = 'StepFailed'\",\n `error_name_hash = $1`,\n `error_message_hash = $2`,\n `${stackHashColumn} = $3`,\n ];\n const params: any[] = [nameHash, messageHash, stackHash];\n let paramIndex = 4;\n\n if (options?.timeRange) {\n conditions.push(`timestamp_us >= $${paramIndex}`);\n params.push(options.timeRange.start);\n paramIndex++;\n\n conditions.push(`timestamp_us <= $${paramIndex}`);\n params.push(options.timeRange.end);\n paramIndex++;\n }\n\n const whereClause = `WHERE ${conditions.join(' AND ')}`;\n\n // Get aggregated stats and sample error\n const statsQuery = `\n SELECT\n MIN((event_data->'error'->>'message')) as error_message,\n MIN((event_data->'error'->>'name')) as error_name,\n MIN((event_data->'error'->>'stack')) as sample_stack,\n COUNT(*) as total_count,\n COUNT(DISTINCT run_id) as affected_runs,\n MIN(timestamp_us) as first_seen,\n MAX(timestamp_us) as last_seen\n FROM ${this.db.getSchema()}.step_events\n ${whereClause}\n `;\n\n const statsResult = await this.db.getPool().query(statsQuery, params.slice(0, paramIndex - 1));\n\n if (statsResult.rows.length === 0) {\n // No matching errors found\n return {\n fingerprint,\n errorMessage: '',\n errorName: '',\n sampleStack: '',\n totalCount: 0,\n affectedRuns: 0,\n firstSeen: 0,\n lastSeen: 0,\n occurrences: [],\n total: 0,\n };\n }\n\n const stats = statsResult.rows[0];\n\n // Get paginated occurrences\n const occurrencesQuery = `\n SELECT\n workflow_slug,\n run_id,\n step_id,\n (event_data->>'attemptNumber')::int as attempt_number,\n timestamp_us\n FROM ${this.db.getSchema()}.step_events\n ${whereClause}\n ORDER BY timestamp_us DESC\n LIMIT $${paramIndex} OFFSET $${paramIndex + 1}\n `;\n\n const occurrencesResult = await this.db.getPool().query(occurrencesQuery, [\n ...params.slice(0, paramIndex - 1),\n limit,\n offset,\n ]);\n\n const occurrences = occurrencesResult.rows.map((row) => ({\n workflowSlug: row.workflow_slug,\n runId: row.run_id,\n stepId: row.step_id,\n attemptNumber: row.attempt_number,\n timestampUs: parseInt(row.timestamp_us, 10),\n }));\n\n return {\n fingerprint,\n errorMessage: stats.error_message || '',\n errorName: stats.error_name || 'Error',\n sampleStack: stats.sample_stack || '',\n totalCount: parseInt(stats.total_count, 10),\n affectedRuns: parseInt(stats.affected_runs, 10),\n firstSeen: parseInt(stats.first_seen, 10),\n lastSeen: parseInt(stats.last_seen, 10),\n occurrences,\n total: parseInt(stats.total_count, 10),\n };\n }\n\n async getRetryAnalysis(options?: AnalyticsOptions): Promise<RetryAnalysis> {\n const { stepEvents } = await this.loadEventsForAnalytics(options);\n return computeRetryAnalysis(stepEvents);\n }\n\n async getSchedulingLatency(\n options?: AnalyticsOptions\n ): Promise<SchedulingLatency> {\n const { stepEvents } = await this.loadEventsForAnalytics(options);\n return computeSchedulingLatency(\n stepEvents,\n options?.workflowSlug,\n options?.stepId\n );\n }\n\n async getStepDuration(options?: AnalyticsOptions): Promise<StepDuration> {\n const { stepEvents } = await this.loadEventsForAnalytics(options);\n return computeStepDuration(\n stepEvents,\n options?.workflowSlug,\n options?.stepId\n );\n }\n\n async getWorkflowDuration(\n options?: AnalyticsOptions\n ): Promise<WorkflowDuration> {\n const { workflowEvents } = await this.loadEventsForAnalytics(options);\n return 
computeWorkflowDuration(workflowEvents, options?.workflowSlug);\n }\n\n async getWorkerStability(options?: AnalyticsOptions): Promise<WorkerStability> {\n const { stepEvents } = await this.loadEventsForAnalytics(options);\n return computeWorkerStability(stepEvents);\n }\n\n async getThroughput(options?: AnalyticsOptions): Promise<Throughput> {\n const { stepEvents, workflowEvents } = await this.loadEventsForAnalytics(options);\n\n const now = getMicrosecondTimestamp();\n const startUs = options?.startUs ?? now - 24 * 60 * 60 * 1000 * 1000;\n const endUs = options?.endUs ?? now;\n const timeRangeUs = endUs - startUs;\n\n return computeThroughput(\n stepEvents,\n workflowEvents,\n timeRangeUs,\n options?.workflowSlug\n );\n }\n\n async getQueueDepth(\n options?: Pick<AnalyticsOptions, \"workflowSlug\">\n ): Promise<QueueDepth> {\n const result = await this.db.getQueueDepthAggregation(options?.workflowSlug);\n\n return {\n workflowSlug: options?.workflowSlug,\n pendingRuns: result.pendingRuns,\n runningRuns: result.runningRuns,\n scheduledSteps: result.scheduledSteps,\n runningSteps: result.runningSteps,\n oldestScheduledStepUs: result.oldestScheduledStepUs ?? undefined,\n oldestPendingRunUs: result.oldestPendingRunUs ?? undefined,\n };\n }\n\n async getQueueDepthByWorkflow(): Promise<QueueDepthByWorkflow> {\n const [aggregation, allMetadata] = await Promise.all([\n this.db.getQueueDepthByWorkflowAggregation(),\n this.listWorkflowMetadata(),\n ]);\n\n const metadataMap = new Map(allMetadata.map((m) => [m.slug, m]));\n\n return aggregation\n .map((item) => ({\n workflowSlug: item.workflowSlug,\n workflowName: metadataMap.get(item.workflowSlug)?.name,\n pendingRuns: item.pendingRuns,\n scheduledSteps: item.scheduledSteps,\n oldestPendingItemUs: item.oldestPendingItemUs ?? undefined,\n }))\n .sort((a, b) => {\n const aTotal = a.pendingRuns + a.scheduledSteps;\n const bTotal = b.pendingRuns + b.scheduledSteps;\n return bTotal - aTotal;\n });\n }\n\n async getSuccessRate(options?: AnalyticsOptions): Promise<SuccessRate> {\n const { stepEvents, workflowEvents } = await this.loadEventsForAnalytics(options);\n return computeSuccessRate(\n stepEvents,\n workflowEvents,\n options?.workflowSlug,\n options?.stepId\n );\n }\n\n async getAnalyticsSummary(\n options?: AnalyticsOptions\n ): Promise<AnalyticsSummary> {\n const now = getMicrosecondTimestamp();\n const startUs = options?.startUs ?? now - 24 * 60 * 60 * 1000 * 1000;\n const endUs = options?.endUs ?? now;\n\n // Run all analytics in parallel\n const [\n errorAnalysis,\n retryAnalysis,\n schedulingLatency,\n stepDuration,\n workflowDuration,\n workerStability,\n throughput,\n queueDepth,\n successRate,\n ] = await Promise.all([\n this.getErrorAnalysis(options),\n this.getRetryAnalysis(options),\n this.getSchedulingLatency(options),\n this.getStepDuration(options),\n this.getWorkflowDuration(options),\n this.getWorkerStability(options),\n this.getThroughput(options),\n this.getQueueDepth(options),\n this.getSuccessRate(options),\n ]);\n\n return {\n timeRange: {\n startUs,\n endUs,\n durationUs: endUs - startUs,\n },\n errorAnalysis,\n retryAnalysis,\n schedulingLatency,\n stepDuration,\n workflowDuration,\n workerStability,\n throughput,\n queueDepth,\n successRate,\n };\n }\n}\n",
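The retry path in the source above (`saveStepFailedAndScheduleRetry`) appends three events — StepFailed with `terminal: false`, StepRetrying, StepScheduled — in a single transaction, bumping the microsecond timestamp by +1 and +2 between them so the events sort deterministically; since event IDs in this backend are derived from the timestamp, distinct timestamps also keep the IDs unique. A minimal standalone sketch of that write pattern, assuming a simplified `step_events` table and a `microNow()` helper (both stand-ins for illustration, not the package's actual schema):

```ts
import { Pool } from "pg";

// Stand-in event shape for illustration only; the real package has richer,
// typed StepFailedEvent / StepRetryingEvent / StepScheduledEvent interfaces.
interface MiniStepEvent {
  type: "StepFailed" | "StepRetrying" | "StepScheduled";
  timestampUs: number;
  stepId: string;
  [key: string]: unknown;
}

// Millisecond clock scaled to microseconds -- the real getMicrosecondTimestamp()
// helper has genuine microsecond resolution.
const microNow = (): number => Date.now() * 1000;

// Append failed -> retrying -> scheduled atomically, with strictly increasing
// timestamps so any reader that orders by timestamp_us replays them in write order.
async function appendRetryTriple(
  pool: Pool,
  stepId: string,
  availableAtUs: number
): Promise<void> {
  const client = await pool.connect();
  try {
    await client.query("BEGIN");
    const base = microNow();
    const events: MiniStepEvent[] = [
      { type: "StepFailed", timestampUs: base, stepId, terminal: false },
      { type: "StepRetrying", timestampUs: base + 1, stepId },
      { type: "StepScheduled", timestampUs: base + 2, stepId, availableAtUs, reason: "retry" },
    ];
    for (const event of events) {
      // Assumed table layout: step_events(step_id, timestamp_us, type, event_data JSONB).
      await client.query(
        `INSERT INTO step_events (step_id, timestamp_us, type, event_data)
         VALUES ($1, $2, $3, $4)`,
        [event.stepId, event.timestampUs, event.type, JSON.stringify(event)]
      );
    }
    await client.query("COMMIT");
  } catch (err) {
    await client.query("ROLLBACK");
    throw err;
  } finally {
    client.release();
  }
}
```

Because the whole triple commits or rolls back together, a crash between the failure record and the reschedule can never strand a step in a half-retried state — which is why the source routes this path through an explicit client rather than three independent appendEvent calls.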
45
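On the added side below, `submitRun` carries the other pattern worth pulling out: the idempotency key is SHA-256-hashed, a run ID is proposed, and the backend keeps whichever run ID first claimed that hash, returning `isNew: false` for replays. A sketch of that claim under stated assumptions — the `idempotency_keys` table and `claimIdempotencyKey` helper are hypothetical; the package itself delegates this to its `DatabaseClient.saveIdempotencyKey`:

```ts
import { createHash, randomUUID } from "node:crypto";
import { Pool } from "pg";

// Upsert the (key hash -> run id) mapping and return whichever run id won the race.
// Assumed table: idempotency_keys(key_hash TEXT PRIMARY KEY, run_id TEXT NOT NULL).
async function claimIdempotencyKey(
  pool: Pool,
  keyHash: string,
  proposedRunId: string
): Promise<string> {
  // The no-op DO UPDATE makes RETURNING yield the existing row on conflict,
  // where DO NOTHING would return no row at all.
  const result = await pool.query(
    `INSERT INTO idempotency_keys (key_hash, run_id)
     VALUES ($1, $2)
     ON CONFLICT (key_hash) DO UPDATE SET run_id = idempotency_keys.run_id
     RETURNING run_id`,
    [keyHash, proposedRunId]
  );
  return result.rows[0].run_id;
}

async function submitIdempotent(
  pool: Pool,
  idempotencyKey: string
): Promise<{ runId: string; isNew: boolean }> {
  // Generate the run id once and thread it through both the key claim and the
  // run itself, so the id stored against the key always matches the real run.
  const proposedRunId = `run_${Date.now()}_${randomUUID().slice(0, 8)}`;
  const hash = createHash("sha256").update(idempotencyKey).digest("hex");
  const winner = await claimIdempotencyKey(pool, hash, proposedRunId);
  return { runId: winner, isNew: winner === proposedRunId };
}
```

Generating the ID once matters here: if a fresh ID were drawn again after the claim, a brand-new submission could end up running under a different ID than the one recorded against its key.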
+ "import { createHash } from \"node:crypto\";\nimport {\n Backend,\n type StepStartMetadata,\n type StepCompleteMetadata,\n type StepError,\n type StepRecord,\n type StepState,\n type LogEntry,\n type Event,\n type StepEvent,\n type WorkflowEvent,\n type StepStartedEvent,\n type StepCompletedEvent,\n type StepFailedEvent,\n type StepSkippedEvent,\n type StepScheduledEvent,\n type StepHeartbeatEvent,\n type StepReclaimedEvent,\n type LogEntryEvent,\n type StepRetryingEvent,\n type WorkflowStartedEvent,\n type WorkflowInputValidationEvent,\n type WorkflowCompletedEvent,\n type WorkflowFailedEvent,\n type WorkflowResumedEvent,\n type WorkflowCancelledEvent,\n type WorkflowRetryStartedEvent,\n type RunSubmittedEvent,\n type RunSubmission,\n type RunState,\n type WorkflowMetadata,\n type WorkflowRegistration,\n type WorkflowVersion,\n type StepDefinition,\n type AnalyticsOptions,\n type ErrorAnalysis,\n type RetryAnalysis,\n type SchedulingLatency,\n type StepDuration,\n type WorkflowDuration,\n type WorkerStability,\n type Throughput,\n type QueueDepth,\n type QueueDepthByWorkflow,\n type SuccessRate,\n type AnalyticsSummary,\n safeSerialize,\n eventSchema,\n projectStepRecord,\n projectStepState,\n projectRunStateFromEvents,\n extractLogsFromEvents,\n getCurrentAttemptNumber,\n getVersionIdFromEvents,\n getMicrosecondTimestamp,\n computeErrorAnalysis,\n computeRetryAnalysis,\n computeSchedulingLatency,\n computeStepDuration,\n computeWorkflowDuration,\n computeErrorFingerprints,\n computeWorkerStability,\n computeThroughput,\n computeSuccessRate,\n} from \"@cascade-flow/backend-interface\";\n\nimport { DatabaseClient, createPool } from \"./db.js\";\nimport { runMigrations } from \"./migrations.js\";\nimport type { Pool, PoolClient } from \"pg\";\n\n/**\n * PostgreSQL backend implementation\n * Stores workflow execution state in PostgreSQL using event sourcing\n *\n * Features:\n * - Event sourcing with immutable append-only events\n * - Automatic schema initialization via idempotent migrations\n * - Race-safe step claiming using SELECT FOR UPDATE SKIP LOCKED\n * - Events-as-queue pattern (no separate queue table)\n * - JSONB storage for flexible schema evolution\n */\nexport class PostgresBackend extends Backend {\n private db: DatabaseClient;\n private initialized: boolean = false;\n\n /**\n * Create a new Postgres backend\n *\n * @param connectionString - PostgreSQL connection string (e.g., postgres://user:pass@host/db)\n * @param schema - PostgreSQL schema name (default: 'cascadeflow'). 
Isolated from public schema to avoid conflicts.\n */\n constructor(connectionString: string, schema: string = 'cascadeflow') {\n super();\n const pool = createPool(connectionString);\n this.db = new DatabaseClient(pool, schema);\n }\n\n /**\n * Initialize the backend by running database migrations\n * This should be called by the worker on startup to ensure the database schema is ready.\n * Other components (submit CLI, UI) can skip this and assume the schema already exists.\n */\n async initialize(): Promise<void> {\n if (this.initialized) {\n return;\n }\n\n await runMigrations(this.db.getPool(), this.db.getSchema());\n this.initialized = true;\n }\n\n async backendReady(): Promise<boolean> {\n try {\n const client = await this.db.getPool().connect();\n try {\n await client.query('SELECT 1');\n return true;\n } finally {\n client.release();\n }\n } catch (error) {\n console.error('Backend health check failed:', error);\n return false;\n }\n }\n\n /**\n * Generate a unique run ID\n */\n private generateRunId(): string {\n return `run_${Date.now()}_${Math.random().toString(36).substring(2, 15)}`;\n }\n\n /**\n * Hash an idempotency key\n */\n private hashIdempotencyKey(key: string): string {\n return createHash(\"sha256\").update(key).digest(\"hex\");\n }\n\n /**\n * Generate a unique event ID using microsecond timestamp\n */\n private generateEventId(timestamp?: number): string {\n const ts = timestamp ?? getMicrosecondTimestamp();\n return `${ts}`;\n }\n\n // ============================================================================\n // Run Management\n // ============================================================================\n\n async initializeRun(workflowSlug: string, runId: string): Promise<void> {\n // No explicit initialization needed - events create the run implicitly\n }\n\n async runExists(workflowSlug: string, runId: string): Promise<boolean> {\n return this.db.runExists(workflowSlug, runId);\n }\n\n async loadRun(workflowSlug: string, runId: string): Promise<StepRecord[]> {\n const events = await this.db.loadAllRunEvents(workflowSlug, runId);\n\n // Group events by step\n const stepEvents = new Map<string, Event[]>();\n for (const event of events) {\n if (event.category === \"step\") {\n const stepEvent = event as StepEvent;\n if (!stepEvents.has(stepEvent.stepId)) {\n stepEvents.set(stepEvent.stepId, []);\n }\n stepEvents.get(stepEvent.stepId)!.push(event);\n }\n }\n\n // Project each step's events into a StepRecord\n const records: StepRecord[] = [];\n for (const [stepId, events] of stepEvents) {\n const record = projectStepRecord(events);\n records.push(record);\n }\n\n return records;\n }\n\n getStepOutputPath(workflowSlug: string, runId: string, stepId: string, attemptNumber: number): string {\n // Return a logical path (not used for actual storage in Postgres)\n return `postgres://${workflowSlug}/${runId}/${stepId}/attempt-${attemptNumber}`;\n }\n\n // ============================================================================\n // Event Sourcing\n // ============================================================================\n\n async appendEvent(workflowSlug: string, runId: string, event: Event): Promise<void> {\n\n // Ensure event has ID and timestamp\n if (!event.eventId) {\n (event as any).eventId = this.generateEventId();\n }\n if (!event.timestampUs) {\n (event as any).timestampUs = getMicrosecondTimestamp();\n }\n\n // Validate event\n eventSchema.parse(event);\n\n // Append to appropriate table\n const table = event.category === \"workflow\" ? 
\"workflow_events\" : \"step_events\";\n await this.db.appendEvent(table, event);\n }\n\n async loadEvents(\n workflowSlug: string,\n runId: string,\n options: { category: \"step\"; stepId?: string }\n ): Promise<StepEvent[]>;\n async loadEvents(\n workflowSlug: string,\n runId: string,\n options: { category: \"workflow\" }\n ): Promise<WorkflowEvent[]>;\n async loadEvents(\n workflowSlug: string,\n runId: string,\n options?: { category?: \"workflow\" | \"step\"; stepId?: string }\n ): Promise<Event[]>;\n async loadEvents(\n workflowSlug: string,\n runId: string,\n options?: { category?: \"workflow\" | \"step\"; stepId?: string }\n ): Promise<Event[]> {\n\n if (options?.category === \"workflow\") {\n return this.db.loadEvents(\"workflow_events\", {\n workflowSlug,\n runId,\n });\n } else if (options?.category === \"step\") {\n return this.db.loadEvents(\"step_events\", {\n workflowSlug,\n runId,\n stepId: options.stepId,\n });\n } else {\n // Load all events\n return this.db.loadAllRunEvents(workflowSlug, runId);\n }\n }\n\n async loadStepEventsForProjection(\n workflowSlug: string,\n runId: string\n ): Promise<Map<string, StepEvent[]>> {\n return this.db.loadStepEventsForProjection(workflowSlug, runId);\n }\n\n async copyEntireRun(\n workflowSlug: string,\n sourceRunId: string,\n targetRunId: string\n ): Promise<void> {\n await this.copyEntireRunWithClient(\n workflowSlug,\n sourceRunId,\n targetRunId,\n this.db.getPool()\n );\n }\n\n private async copyEntireRunWithClient(\n workflowSlug: string,\n sourceRunId: string,\n targetRunId: string,\n client: Pool | PoolClient\n ): Promise<void> {\n // Use bulk SQL operations for efficiency\n const schema = this.db.getSchema();\n\n // Copy workflow events - copy all fields except id (auto-generated) and those that need to change\n await client.query(\n `INSERT INTO ${schema}.workflow_events (\n event_id, workflow_slug, run_id, timestamp_us, category, type, event_data, created_at,\n workflow_attempt_number, available_at_us, priority, timeout_us, idempotency_key, version_id\n )\n SELECT\n gen_random_uuid()::text,\n workflow_slug,\n $2,\n timestamp_us,\n category,\n type,\n jsonb_set(event_data, '{runId}', to_jsonb($2::text)),\n created_at,\n workflow_attempt_number,\n available_at_us,\n priority,\n timeout_us,\n idempotency_key,\n version_id\n FROM ${schema}.workflow_events\n WHERE workflow_slug = $1 AND run_id = $3`,\n [workflowSlug, targetRunId, sourceRunId]\n );\n\n // Copy step events - copy all fields except id (auto-generated) and those that need to change\n await client.query(\n `INSERT INTO ${schema}.step_events (\n event_id, workflow_slug, run_id, step_id, timestamp_us, category, type, event_data, created_at,\n worker_id, attempt_number, available_at_us, export_output,\n error_name_hash, error_message_hash, error_stack_exact_hash, error_stack_normalized_hash, error_stack_portable_hash,\n version_id\n )\n SELECT\n gen_random_uuid()::text,\n workflow_slug,\n $2,\n step_id,\n timestamp_us,\n category,\n type,\n jsonb_set(event_data, '{runId}', to_jsonb($2::text)),\n created_at,\n worker_id,\n attempt_number,\n available_at_us,\n export_output,\n error_name_hash,\n error_message_hash,\n error_stack_exact_hash,\n error_stack_normalized_hash,\n error_stack_portable_hash,\n version_id\n FROM ${schema}.step_events\n WHERE workflow_slug = $1 AND run_id = $3`,\n [workflowSlug, targetRunId, sourceRunId]\n );\n }\n\n async deleteStepEvents(\n workflowSlug: string,\n runId: string,\n stepIds: Set<string>\n ): Promise<void> {\n await 
this.deleteStepEventsWithClient(\n workflowSlug,\n runId,\n stepIds,\n this.db.getPool()\n );\n }\n\n private async deleteStepEventsWithClient(\n workflowSlug: string,\n runId: string,\n stepIds: Set<string>,\n client: Pool | PoolClient\n ): Promise<void> {\n if (stepIds.size === 0) return;\n\n const schema = this.db.getSchema();\n const stepIdsArray = Array.from(stepIds);\n\n await client.query(\n `DELETE FROM ${schema}.step_events\n WHERE workflow_slug = $1 AND run_id = $2 AND step_id = ANY($3)`,\n [workflowSlug, runId, stepIdsArray]\n );\n }\n\n async deleteWorkflowTerminalEvents(\n workflowSlug: string,\n runId: string\n ): Promise<void> {\n await this.deleteWorkflowTerminalEventsWithClient(\n workflowSlug,\n runId,\n this.db.getPool()\n );\n }\n\n private async deleteWorkflowTerminalEventsWithClient(\n workflowSlug: string,\n runId: string,\n client: Pool | PoolClient\n ): Promise<void> {\n const schema = this.db.getSchema();\n\n // Delete terminal workflow events to reset workflow state\n await client.query(\n `DELETE FROM ${schema}.workflow_events\n WHERE workflow_slug = $1\n AND run_id = $2\n AND type IN ('WorkflowCompleted', 'WorkflowFailed', 'WorkflowCancelled')`,\n [workflowSlug, runId]\n );\n }\n\n async copyWorkflowEvents(\n workflowSlug: string,\n sourceRunId: string,\n targetRunId: string,\n excludeTerminal: boolean\n ): Promise<void> {\n const pool = this.db.getPool();\n const schema = this.db.getSchema();\n\n // Build WHERE clause for terminal events filter\n const terminalFilter = excludeTerminal\n ? `AND type NOT IN ('WorkflowCompleted', 'WorkflowFailed', 'WorkflowCancelled')`\n : '';\n\n // Copy workflow events with updated run_id and new event_id\n await pool.query(\n `INSERT INTO ${schema}.workflow_events (\n event_id, workflow_slug, run_id, timestamp_us, category, type, event_data, created_at,\n workflow_attempt_number, available_at_us, priority, timeout_us, idempotency_key, version_id\n )\n SELECT\n gen_random_uuid()::text,\n workflow_slug,\n $3,\n timestamp_us,\n category,\n type,\n jsonb_set(event_data, '{runId}', to_jsonb($3::text)),\n created_at,\n workflow_attempt_number,\n available_at_us,\n priority,\n timeout_us,\n idempotency_key,\n version_id\n FROM ${schema}.workflow_events\n WHERE workflow_slug = $1\n AND run_id = $2\n ${terminalFilter}`,\n [workflowSlug, sourceRunId, targetRunId]\n );\n }\n\n async copyStepEvents(\n workflowSlug: string,\n sourceRunId: string,\n targetRunId: string,\n includeStepIds: Set<string>\n ): Promise<void> {\n const pool = this.db.getPool();\n const schema = this.db.getSchema();\n const stepIdsArray = Array.from(includeStepIds);\n\n if (stepIdsArray.length === 0) return;\n\n // Copy step events for specified steps with updated run_id and new event_id\n await pool.query(\n `INSERT INTO ${schema}.step_events (\n event_id, workflow_slug, run_id, step_id, timestamp_us, category, type, event_data, created_at,\n worker_id, attempt_number, available_at_us, export_output,\n error_name_hash, error_message_hash, error_stack_exact_hash, error_stack_normalized_hash, error_stack_portable_hash,\n version_id\n )\n SELECT\n gen_random_uuid()::text,\n workflow_slug,\n $3,\n step_id,\n timestamp_us,\n category,\n type,\n jsonb_set(event_data, '{runId}', to_jsonb($3::text)),\n created_at,\n worker_id,\n attempt_number,\n available_at_us,\n export_output,\n error_name_hash,\n error_message_hash,\n error_stack_exact_hash,\n error_stack_normalized_hash,\n error_stack_portable_hash,\n version_id\n FROM ${schema}.step_events\n WHERE workflow_slug = 
$1\n AND run_id = $2\n AND step_id = ANY($4)`,\n [workflowSlug, sourceRunId, targetRunId, stepIdsArray]\n );\n }\n\n async rerunFrom(params: {\n parentRunId: string;\n fromStepId: string;\n input?: unknown;\n }): Promise<{\n runId: string;\n }> {\n // Get parent run to extract workflow slug\n const parentRun = await this.getRun(params.parentRunId);\n if (!parentRun) {\n throw new Error(`Parent run \"${params.parentRunId}\" not found`);\n }\n\n // Calculate excluded steps from workflow metadata (authoritative source)\n const dependents = await this.calculateDependents(\n parentRun.workflowSlug,\n params.fromStepId\n );\n const rerunStepIds = new Set<string>([params.fromStepId, ...dependents]);\n\n // Generate new run ID\n const newRunId = getMicrosecondTimestamp().toString();\n\n // Use transaction for atomic copy + delete + event emission\n const pool = this.db.getPool();\n const client = await pool.connect();\n\n try {\n await client.query(\"BEGIN\");\n\n // Copy entire run using bulk operations\n await this.copyEntireRunWithClient(\n parentRun.workflowSlug,\n params.parentRunId,\n newRunId,\n client\n );\n\n // Delete events for steps that will be re-executed\n await this.deleteStepEventsWithClient(\n parentRun.workflowSlug,\n newRunId,\n rerunStepIds,\n client\n );\n\n // Delete terminal workflow events to reset workflow to running state\n // This allows workers to pick up the scheduled steps\n await this.deleteWorkflowTerminalEventsWithClient(\n parentRun.workflowSlug,\n newRunId,\n client\n );\n\n await client.query(\"COMMIT\");\n } catch (error) {\n await client.query(\"ROLLBACK\");\n throw error;\n } finally {\n client.release();\n }\n\n // Get current workflow version for the rerun\n const currentVersion = await this.getCurrentWorkflowVersion(parentRun.workflowSlug);\n if (!currentVersion) {\n throw new Error(`Workflow ${parentRun.workflowSlug} not registered. 
Please ensure the worker has started and registered workflows.`);\n }\n\n // Get parent run's version from its WorkflowStarted event\n const parentWorkflowEvents = await this.loadEvents(parentRun.workflowSlug, params.parentRunId, {\n category: \"workflow\",\n }) as WorkflowEvent[];\n const parentVersionId = getVersionIdFromEvents(parentWorkflowEvents);\n\n // Emit WorkflowRerunFromStep event to track rerun metadata\n const timestamp = getMicrosecondTimestamp();\n await this.appendEvent(parentRun.workflowSlug, newRunId, {\n category: \"workflow\",\n type: \"WorkflowRerunFromStep\",\n eventId: this.generateEventId(timestamp),\n timestampUs: timestamp,\n workflowSlug: parentRun.workflowSlug,\n runId: newRunId,\n parentRunId: params.parentRunId,\n rerunFromStepId: params.fromStepId,\n rerunStepIds: Array.from(rerunStepIds),\n versionId: currentVersion.versionId,\n parentVersionId,\n });\n\n // Submit to queue for worker execution (no workflow discovery needed!)\n // The worker will see the copied events and resume from where parent left off\n await this.submitRun({\n workflowSlug: parentRun.workflowSlug,\n runId: newRunId,\n input: params.input,\n });\n\n return {\n runId: newRunId,\n };\n }\n\n // ============================================================================\n // Step Event Methods\n // ============================================================================\n\n async saveStepScheduled(\n workflowSlug: string,\n runId: string,\n stepId: string,\n metadata: {\n availableAt: number;\n reason: \"initial\" | \"retry\" | \"dependency-satisfied\";\n attemptNumber: number;\n retryDelayMs?: number;\n }\n ): Promise<void> {\n\n const now = getMicrosecondTimestamp();\n\n const event: StepScheduledEvent = {\n category: \"step\",\n type: \"StepScheduled\",\n eventId: this.generateEventId(now),\n timestampUs: now,\n workflowSlug,\n runId,\n stepId,\n availableAtUs: metadata.availableAt,\n reason: metadata.reason,\n attemptNumber: metadata.attemptNumber,\n retryDelayMs: metadata.retryDelayMs,\n };\n\n await this.db.appendEvent(\"step_events\", event);\n }\n\n async saveStepStart(\n workflowSlug: string,\n runId: string,\n stepId: string,\n workerId: string,\n metadata: StepStartMetadata\n ): Promise<void> {\n\n // Get current attempt number by counting previous StepStarted events\n const events = await this.loadEvents(workflowSlug, runId, { category: \"step\", stepId });\n const attemptNumber = getCurrentAttemptNumber(events) + 1;\n\n const timestamp = getMicrosecondTimestamp();\n\n const event: StepStartedEvent = {\n category: \"step\",\n type: \"StepStarted\",\n eventId: this.generateEventId(timestamp),\n timestampUs: timestamp,\n workflowSlug,\n runId,\n stepId,\n attemptNumber,\n workerId,\n dependencies: metadata.dependencies,\n };\n\n await this.db.appendEvent(\"step_events\", event);\n }\n\n async saveStepComplete(\n workflowSlug: string,\n runId: string,\n stepId: string,\n output: unknown,\n metadata: StepCompleteMetadata,\n exportOutput: boolean = false\n ): Promise<void> {\n\n // Use the attempt number from the caller (worker knows the correct attempt)\n const attemptNumber = metadata.attemptNumber;\n\n if (attemptNumber === 0) {\n throw new Error(`Cannot complete step that hasn't started: ${stepId}`);\n }\n\n // Append log events first (if any)\n if (metadata.logs && metadata.logs.length > 0) {\n for (const log of metadata.logs) {\n const logTimestamp = log.timestamp;\n const logEvent: LogEntryEvent = {\n category: \"step\",\n eventId: this.generateEventId(logTimestamp),\n 
timestampUs: logTimestamp,\n workflowSlug,\n runId,\n stepId,\n type: \"LogEntry\",\n stream: log.stream,\n message: log.message,\n attemptNumber,\n };\n await this.db.appendEvent(\"step_events\", logEvent);\n }\n }\n\n // Serialize output safely to handle complex values\n const serialized = safeSerialize(output);\n const outputString = serialized.success ? serialized.data : serialized.fallback;\n\n // Use high-resolution timestamp\n const timestamp = getMicrosecondTimestamp();\n\n const event: StepCompletedEvent = {\n category: \"step\",\n type: \"StepCompleted\",\n eventId: this.generateEventId(timestamp),\n timestampUs: timestamp,\n workflowSlug,\n runId,\n stepId,\n output: outputString,\n durationUs: metadata.duration,\n attemptNumber,\n exportOutput,\n };\n\n await this.db.appendEvent(\"step_events\", event);\n }\n\n async saveStepFailed(\n workflowSlug: string,\n runId: string,\n stepId: string,\n error: StepError,\n metadata: {\n duration: number;\n attemptNumber: number;\n terminal: boolean;\n nextRetryAt?: number;\n failureReason: \"exhausted-retries\" | \"worker-crash\" | \"timeout\" | \"cancelled\" | \"execution-error\" | \"step-removed\";\n }\n ): Promise<void> {\n\n const now = getMicrosecondTimestamp();\n\n const event: StepFailedEvent = {\n category: \"step\",\n type: \"StepFailed\",\n eventId: this.generateEventId(now),\n timestampUs: now,\n workflowSlug,\n runId,\n stepId,\n error,\n errorFingerprints: computeErrorFingerprints(error, error.stack),\n durationUs: metadata.duration,\n attemptNumber: metadata.attemptNumber,\n terminal: metadata.terminal,\n nextRetryAtUs: metadata.nextRetryAt,\n failureReason: metadata.failureReason,\n };\n\n await this.db.appendEvent(\"step_events\", event);\n }\n\n async saveStepFailedAndScheduleRetry(\n workflowSlug: string,\n runId: string,\n stepId: string,\n error: StepError,\n failureMetadata: {\n duration: number;\n attemptNumber: number;\n nextRetryAt: number;\n failureReason: \"execution-error\" | \"timeout\";\n policyIndex?: number;\n attemptInPolicy?: number;\n },\n scheduleMetadata: {\n availableAt: number;\n nextAttemptNumber: number;\n retryDelayMs: number;\n maxRetries?: number;\n totalPolicies?: number;\n policyIndex?: number;\n attemptInPolicy?: number;\n }\n ): Promise<void> {\n const client = await this.db.getPool().connect();\n try {\n await client.query(\"BEGIN\");\n\n // Generate timestamps for all three events\n // We need distinct timestamps to ensure proper event ordering\n const failedTimestamp = getMicrosecondTimestamp();\n const retryingTimestamp = failedTimestamp + 1;\n const scheduledTimestamp = failedTimestamp + 2;\n\n // Event 1: StepFailed (terminal: false)\n const failedEvent: StepFailedEvent = {\n category: \"step\",\n type: \"StepFailed\",\n eventId: this.generateEventId(failedTimestamp),\n timestampUs: failedTimestamp,\n workflowSlug,\n runId,\n stepId,\n error,\n errorFingerprints: computeErrorFingerprints(error, error.stack),\n durationUs: failureMetadata.duration,\n attemptNumber: failureMetadata.attemptNumber,\n terminal: false,\n nextRetryAtUs: failureMetadata.nextRetryAt,\n failureReason: failureMetadata.failureReason,\n policyIndex: failureMetadata.policyIndex,\n attemptInPolicy: failureMetadata.attemptInPolicy,\n };\n\n await this.db.appendEventWithClient(client, \"step_events\", failedEvent);\n\n // Event 2: StepRetrying (informational)\n const retryingEvent: StepRetryingEvent = {\n category: \"step\",\n type: \"StepRetrying\",\n eventId: this.generateEventId(retryingTimestamp),\n timestampUs: 
retryingTimestamp,\n workflowSlug,\n runId,\n stepId,\n attemptNumber: failureMetadata.attemptNumber,\n nextAttempt: scheduleMetadata.nextAttemptNumber,\n maxRetries: scheduleMetadata.maxRetries,\n totalPolicies: scheduleMetadata.totalPolicies,\n policyIndex: scheduleMetadata.policyIndex,\n error,\n };\n\n await this.db.appendEventWithClient(client, \"step_events\", retryingEvent);\n\n // Event 3: StepScheduled (for retry)\n const scheduledEvent: StepScheduledEvent = {\n category: \"step\",\n type: \"StepScheduled\",\n eventId: this.generateEventId(scheduledTimestamp),\n timestampUs: scheduledTimestamp,\n workflowSlug,\n runId,\n stepId,\n availableAtUs: scheduleMetadata.availableAt,\n reason: \"retry\",\n attemptNumber: scheduleMetadata.nextAttemptNumber,\n retryDelayMs: scheduleMetadata.retryDelayMs,\n policyIndex: scheduleMetadata.policyIndex,\n attemptInPolicy: scheduleMetadata.attemptInPolicy,\n };\n\n await this.db.appendEventWithClient(client, \"step_events\", scheduledEvent);\n\n await client.query(\"COMMIT\");\n } catch (error) {\n await client.query(\"ROLLBACK\");\n throw error;\n } finally {\n client.release();\n }\n }\n\n async saveStepSkipped(\n workflowSlug: string,\n runId: string,\n stepId: string,\n metadata: {\n skipType: \"primary\" | \"cascade\";\n reason: string;\n metadata?: Record<string, any>;\n duration: number;\n attemptNumber: number;\n cascadedFrom?: string;\n }\n ): Promise<void> {\n\n const now = getMicrosecondTimestamp();\n\n const event: StepSkippedEvent = {\n category: \"step\",\n type: \"StepSkipped\",\n eventId: this.generateEventId(now),\n timestampUs: now,\n workflowSlug,\n runId,\n stepId,\n skipType: metadata.skipType,\n reason: metadata.reason,\n metadata: metadata.metadata,\n durationUs: metadata.duration,\n attemptNumber: metadata.attemptNumber,\n cascadedFrom: metadata.cascadedFrom,\n };\n\n await this.db.appendEvent(\"step_events\", event);\n }\n\n async saveStepCheckpoint(\n workflowSlug: string,\n runId: string,\n stepId: string,\n checkpoint: {\n name: string;\n sequenceNumber: number;\n attemptNumber: number;\n data: string;\n label?: string;\n parentCheckpoint?: { name: string; sequenceNumber: number };\n }\n ): Promise<void> {\n const now = getMicrosecondTimestamp();\n\n const event = {\n category: \"step\" as const,\n type: \"StepCheckpoint\" as const,\n eventId: this.generateEventId(now),\n timestampUs: now,\n workflowSlug,\n runId,\n stepId,\n name: checkpoint.name,\n sequenceNumber: checkpoint.sequenceNumber,\n attemptNumber: checkpoint.attemptNumber,\n data: checkpoint.data,\n ...(checkpoint.label && { label: checkpoint.label }),\n ...(checkpoint.parentCheckpoint && {\n parentCheckpoint: checkpoint.parentCheckpoint,\n }),\n };\n\n await this.db.appendEvent(\"step_events\", event);\n }\n\n async saveStepCheckpointFailed(\n workflowSlug: string,\n runId: string,\n stepId: string,\n checkpoint: {\n name: string;\n sequenceNumber: number;\n attemptNumber: number;\n error: StepError;\n }\n ): Promise<void> {\n const now = getMicrosecondTimestamp();\n\n const event = {\n category: \"step\" as const,\n type: \"StepCheckpointFailed\" as const,\n eventId: this.generateEventId(now),\n timestampUs: now,\n workflowSlug,\n runId,\n stepId,\n name: checkpoint.name,\n sequenceNumber: checkpoint.sequenceNumber,\n attemptNumber: checkpoint.attemptNumber,\n error: checkpoint.error,\n };\n\n await this.db.appendEvent(\"step_events\", event);\n }\n\n async saveStepHeartbeat(\n workflowSlug: string,\n runId: string,\n stepId: string,\n workerId: string,\n 
attemptNumber: number\n ): Promise<void> {\n\n const now = getMicrosecondTimestamp();\n\n const event: StepHeartbeatEvent = {\n category: \"step\",\n type: \"StepHeartbeat\",\n eventId: this.generateEventId(now),\n timestampUs: now,\n workflowSlug,\n runId,\n stepId,\n workerId,\n attemptNumber,\n };\n\n await this.db.appendEvent(\"step_events\", event);\n }\n\n async saveStepReclaimed(\n workflowSlug: string,\n runId: string,\n stepId: string,\n metadata: {\n originalWorkerId: string;\n reclaimedBy: string;\n lastHeartbeat: number;\n staleThreshold: number;\n staleDuration: number;\n attemptNumber: number;\n }\n ): Promise<void> {\n\n const now = getMicrosecondTimestamp();\n\n const event: StepReclaimedEvent = {\n category: \"step\",\n type: \"StepReclaimed\",\n eventId: this.generateEventId(now),\n timestampUs: now,\n workflowSlug,\n runId,\n stepId,\n originalWorkerId: metadata.originalWorkerId,\n reclaimedBy: metadata.reclaimedBy,\n lastHeartbeatUs: metadata.lastHeartbeat,\n staleThresholdUs: metadata.staleThreshold,\n staleDurationUs: metadata.staleDuration,\n attemptNumber: metadata.attemptNumber,\n };\n\n await this.db.appendEvent(\"step_events\", event);\n }\n\n async saveStepLogs(\n workflowSlug: string,\n runId: string,\n stepId: string,\n logs: LogEntry[]\n ): Promise<void> {\n\n // Logs are now saved as LogEntry events during saveStepComplete\n // This method is kept for backward compatibility but is a no-op\n // The logs are already appended as events in saveStepComplete\n }\n\n async loadStepLogs(\n workflowSlug: string,\n runId: string,\n stepId: string,\n attemptNumber?: number\n ): Promise<LogEntry[] | null> {\n\n // Load logs from events\n const events = await this.loadEvents(workflowSlug, runId, { category: \"step\", stepId });\n let logs = extractLogsFromEvents(events);\n\n // Filter by attempt number if specified\n if (attemptNumber !== undefined) {\n logs = logs.filter(log => log.attemptNumber === attemptNumber);\n }\n\n return logs.length > 0 ? 
logs : null;\n }\n\n // ============================================================================\n // Workflow Event Methods\n // ============================================================================\n\n async saveWorkflowStart(\n workflowSlug: string,\n runId: string,\n metadata: { versionId: string; workflowAttemptNumber: number; hasInputSchema: boolean; hasInput: boolean }\n ): Promise<void> {\n\n const timestamp = getMicrosecondTimestamp();\n\n const event: WorkflowStartedEvent = {\n category: \"workflow\",\n type: \"WorkflowStarted\",\n eventId: this.generateEventId(timestamp),\n timestampUs: timestamp,\n workflowSlug,\n runId,\n versionId: metadata.versionId,\n workflowAttemptNumber: metadata.workflowAttemptNumber,\n hasInputSchema: metadata.hasInputSchema,\n hasInput: metadata.hasInput,\n };\n\n await this.db.appendEvent(\"workflow_events\", event);\n }\n\n async saveWorkflowInputValidation(\n workflowSlug: string,\n runId: string,\n result: {\n workflowAttemptNumber: number;\n hasSchema: boolean;\n success: boolean;\n error?: StepError;\n validationErrors?: Array<{ path: string; message: string }>;\n }\n ): Promise<void> {\n\n const timestamp = getMicrosecondTimestamp();\n\n const event: WorkflowInputValidationEvent = {\n category: \"workflow\",\n type: \"WorkflowInputValidation\",\n eventId: this.generateEventId(timestamp),\n timestampUs: timestamp,\n workflowSlug,\n runId,\n ...result,\n };\n\n await this.db.appendEvent(\"workflow_events\", event);\n }\n\n async saveWorkflowComplete(\n workflowSlug: string,\n runId: string,\n output: unknown,\n metadata: { workflowAttemptNumber: number; timestamp: number; duration: number; totalSteps: number }\n ): Promise<void> {\n\n // Use high-resolution timestamp\n const timestamp = getMicrosecondTimestamp();\n\n const event: WorkflowCompletedEvent = {\n category: \"workflow\",\n type: \"WorkflowCompleted\",\n eventId: this.generateEventId(timestamp),\n timestampUs: timestamp,\n workflowSlug,\n runId,\n workflowAttemptNumber: metadata.workflowAttemptNumber,\n output: JSON.stringify(output),\n durationUs: metadata.duration,\n totalSteps: metadata.totalSteps,\n };\n\n await this.db.appendEvent(\"workflow_events\", event);\n }\n\n async saveWorkflowFailed(\n workflowSlug: string,\n runId: string,\n error: StepError,\n metadata: { workflowAttemptNumber: number; duration: number; completedSteps: number; failedStep?: string },\n failureReason: \"step-failed\" | \"worker-crash\" | \"timeout\" | \"cancelled\"\n ): Promise<void> {\n\n const timestamp = getMicrosecondTimestamp();\n\n const event: WorkflowFailedEvent = {\n category: \"workflow\",\n type: \"WorkflowFailed\",\n eventId: this.generateEventId(timestamp),\n timestampUs: timestamp,\n workflowSlug,\n runId,\n workflowAttemptNumber: metadata.workflowAttemptNumber,\n error,\n durationUs: metadata.duration,\n completedSteps: metadata.completedSteps,\n failedStep: metadata.failedStep,\n failureReason,\n };\n\n await this.db.appendEvent(\"workflow_events\", event);\n }\n\n async saveWorkflowResumed(\n workflowSlug: string,\n runId: string,\n metadata: { versionId: string; originalRunId: string; resumedSteps: number; pendingSteps: number }\n ): Promise<void> {\n\n const timestamp = getMicrosecondTimestamp();\n\n const event: WorkflowResumedEvent = {\n category: \"workflow\",\n type: \"WorkflowResumed\",\n eventId: this.generateEventId(timestamp),\n timestampUs: timestamp,\n workflowSlug,\n runId,\n versionId: metadata.versionId,\n originalRunId: metadata.originalRunId,\n resumedSteps: 
metadata.resumedSteps,\n pendingSteps: metadata.pendingSteps,\n };\n\n await this.db.appendEvent(\"workflow_events\", event);\n }\n\n async saveWorkflowCancelled(\n workflowSlug: string,\n runId: string,\n metadata: { workflowAttemptNumber: number; reason?: string; duration: number; completedSteps: number }\n ): Promise<void> {\n\n const timestamp = getMicrosecondTimestamp();\n\n const event: WorkflowCancelledEvent = {\n category: \"workflow\",\n type: \"WorkflowCancelled\",\n eventId: this.generateEventId(timestamp),\n timestampUs: timestamp,\n workflowSlug,\n runId,\n workflowAttemptNumber: metadata.workflowAttemptNumber,\n reason: metadata.reason,\n durationUs: metadata.duration,\n completedSteps: metadata.completedSteps,\n };\n\n await this.db.appendEvent(\"workflow_events\", event);\n }\n\n async saveWorkflowRetryStarted(\n workflowSlug: string,\n runId: string,\n metadata: {\n workflowAttemptNumber: number;\n previousAttemptNumber: number;\n retriedSteps: string[];\n reason?: string;\n }\n ): Promise<void> {\n\n const timestamp = getMicrosecondTimestamp();\n\n const event: WorkflowRetryStartedEvent = {\n category: \"workflow\",\n type: \"WorkflowRetryStarted\",\n eventId: this.generateEventId(timestamp),\n timestampUs: timestamp,\n workflowSlug,\n runId,\n workflowAttemptNumber: metadata.workflowAttemptNumber,\n previousAttemptNumber: metadata.previousAttemptNumber,\n retriedSteps: metadata.retriedSteps,\n reason: metadata.reason,\n };\n\n await this.db.appendEvent(\"workflow_events\", event);\n }\n\n async saveRunSubmitted(\n workflowSlug: string,\n runId: string,\n metadata: {\n versionId: string;\n availableAt: number;\n priority: number;\n input?: string;\n hasInputSchema: boolean;\n timeout?: number;\n idempotencyKey?: string;\n metadata?: Record<string, unknown>;\n tags?: string[];\n }\n ): Promise<void> {\n\n const timestamp = getMicrosecondTimestamp();\n\n const event: RunSubmittedEvent = {\n category: \"workflow\",\n type: \"RunSubmitted\",\n eventId: this.generateEventId(timestamp),\n timestampUs: timestamp,\n workflowSlug,\n runId,\n versionId: metadata.versionId,\n availableAtUs: metadata.availableAt,\n priority: metadata.priority,\n input: metadata.input,\n hasInputSchema: metadata.hasInputSchema,\n timeoutUs: metadata.timeout,\n idempotencyKey: metadata.idempotencyKey,\n metadata: metadata.metadata,\n tags: metadata.tags,\n };\n\n await this.db.appendEvent(\"workflow_events\", event);\n }\n\n // ============================================================================\n // Queue Management\n // ============================================================================\n\n async submitRun(submission: RunSubmission): Promise<{ runId: string; isNew: boolean }> {\n\n // Check idempotency first\n if (submission.idempotencyKey) {\n const hash = this.hashIdempotencyKey(submission.idempotencyKey);\n const proposedRunId = submission.runId || this.generateRunId();\n const existingRunId = await this.db.saveIdempotencyKey(hash, proposedRunId);\n\n // If key already exists, return existing run ID\n if (existingRunId !== proposedRunId) {\n return { runId: existingRunId, isNew: false };\n }\n }\n\n // Generate runId if not provided\n const runId = submission.runId || this.generateRunId();\n const now = getMicrosecondTimestamp();\n const availableAt = submission.availableAt || now;\n const priority = submission.priority || 0;\n\n // Initialize run directory\n await this.initializeRun(submission.workflowSlug, runId);\n\n // Get workflow metadata to determine if input schema 
exists\n const workflowMetadata = await this.getWorkflowMetadata(submission.workflowSlug);\n const hasInputSchema = !!(workflowMetadata?.inputSchemaJSON);\n\n // Get current workflow version (auto-captured at submission time)\n const currentVersion = await this.getCurrentWorkflowVersion(submission.workflowSlug);\n if (!currentVersion) {\n throw new Error(`Workflow ${submission.workflowSlug} not registered. Please ensure the worker has started and registered workflows.`);\n }\n\n // Emit RunSubmitted event\n await this.saveRunSubmitted(submission.workflowSlug, runId, {\n versionId: currentVersion.versionId,\n availableAt,\n priority,\n input: submission.input !== undefined ? JSON.stringify(submission.input) : undefined,\n hasInputSchema,\n timeout: submission.timeout,\n idempotencyKey: submission.idempotencyKey,\n metadata: submission.metadata,\n tags: submission.tags,\n });\n\n return { runId, isNew: true };\n }\n\n async listRuns(options?: {\n workflowSlug?: string;\n status?: RunState[\"status\"][];\n tags?: string[];\n limit?: number;\n }): Promise<RunState[]> {\n // Step 1: Get filtered run identifiers with SQL (single query)\n // Note: We fetch more than limit to account for tag filtering done in JS\n const filteredRuns = await this.db.listRunsFiltered({\n workflowSlug: options?.workflowSlug,\n status: options?.status,\n // Fetch extra if tag filtering needed (tags are filtered in JS)\n limit: options?.tags?.length ? undefined : options?.limit,\n });\n\n // Apply tag filter if specified (AND logic - must have all)\n let runsToLoad = filteredRuns;\n if (options?.tags && options.tags.length > 0) {\n runsToLoad = filteredRuns.filter((run) => {\n const runTags = run.tags || [];\n return options.tags!.every((tag) => runTags.includes(tag));\n });\n // Apply limit after tag filtering\n if (options?.limit) {\n runsToLoad = runsToLoad.slice(0, options.limit);\n }\n }\n\n if (runsToLoad.length === 0) {\n return [];\n }\n\n // Step 2: Load full workflow events for matched runs (single query)\n const eventsByRun = await this.db.loadWorkflowEventsForRuns(\n runsToLoad.map((r) => ({ workflowSlug: r.workflowSlug, runId: r.runId }))\n );\n\n // Step 3: Project each run's events to full RunState\n const allRuns: RunState[] = [];\n for (const run of runsToLoad) {\n const key = `${run.workflowSlug}:${run.runId}`;\n const events = eventsByRun.get(key);\n if (!events || events.length === 0) continue;\n\n try {\n const state = projectRunStateFromEvents(events, run.workflowSlug);\n allRuns.push(state);\n } catch {\n // Skip runs that fail to project (corrupted/incomplete)\n continue;\n }\n }\n\n // Results are already sorted by createdAt DESC from SQL\n return allRuns;\n }\n\n async cancelRun(runId: string, reason?: string): Promise<void> {\n\n // Find the workflow slug for this run\n const allWorkflows = await this.db.listActiveWorkflows();\n\n for (const workflowSlug of allWorkflows) {\n const runIds = await this.db.listRunIds(workflowSlug);\n if (runIds.includes(runId)) {\n // Load events and get created time\n const events = await this.loadEvents(workflowSlug, runId, { category: \"workflow\" });\n if (events.length === 0) continue;\n\n const state = projectRunStateFromEvents(events, workflowSlug);\n\n // Calculate duration\n const duration = getMicrosecondTimestamp() - state.createdAt;\n\n // Count completed steps\n const stepRecords = await this.loadRun(workflowSlug, runId);\n const completedSteps = stepRecords.filter((r) => r.status === \"completed\").length;\n\n // Emit WorkflowCancelled event\n 
await this.saveWorkflowCancelled(workflowSlug, runId, {\n workflowAttemptNumber: state.workflowAttemptNumber || 1,\n reason,\n duration,\n completedSteps,\n });\n\n return;\n }\n }\n\n throw new Error(`Run ${runId} not found`);\n }\n\n async getRun(runId: string): Promise<RunState | null> {\n\n // Find the workflow slug for this run\n const allWorkflows = await this.db.listActiveWorkflows();\n\n for (const workflowSlug of allWorkflows) {\n const runIds = await this.db.listRunIds(workflowSlug);\n if (runIds.includes(runId)) {\n const workflowEvents = await this.loadEvents(workflowSlug, runId, { category: \"workflow\" });\n return projectRunStateFromEvents(workflowEvents, workflowSlug);\n }\n }\n\n return null;\n }\n\n async getFailedSteps(\n workflowSlug: string,\n runId: string\n ): Promise<\n Array<{\n stepId: string;\n error: StepError;\n attemptNumber: number;\n }>\n > {\n\n // Load all step events for this run\n const events = await this.loadEvents(workflowSlug, runId, { category: \"step\" });\n\n if (events.length === 0 || !(\"category\" in events[0]!)) {\n return [];\n }\n\n // Group events by step ID\n const eventsByStep = new Map<string, StepEvent[]>();\n for (const event of events) {\n if (event.category === \"step\") {\n const stepEvents = eventsByStep.get(event.stepId) || [];\n stepEvents.push(event as StepEvent);\n eventsByStep.set(event.stepId, stepEvents);\n }\n }\n\n // Project each step's state and filter for failed steps\n const failedSteps: Array<{\n stepId: string;\n error: StepError;\n attemptNumber: number;\n }> = [];\n\n for (const [stepId, stepEvents] of eventsByStep) {\n const state = projectStepState(stepEvents, workflowSlug);\n if (state.status === \"failed\" && state.terminal && state.error) {\n failedSteps.push({\n stepId,\n error: state.error,\n attemptNumber: state.attemptNumber,\n });\n }\n }\n\n return failedSteps;\n }\n\n // ============================================================================\n // Step-Level Distribution\n // ============================================================================\n\n async listActiveWorkflows(): Promise<string[]> {\n return this.db.listActiveWorkflows();\n }\n\n async listScheduledSteps(options?: {\n availableBefore?: number;\n workflowSlug?: string;\n limit?: number;\n }): Promise<Array<{ workflowSlug: string; runId: string; stepId: string }>> {\n\n // Convert single workflowSlug to array for db layer\n const dbOptions = {\n workflowSlugs: options?.workflowSlug ? 
[options.workflowSlug] : undefined,\n limit: options?.limit,\n };\n\n // Note: availableBefore filtering would need to be implemented in db layer\n // For now, we just pass through the other options\n return this.db.listScheduledSteps(dbOptions);\n }\n\n async isStepClaimable(workflowSlug: string, runId: string, stepId: string): Promise<boolean> {\n\n const events = await this.db.loadEvents(\"step_events\", {\n workflowSlug,\n runId,\n stepId,\n });\n\n if (events.length === 0) {\n return false;\n }\n\n // Get the latest event\n const latestEvent = events[events.length - 1];\n\n // Step is claimable if latest event is StepScheduled, StepReclaimed, or StepRetrying\n return !!(\n latestEvent &&\n (latestEvent.type === \"StepScheduled\" ||\n latestEvent.type === \"StepReclaimed\" ||\n latestEvent.type === \"StepRetrying\")\n );\n }\n\n async claimScheduledStep(\n workflowSlug: string,\n runId: string,\n stepId: string,\n workerId: string,\n metadata: StepStartMetadata\n ): Promise<{ attemptNumber: number } | null> {\n\n const initialEvents = await this.loadEvents(workflowSlug, runId, { category: \"step\", stepId });\n\n if (initialEvents.length === 0) {\n return null;\n }\n\n const now = getMicrosecondTimestamp();\n const initialState = projectStepState(initialEvents as StepEvent[], workflowSlug);\n\n if (\n initialState.status !== \"scheduled\" ||\n initialState.availableAt === undefined ||\n initialState.availableAt > now\n ) {\n return null;\n }\n\n const attemptNumber = initialState.attemptNumber;\n const timestamp = getMicrosecondTimestamp();\n\n const event: StepStartedEvent = {\n category: \"step\",\n type: \"StepStarted\",\n eventId: this.generateEventId(timestamp),\n timestampUs: timestamp,\n workflowSlug,\n runId,\n stepId,\n workerId,\n dependencies: metadata.dependencies,\n attemptNumber,\n };\n\n // Attempt atomic claim via db layer\n const claimed = await this.db.claimScheduledStep(workflowSlug, runId, stepId, workerId, event);\n\n return claimed ? 
{ attemptNumber } : null;\n }\n\n async reclaimStaleSteps(\n staleThreshold: number,\n reclaimedBy: string\n ): Promise<Array<{ workflowSlug: string; runId: string; stepId: string }>> {\n\n const reclaimed: Array<{ workflowSlug: string; runId: string; stepId: string }> = [];\n const now = getMicrosecondTimestamp();\n\n const staleSteps = await this.db.findStaleSteps(staleThreshold);\n\n for (const step of staleSteps) {\n const events = await this.loadEvents(step.workflowSlug, step.runId, { category: \"step\", stepId: step.stepId });\n\n if (events.length === 0) continue;\n\n const state = projectStepState(events as StepEvent[], step.workflowSlug);\n\n // Only reclaim running steps\n if (state.status !== \"running\") continue;\n\n // Check if heartbeat is stale\n const lastHeartbeat = state.lastHeartbeat || state.startTime || 0;\n const staleDuration = now - lastHeartbeat;\n\n if (staleDuration > staleThreshold) {\n // Emit StepReclaimed event\n await this.saveStepReclaimed(step.workflowSlug, step.runId, step.stepId, {\n originalWorkerId: state.claimedBy || \"unknown\",\n reclaimedBy,\n lastHeartbeat,\n staleThreshold,\n staleDuration,\n attemptNumber: state.attemptNumber,\n });\n\n // Re-schedule the step with incremented attempt number\n await this.saveStepScheduled(step.workflowSlug, step.runId, step.stepId, {\n availableAt: now, // Immediately available\n reason: \"retry\",\n attemptNumber: state.attemptNumber + 1, // Increment attempt - reclamation is a new attempt\n retryDelayMs: 0,\n });\n\n reclaimed.push({ workflowSlug: step.workflowSlug, runId: step.runId, stepId: step.stepId });\n }\n }\n\n return reclaimed;\n }\n\n // ============================================================================\n // Workflow Registration\n // ============================================================================\n\n async registerWorkflow(registration: WorkflowRegistration): Promise<void> {\n\n // Save metadata\n await this.db.upsertWorkflowMetadata(\n registration.slug,\n registration.name,\n registration.location,\n registration.inputSchemaJSON\n );\n\n // Save step definitions\n for (const step of registration.steps) {\n await this.db.upsertStepDefinition(registration.slug, step);\n }\n }\n\n async getWorkflowMetadata(slug: string): Promise<WorkflowMetadata | null> {\n return this.db.getWorkflowMetadata(slug);\n }\n\n async listWorkflowMetadata(): Promise<WorkflowMetadata[]> {\n return this.db.listWorkflowMetadata();\n }\n\n async getWorkflowSteps(slug: string): Promise<StepDefinition[]> {\n return this.db.getWorkflowSteps(slug);\n }\n\n async listRunIds(workflowSlug: string): Promise<string[]> {\n return this.db.listRunIds(workflowSlug);\n }\n\n // ============================================================================\n // Workflow Version Management Methods\n // ============================================================================\n\n async createWorkflowVersion(version: Omit<WorkflowVersion, 'versionNumber'>): Promise<void> {\n await this.db.getPool().query(`\n INSERT INTO ${this.db.getSchema()}.workflow_versions\n (workflow_slug, version_id, created_at, step_manifest, total_steps, git_commit, git_dirty, git_branch)\n VALUES ($1, $2, to_timestamp($3 / 1000000.0), $4, $5, $6, $7, $8)\n ON CONFLICT (workflow_slug, version_id) DO NOTHING\n `, [\n version.workflowSlug,\n version.versionId,\n version.createdAt,\n version.stepManifest,\n version.totalSteps,\n version.git?.commit,\n version.git?.dirty,\n version.git?.branch,\n ]);\n }\n\n async getWorkflowVersion(workflowSlug: 
string, versionId: string): Promise<WorkflowVersion | null> {\n const row = await this.db.getWorkflowVersion(workflowSlug, versionId);\n\n if (!row) return null;\n\n return {\n workflowSlug: row.workflow_slug,\n versionId: row.version_id,\n versionNumber: parseInt(row.version_number.toString(), 10),\n createdAt: Math.floor(new Date(row.created_at).getTime() * 1000), // Convert to μs\n stepManifest: row.step_manifest,\n totalSteps: row.total_steps,\n git: row.git_commit && row.git_dirty !== null && row.git_branch !== null ? {\n commit: row.git_commit,\n dirty: row.git_dirty,\n branch: row.git_branch,\n } : undefined,\n };\n }\n\n async getCurrentWorkflowVersion(workflowSlug: string): Promise<WorkflowVersion | null> {\n const row = await this.db.getCurrentWorkflowVersion(workflowSlug);\n\n if (!row) return null;\n\n return {\n workflowSlug: row.workflow_slug,\n versionId: row.version_id,\n versionNumber: parseInt(row.version_number.toString(), 10),\n createdAt: Math.floor(new Date(row.created_at).getTime() * 1000),\n stepManifest: row.step_manifest,\n totalSteps: row.total_steps,\n git: row.git_commit && row.git_dirty !== null && row.git_branch !== null ? {\n commit: row.git_commit,\n dirty: row.git_dirty,\n branch: row.git_branch,\n } : undefined,\n };\n }\n\n async listWorkflowVersions(workflowSlug: string, options?: { limit?: number }): Promise<WorkflowVersion[]> {\n const limit = options?.limit ?? 100;\n const rows = await this.db.listWorkflowVersions(workflowSlug, limit);\n\n return rows.map((row) => ({\n workflowSlug: row.workflow_slug,\n versionId: row.version_id,\n versionNumber: parseInt(row.version_number.toString(), 10),\n createdAt: Math.floor(new Date(row.created_at).getTime() * 1000),\n stepManifest: row.step_manifest,\n totalSteps: row.total_steps,\n git: row.git_commit && row.git_dirty !== null && row.git_branch !== null ? {\n commit: row.git_commit,\n dirty: row.git_dirty,\n branch: row.git_branch,\n } : undefined,\n }));\n }\n\n /**\n * Close the database connection pool\n * Call this when shutting down the application\n */\n async close(): Promise<void> {\n await this.db.getPool().end();\n }\n\n // ============================================================================\n // Analytics Methods\n // ============================================================================\n\n /**\n * Helper to load all events within a time range and optional filters\n * Uses raw SQL queries for better performance with large datasets\n */\n private async loadEventsForAnalytics(\n options?: AnalyticsOptions\n ): Promise<{ stepEvents: StepEvent[]; workflowEvents: WorkflowEvent[] }> {\n const now = getMicrosecondTimestamp();\n const startUs = options?.startUs ?? now - 24 * 60 * 60 * 1000 * 1000; // Default: 24 hours ago\n const endUs = options?.endUs ?? 
now;\n\n // Build step events query\n let stepQuery = `\n SELECT event_data, event_id\n FROM ${this.db.getSchema()}.step_events\n WHERE timestamp_us >= $1 AND timestamp_us <= $2\n `;\n const stepParams: any[] = [startUs, endUs];\n let paramIndex = 3;\n\n if (options?.workflowSlug) {\n stepQuery += ` AND workflow_slug = $${paramIndex}`;\n stepParams.push(options.workflowSlug);\n paramIndex++;\n }\n\n if (options?.stepId) {\n stepQuery += ` AND step_id = $${paramIndex}`;\n stepParams.push(options.stepId);\n paramIndex++;\n }\n\n if (options?.runIds && options.runIds.length > 0) {\n const runIdsPlaceholder = `$${paramIndex}`;\n stepQuery += ` AND run_id = ANY(${runIdsPlaceholder})`;\n stepParams.push(options.runIds);\n paramIndex++;\n }\n\n stepQuery += ` ORDER BY timestamp_us ASC`;\n\n // Build workflow events query\n let workflowQuery = `\n SELECT event_data, event_id\n FROM ${this.db.getSchema()}.workflow_events\n WHERE timestamp_us >= $1 AND timestamp_us <= $2\n `;\n const workflowParams: any[] = [startUs, endUs];\n paramIndex = 3;\n\n if (options?.workflowSlug) {\n workflowQuery += ` AND workflow_slug = $${paramIndex}`;\n workflowParams.push(options.workflowSlug);\n paramIndex++;\n }\n\n if (options?.runIds && options.runIds.length > 0) {\n const runIdsPlaceholder = `$${paramIndex}`;\n workflowQuery += ` AND run_id = ANY(${runIdsPlaceholder})`;\n workflowParams.push(options.runIds);\n paramIndex++;\n }\n\n workflowQuery += ` ORDER BY timestamp_us ASC`;\n\n // Execute queries in parallel\n const [stepResult, workflowResult] = await Promise.all([\n this.db.getPool().query(stepQuery, stepParams),\n options?.stepId\n ? Promise.resolve({ rows: [] }) // Skip workflow events if filtering by stepId\n : this.db.getPool().query(workflowQuery, workflowParams),\n ]);\n\n // Inject event_id from column into event object (single source of truth)\n const stepEvents = stepResult.rows.map((row) => ({ ...row.event_data, eventId: row.event_id }) satisfies StepEvent);\n const workflowEvents = workflowResult.rows.map(\n (row) => ({ ...row.event_data, eventId: row.event_id }) satisfies WorkflowEvent\n );\n\n return { stepEvents, workflowEvents };\n }\n\n async getErrorAnalysis(options?: AnalyticsOptions): Promise<ErrorAnalysis> {\n const { stepEvents } = await this.loadEventsForAnalytics(options);\n return computeErrorAnalysis(\n stepEvents,\n options?.workflowSlug,\n options?.stepId\n );\n }\n\n async getErrorsList(options?: {\n timeRange?: { start: number; end: number };\n workflowSlug?: string;\n groupingStrategy?: 'exact' | 'normalized' | 'portable';\n limit?: number;\n offset?: number;\n }): Promise<{\n errors: Array<{\n fingerprint: string;\n errorMessage: string;\n errorName: string;\n sampleStack: string;\n count: number;\n affectedRuns: number;\n firstSeen: number;\n lastSeen: number;\n }>;\n total: number;\n }> {\n const strategy = options?.groupingStrategy || 'exact';\n const limit = options?.limit || 50;\n const offset = options?.offset || 0;\n\n // Choose which stack hash column to use based on strategy\n const stackHashColumn =\n strategy === 'exact'\n ? 'error_stack_exact_hash'\n : strategy === 'normalized'\n ? 
'error_stack_normalized_hash'\n : 'error_stack_portable_hash';\n\n // Build WHERE clause\n const conditions: string[] = [\"type = 'StepFailed'\"];\n const params: any[] = [];\n let paramIndex = 1;\n\n if (options?.timeRange) {\n conditions.push(`timestamp_us >= $${paramIndex}`);\n params.push(options.timeRange.start);\n paramIndex++;\n\n conditions.push(`timestamp_us <= $${paramIndex}`);\n params.push(options.timeRange.end);\n paramIndex++;\n }\n\n if (options?.workflowSlug) {\n conditions.push(`workflow_slug = $${paramIndex}`);\n params.push(options?.workflowSlug);\n paramIndex++;\n }\n\n const whereClause = conditions.length > 0 ? `WHERE ${conditions.join(' AND ')}` : '';\n\n // Get total count (without pagination)\n const countQuery = `\n SELECT COUNT(DISTINCT CONCAT(error_name_hash, ':', error_message_hash, ':', ${stackHashColumn})) as total\n FROM ${this.db.getSchema()}.step_events\n ${whereClause}\n `;\n\n const countResult = await this.db.getPool().query(countQuery, params);\n const total = parseInt(countResult.rows[0]?.total || '0', 10);\n\n // Get paginated errors with aggregations\n const query = `\n SELECT\n CONCAT(error_name_hash, ':', error_message_hash, ':', ${stackHashColumn}) as fingerprint,\n MIN((event_data->'error'->>'message')) as error_message,\n MIN((event_data->'error'->>'name')) as error_name,\n MIN((event_data->'error'->>'stack')) as sample_stack,\n COUNT(*) as count,\n COUNT(DISTINCT run_id) as affected_runs,\n MIN(timestamp_us) as first_seen,\n MAX(timestamp_us) as last_seen\n FROM ${this.db.getSchema()}.step_events\n ${whereClause}\n GROUP BY\n error_name_hash,\n error_message_hash,\n ${stackHashColumn}\n ORDER BY count DESC\n LIMIT $${paramIndex} OFFSET $${paramIndex + 1}\n `;\n\n const result = await this.db.getPool().query(query, [...params, limit, offset]);\n\n const errors = result.rows.map((row) => ({\n fingerprint: row.fingerprint,\n errorMessage: row.error_message || '',\n errorName: row.error_name || 'Error',\n sampleStack: row.sample_stack || '',\n count: parseInt(row.count, 10),\n affectedRuns: parseInt(row.affected_runs, 10),\n firstSeen: parseInt(row.first_seen, 10),\n lastSeen: parseInt(row.last_seen, 10),\n }));\n\n return { errors, total };\n }\n\n async getErrorDetail(\n fingerprint: string,\n groupingStrategy: 'exact' | 'normalized' | 'portable',\n options?: {\n timeRange?: { start: number; end: number };\n limit?: number;\n offset?: number;\n }\n ): Promise<{\n fingerprint: string;\n errorMessage: string;\n errorName: string;\n sampleStack: string;\n totalCount: number;\n affectedRuns: number;\n firstSeen: number;\n lastSeen: number;\n occurrences: Array<{\n workflowSlug: string;\n runId: string;\n stepId: string;\n attemptNumber: number;\n timestampUs: number;\n }>;\n total: number;\n }> {\n // Parse fingerprint into components\n const parts = fingerprint.split(':');\n if (parts.length !== 3) {\n throw new Error(`Invalid fingerprint format: ${fingerprint}`);\n }\n\n const [nameHash, messageHash, stackHash] = parts;\n\n // Choose which stack hash column to use\n const stackHashColumn =\n groupingStrategy === 'exact'\n ? 'error_stack_exact_hash'\n : groupingStrategy === 'normalized'\n ? 
'error_stack_normalized_hash'\n : 'error_stack_portable_hash';\n\n const limit = options?.limit || 100;\n const offset = options?.offset || 0;\n\n // Build WHERE clause\n const conditions: string[] = [\n \"type = 'StepFailed'\",\n `error_name_hash = $1`,\n `error_message_hash = $2`,\n `${stackHashColumn} = $3`,\n ];\n const params: any[] = [nameHash, messageHash, stackHash];\n let paramIndex = 4;\n\n if (options?.timeRange) {\n conditions.push(`timestamp_us >= $${paramIndex}`);\n params.push(options.timeRange.start);\n paramIndex++;\n\n conditions.push(`timestamp_us <= $${paramIndex}`);\n params.push(options.timeRange.end);\n paramIndex++;\n }\n\n const whereClause = `WHERE ${conditions.join(' AND ')}`;\n\n // Get aggregated stats and sample error\n const statsQuery = `\n SELECT\n MIN((event_data->'error'->>'message')) as error_message,\n MIN((event_data->'error'->>'name')) as error_name,\n MIN((event_data->'error'->>'stack')) as sample_stack,\n COUNT(*) as total_count,\n COUNT(DISTINCT run_id) as affected_runs,\n MIN(timestamp_us) as first_seen,\n MAX(timestamp_us) as last_seen\n FROM ${this.db.getSchema()}.step_events\n ${whereClause}\n `;\n\n const statsResult = await this.db.getPool().query(statsQuery, params.slice(0, paramIndex - 1));\n\n if (statsResult.rows.length === 0) {\n // No matching errors found\n return {\n fingerprint,\n errorMessage: '',\n errorName: '',\n sampleStack: '',\n totalCount: 0,\n affectedRuns: 0,\n firstSeen: 0,\n lastSeen: 0,\n occurrences: [],\n total: 0,\n };\n }\n\n const stats = statsResult.rows[0];\n\n // Get paginated occurrences\n const occurrencesQuery = `\n SELECT\n workflow_slug,\n run_id,\n step_id,\n (event_data->>'attemptNumber')::int as attempt_number,\n timestamp_us\n FROM ${this.db.getSchema()}.step_events\n ${whereClause}\n ORDER BY timestamp_us DESC\n LIMIT $${paramIndex} OFFSET $${paramIndex + 1}\n `;\n\n const occurrencesResult = await this.db.getPool().query(occurrencesQuery, [\n ...params.slice(0, paramIndex - 1),\n limit,\n offset,\n ]);\n\n const occurrences = occurrencesResult.rows.map((row) => ({\n workflowSlug: row.workflow_slug,\n runId: row.run_id,\n stepId: row.step_id,\n attemptNumber: row.attempt_number,\n timestampUs: parseInt(row.timestamp_us, 10),\n }));\n\n return {\n fingerprint,\n errorMessage: stats.error_message || '',\n errorName: stats.error_name || 'Error',\n sampleStack: stats.sample_stack || '',\n totalCount: parseInt(stats.total_count, 10),\n affectedRuns: parseInt(stats.affected_runs, 10),\n firstSeen: parseInt(stats.first_seen, 10),\n lastSeen: parseInt(stats.last_seen, 10),\n occurrences,\n total: parseInt(stats.total_count, 10),\n };\n }\n\n async getRetryAnalysis(options?: AnalyticsOptions): Promise<RetryAnalysis> {\n const { stepEvents } = await this.loadEventsForAnalytics(options);\n return computeRetryAnalysis(stepEvents);\n }\n\n async getSchedulingLatency(\n options?: AnalyticsOptions\n ): Promise<SchedulingLatency> {\n const { stepEvents } = await this.loadEventsForAnalytics(options);\n return computeSchedulingLatency(\n stepEvents,\n options?.workflowSlug,\n options?.stepId\n );\n }\n\n async getStepDuration(options?: AnalyticsOptions): Promise<StepDuration> {\n const { stepEvents } = await this.loadEventsForAnalytics(options);\n return computeStepDuration(\n stepEvents,\n options?.workflowSlug,\n options?.stepId\n );\n }\n\n async getWorkflowDuration(\n options?: AnalyticsOptions\n ): Promise<WorkflowDuration> {\n const { workflowEvents } = await this.loadEventsForAnalytics(options);\n return 
computeWorkflowDuration(workflowEvents, options?.workflowSlug);\n }\n\n async getWorkerStability(options?: AnalyticsOptions): Promise<WorkerStability> {\n const { stepEvents } = await this.loadEventsForAnalytics(options);\n return computeWorkerStability(stepEvents);\n }\n\n async getThroughput(options?: AnalyticsOptions): Promise<Throughput> {\n const { stepEvents, workflowEvents } = await this.loadEventsForAnalytics(options);\n\n const now = getMicrosecondTimestamp();\n const startUs = options?.startUs ?? now - 24 * 60 * 60 * 1000 * 1000;\n const endUs = options?.endUs ?? now;\n const timeRangeUs = endUs - startUs;\n\n return computeThroughput(\n stepEvents,\n workflowEvents,\n timeRangeUs,\n options?.workflowSlug\n );\n }\n\n async getQueueDepth(\n options?: Pick<AnalyticsOptions, \"workflowSlug\">\n ): Promise<QueueDepth> {\n const result = await this.db.getQueueDepthAggregation(options?.workflowSlug);\n\n return {\n workflowSlug: options?.workflowSlug,\n pendingRuns: result.pendingRuns,\n runningRuns: result.runningRuns,\n scheduledSteps: result.scheduledSteps,\n runningSteps: result.runningSteps,\n oldestScheduledStepUs: result.oldestScheduledStepUs ?? undefined,\n oldestPendingRunUs: result.oldestPendingRunUs ?? undefined,\n };\n }\n\n async getQueueDepthByWorkflow(): Promise<QueueDepthByWorkflow> {\n const [aggregation, allMetadata] = await Promise.all([\n this.db.getQueueDepthByWorkflowAggregation(),\n this.listWorkflowMetadata(),\n ]);\n\n const metadataMap = new Map(allMetadata.map((m) => [m.slug, m]));\n\n return aggregation\n .map((item) => ({\n workflowSlug: item.workflowSlug,\n workflowName: metadataMap.get(item.workflowSlug)?.name,\n pendingRuns: item.pendingRuns,\n scheduledSteps: item.scheduledSteps,\n oldestPendingItemUs: item.oldestPendingItemUs ?? undefined,\n }))\n .sort((a, b) => {\n const aTotal = a.pendingRuns + a.scheduledSteps;\n const bTotal = b.pendingRuns + b.scheduledSteps;\n return bTotal - aTotal;\n });\n }\n\n async getSuccessRate(options?: AnalyticsOptions): Promise<SuccessRate> {\n const { stepEvents, workflowEvents } = await this.loadEventsForAnalytics(options);\n return computeSuccessRate(\n stepEvents,\n workflowEvents,\n options?.workflowSlug,\n options?.stepId\n );\n }\n\n async getAnalyticsSummary(\n options?: AnalyticsOptions\n ): Promise<AnalyticsSummary> {\n const now = getMicrosecondTimestamp();\n const startUs = options?.startUs ?? now - 24 * 60 * 60 * 1000 * 1000;\n const endUs = options?.endUs ?? now;\n\n // Run all analytics in parallel\n const [\n errorAnalysis,\n retryAnalysis,\n schedulingLatency,\n stepDuration,\n workflowDuration,\n workerStability,\n throughput,\n queueDepth,\n successRate,\n ] = await Promise.all([\n this.getErrorAnalysis(options),\n this.getRetryAnalysis(options),\n this.getSchedulingLatency(options),\n this.getStepDuration(options),\n this.getWorkflowDuration(options),\n this.getWorkerStability(options),\n this.getThroughput(options),\n this.getQueueDepth(options),\n this.getSuccessRate(options),\n ]);\n\n return {\n timeRange: {\n startUs,\n endUs,\n durationUs: endUs - startUs,\n },\n errorAnalysis,\n retryAnalysis,\n schedulingLatency,\n stepDuration,\n workflowDuration,\n workerStability,\n throughput,\n queueDepth,\n successRate,\n };\n }\n}\n",
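The `PostgresBackend` above is fully event-sourced: `rerunFrom` bulk-copies the parent run's workflow and step events inside a transaction, deletes the events for the rerun step and its dependents, strips terminal workflow events, and resubmits the run. A minimal usage sketch follows; the constructor arguments are assumptions (a connection string plus a schema name) and the workflow/step identifiers are hypothetical, while `submitRun`, `rerunFrom`, and `close` appear verbatim in the class above.

// Sketch only: the constructor shape and all identifiers below are assumed,
// not taken from the package's documentation.
import { PostgresBackend } from "@cascade-flow/backend-postgres";

async function main(): Promise<void> {
  // Assumed constructor arguments: connection string + schema name.
  const backend = new PostgresBackend(
    "postgres://localhost:5432/cascade", // placeholder connection string
    "cascade"                            // placeholder schema
  );

  // Idempotent submission: resubmitting the same key returns the existing runId.
  const { runId, isNew } = await backend.submitRun({
    workflowSlug: "nightly-etl",              // hypothetical workflow
    input: { date: "2024-01-01" },
    idempotencyKey: "nightly-etl:2024-01-01",
  });

  if (!isNew) {
    // Rerun from a step: the parent run's events are copied, and "load"
    // (a hypothetical step id) plus its dependents are rescheduled.
    const rerun = await backend.rerunFrom({ parentRunId: runId, fromStepId: "load" });
    console.log("rerun started as", rerun.runId);
  }

  await backend.close();
}

main().catch(console.error);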
46
46
  "// ESM wrapper for pg\nimport pg from '../lib/index.js'\n\n// Re-export all the properties\nexport const Client = pg.Client\nexport const Pool = pg.Pool\nexport const Connection = pg.Connection\nexport const types = pg.types\nexport const Query = pg.Query\nexport const DatabaseError = pg.DatabaseError\nexport const escapeIdentifier = pg.escapeIdentifier\nexport const escapeLiteral = pg.escapeLiteral\nexport const Result = pg.Result\nexport const TypeOverrides = pg.TypeOverrides\n\n// Also export the defaults\nexport const defaults = pg.defaults\n\n// Re-export the default\nexport default pg\n",
47
47
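The database layer that follows denormalizes selected event fields into dedicated columns at append time (worker id, attempt number, availability, error fingerprint hashes, version id), which is what lets `getErrorsList` and `getErrorDetail` above aggregate failures with plain SQL instead of scanning `event_data` JSON. A sketch of that access pattern, assuming only the columns visible in the INSERT statements below (schema and connection values are placeholders):

// Hypothetical standalone query against the denormalized columns; it mirrors
// the aggregation used by getErrorsList above.
import pg from "pg";

const pool = new pg.Pool({ connectionString: "postgres://localhost:5432/cascade" });

async function topFailureFingerprints(schema: string) {
  const { rows } = await pool.query(
    `SELECT error_name_hash, error_message_hash, COUNT(*) AS occurrences
       FROM ${schema}.step_events
      WHERE type = 'StepFailed'
      GROUP BY error_name_hash, error_message_hash
      ORDER BY occurrences DESC
      LIMIT 10`
  );
  return rows;
}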
  "/**\n * Database layer - isolates all pg usage and SQL queries\n * This makes it easy to swap to a different Postgres library in the future\n */\n\nimport pg from \"pg\";\nimport type {\n Event,\n StepEvent,\n WorkflowEvent,\n WorkflowMetadata,\n StepDefinition,\n RunSubmission,\n} from \"@cascade-flow/backend-interface\";\n\nconst { Pool } = pg;\nexport type { Pool } from \"pg\";\n\n/**\n * Strip eventId from event before storing in JSON - it's stored in the event_id column\n */\nfunction stripEventIdFromJson(event: Event): Omit<Event, 'eventId'> {\n const { eventId, ...eventWithoutId } = event;\n return eventWithoutId;\n}\n\n/**\n * Database client that encapsulates pool and schema configuration\n */\nexport class DatabaseClient {\n constructor(\n private pool: pg.Pool,\n private schema: string\n ) {}\n\n /**\n * Get the underlying pool (useful for migrations and direct access)\n */\n getPool(): pg.Pool {\n return this.pool;\n }\n\n /**\n * Get the schema name\n */\n getSchema(): string {\n return this.schema;\n }\n\n /**\n * Generic event append - handles both workflow and step events\n * Extracts normalized fields from events for efficient querying\n */\n async appendEvent(\n table: \"workflow_events\" | \"step_events\",\n event: Event\n ): Promise<void> {\n const client = await this.pool.connect();\n try {\n if (table === \"workflow_events\") {\n const we = event as WorkflowEvent;\n\n // Extract normalized fields based on event type\n let workflowAttemptNumber: number | null = null;\n let availableAtUs: number | null = null;\n let priority: number | null = null;\n let timeoutUs: number | null = null;\n let idempotencyKey: string | null = null;\n let versionId: string | null = null;\n\n if (we.type === \"RunSubmitted\") {\n availableAtUs = we.availableAtUs;\n priority = we.priority;\n timeoutUs = we.timeoutUs ?? null;\n idempotencyKey = we.idempotencyKey ?? null;\n versionId = we.versionId;\n } else if (\"workflowAttemptNumber\" in we) {\n workflowAttemptNumber = we.workflowAttemptNumber;\n }\n\n // Extract versionId from WorkflowStarted and WorkflowResumed events\n if (we.type === \"WorkflowStarted\" || we.type === \"WorkflowResumed\") {\n versionId = we.versionId;\n }\n\n // For workflow events that don't have versionId as a field, query for it\n // (WorkflowInputValidation, WorkflowCompleted, WorkflowFailed, WorkflowCancelled)\n if (versionId === null) {\n const versionResult = await client.query<{ version_id: string }>(\n `SELECT version_id FROM ${this.schema}.workflow_events\n WHERE workflow_slug = $1 AND run_id = $2\n AND type IN ('WorkflowStarted', 'RunSubmitted')\n AND version_id IS NOT NULL\n ORDER BY timestamp_us DESC, event_id DESC\n LIMIT 1`,\n [we.workflowSlug, we.runId]\n );\n versionId = versionResult.rows[0]?.version_id ?? 
null;\n }\n\n await client.query(\n `INSERT INTO ${this.schema}.workflow_events (\n event_id, workflow_slug, run_id, timestamp_us, category, type, event_data,\n workflow_attempt_number, available_at_us, priority, timeout_us, idempotency_key, version_id\n )\n VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12, $13)`,\n [\n we.eventId,\n we.workflowSlug,\n we.runId,\n we.timestampUs,\n we.category,\n we.type,\n JSON.stringify(stripEventIdFromJson(event)),\n workflowAttemptNumber,\n availableAtUs,\n priority,\n timeoutUs,\n idempotencyKey,\n versionId,\n ]\n );\n } else {\n const se = event as StepEvent;\n\n // Extract normalized fields based on event type\n let workerId: string | null = null;\n let attemptNumber: number | null = null;\n let availableAtUs: number | null = null;\n let exportOutput: boolean | null = null;\n let errorNameHash = '';\n let errorMessageHash = '';\n let errorStackExactHash = '';\n let errorStackNormalizedHash = '';\n let errorStackPortableHash = '';\n\n if (se.type === \"StepStarted\" || se.type === \"StepHeartbeat\") {\n workerId = se.workerId;\n }\n\n if (\"attemptNumber\" in se) {\n attemptNumber = se.attemptNumber;\n }\n\n if (se.type === \"StepScheduled\") {\n availableAtUs = se.availableAtUs;\n }\n\n if (se.type === \"StepCompleted\") {\n exportOutput = se.exportOutput;\n }\n\n if (se.type === \"StepFailed\") {\n errorNameHash = se.errorFingerprints.nameHash;\n errorMessageHash = se.errorFingerprints.messageHash;\n errorStackExactHash = se.errorFingerprints.stackExactHash;\n errorStackNormalizedHash = se.errorFingerprints.stackNormalizedHash;\n errorStackPortableHash = se.errorFingerprints.stackPortableHash;\n }\n\n // Get versionId from workflow events for this run\n // Step events inherit the version from their workflow run\n const versionResult = await client.query<{ version_id: string }>(\n `SELECT version_id FROM ${this.schema}.workflow_events\n WHERE workflow_slug = $1 AND run_id = $2\n AND type IN ('WorkflowStarted', 'RunSubmitted')\n AND version_id IS NOT NULL\n ORDER BY timestamp_us DESC, event_id DESC\n LIMIT 1`,\n [se.workflowSlug, se.runId]\n );\n\n const versionId = versionResult.rows[0]?.version_id ?? 
null;\n\n await client.query(\n `INSERT INTO ${this.schema}.step_events (\n event_id, workflow_slug, run_id, step_id, timestamp_us, category, type, event_data,\n worker_id, attempt_number, available_at_us, export_output,\n error_name_hash, error_message_hash, error_stack_exact_hash,\n error_stack_normalized_hash, error_stack_portable_hash, version_id\n )\n VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12, $13, $14, $15, $16, $17, $18)`,\n [\n se.eventId,\n se.workflowSlug,\n se.runId,\n se.stepId,\n se.timestampUs,\n se.category,\n se.type,\n JSON.stringify(stripEventIdFromJson(event)),\n workerId,\n attemptNumber,\n availableAtUs,\n exportOutput,\n errorNameHash,\n errorMessageHash,\n errorStackExactHash,\n errorStackNormalizedHash,\n errorStackPortableHash,\n versionId,\n ]\n );\n }\n } finally {\n client.release();\n }\n }\n\n /**\n * Append event using an existing client (for transactions)\n * Same as appendEvent but doesn't manage the client connection\n */\n async appendEventWithClient(\n client: pg.PoolClient,\n table: \"workflow_events\" | \"step_events\",\n event: Event\n ): Promise<void> {\n if (table === \"workflow_events\") {\n const we = event as WorkflowEvent;\n\n // Extract normalized fields based on event type\n let workflowAttemptNumber: number | null = null;\n let availableAtUs: number | null = null;\n let priority: number | null = null;\n let timeoutUs: number | null = null;\n let idempotencyKey: string | null = null;\n let versionId: string | null = null;\n\n if (we.type === \"RunSubmitted\") {\n availableAtUs = we.availableAtUs;\n priority = we.priority;\n timeoutUs = we.timeoutUs ?? null;\n idempotencyKey = we.idempotencyKey ?? null;\n versionId = we.versionId;\n } else if (\"workflowAttemptNumber\" in we) {\n workflowAttemptNumber = we.workflowAttemptNumber;\n }\n\n // Extract versionId from WorkflowStarted events\n if (we.type === \"WorkflowStarted\") {\n versionId = we.versionId;\n }\n\n // For workflow events that don't have versionId as a field, query for it\n // (WorkflowInputValidation, WorkflowCompleted, WorkflowFailed, WorkflowResumed, WorkflowCancelled)\n if (versionId === null) {\n const versionResult = await client.query<{ version_id: string }>(\n `SELECT version_id FROM ${this.schema}.workflow_events\n WHERE workflow_slug = $1 AND run_id = $2\n AND type IN ('WorkflowStarted', 'RunSubmitted')\n AND version_id IS NOT NULL\n ORDER BY timestamp_us DESC, event_id DESC\n LIMIT 1`,\n [we.workflowSlug, we.runId]\n );\n versionId = versionResult.rows[0]?.version_id ?? 
null;\n }\n\n await client.query(\n `INSERT INTO ${this.schema}.workflow_events (\n event_id, workflow_slug, run_id, timestamp_us, category, type, event_data,\n workflow_attempt_number, available_at_us, priority, timeout_us, idempotency_key, version_id\n )\n VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12, $13)`,\n [\n we.eventId,\n we.workflowSlug,\n we.runId,\n we.timestampUs,\n we.category,\n we.type,\n JSON.stringify(stripEventIdFromJson(event)),\n workflowAttemptNumber,\n availableAtUs,\n priority,\n timeoutUs,\n idempotencyKey,\n versionId,\n ]\n );\n } else {\n const se = event as StepEvent;\n\n // Extract normalized fields based on event type\n let workerId: string | null = null;\n let attemptNumber: number | null = null;\n let availableAtUs: number | null = null;\n let exportOutput: boolean | null = null;\n let errorNameHash = '';\n let errorMessageHash = '';\n let errorStackExactHash = '';\n let errorStackNormalizedHash = '';\n let errorStackPortableHash = '';\n\n if (se.type === \"StepStarted\" || se.type === \"StepHeartbeat\") {\n workerId = se.workerId;\n }\n\n if (\"attemptNumber\" in se) {\n attemptNumber = se.attemptNumber;\n }\n\n if (se.type === \"StepScheduled\") {\n availableAtUs = se.availableAtUs;\n }\n\n if (se.type === \"StepCompleted\") {\n exportOutput = se.exportOutput;\n }\n\n if (se.type === \"StepFailed\") {\n errorNameHash = se.errorFingerprints.nameHash;\n errorMessageHash = se.errorFingerprints.messageHash;\n errorStackExactHash = se.errorFingerprints.stackExactHash;\n errorStackNormalizedHash = se.errorFingerprints.stackNormalizedHash;\n errorStackPortableHash = se.errorFingerprints.stackPortableHash;\n }\n\n // Get versionId from workflow events for this run\n // Step events inherit the version from their workflow run\n const versionResult = await client.query<{ version_id: string }>(\n `SELECT version_id FROM ${this.schema}.workflow_events\n WHERE workflow_slug = $1 AND run_id = $2\n AND type IN ('WorkflowStarted', 'RunSubmitted')\n AND version_id IS NOT NULL\n ORDER BY timestamp_us DESC, event_id DESC\n LIMIT 1`,\n [se.workflowSlug, se.runId]\n );\n\n const versionId = versionResult.rows[0]?.version_id ?? 
null;\n\n await client.query(\n `INSERT INTO ${this.schema}.step_events (\n event_id, workflow_slug, run_id, step_id, timestamp_us, category, type, event_data,\n worker_id, attempt_number, available_at_us, export_output,\n error_name_hash, error_message_hash, error_stack_exact_hash,\n error_stack_normalized_hash, error_stack_portable_hash, version_id\n )\n VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12, $13, $14, $15, $16, $17, $18)`,\n [\n se.eventId,\n se.workflowSlug,\n se.runId,\n se.stepId,\n se.timestampUs,\n se.category,\n se.type,\n JSON.stringify(stripEventIdFromJson(event)),\n workerId,\n attemptNumber,\n availableAtUs,\n exportOutput,\n errorNameHash,\n errorMessageHash,\n errorStackExactHash,\n errorStackNormalizedHash,\n errorStackPortableHash,\n versionId,\n ]\n );\n }\n }\n\n /**\n * Load events with optional filtering\n */\n async loadEvents(\n table: \"workflow_events\" | \"step_events\",\n filters: {\n workflowSlug?: string;\n runId?: string;\n stepId?: string;\n category?: \"workflow\" | \"step\";\n types?: string[];\n }\n ): Promise<Event[]> {\n const client = await this.pool.connect();\n try {\n const conditions: string[] = [];\n const values: any[] = [];\n let paramIndex = 1;\n\n if (filters.workflowSlug) {\n conditions.push(`workflow_slug = $${paramIndex++}`);\n values.push(filters.workflowSlug);\n }\n\n if (filters.runId) {\n conditions.push(`run_id = $${paramIndex++}`);\n values.push(filters.runId);\n }\n\n if (filters.stepId && table === \"step_events\") {\n conditions.push(`step_id = $${paramIndex++}`);\n values.push(filters.stepId);\n }\n\n if (filters.category) {\n conditions.push(`category = $${paramIndex++}`);\n values.push(filters.category);\n }\n\n if (filters.types && filters.types.length > 0) {\n conditions.push(`type = ANY($${paramIndex++})`);\n values.push(filters.types);\n }\n\n const whereClause = conditions.length > 0 ? 
`WHERE ${conditions.join(\" AND \")}` : \"\";\n const query = `\n SELECT event_data, event_id FROM ${this.schema}.${table}\n ${whereClause}\n ORDER BY timestamp_us ASC, event_id ASC\n `;\n\n const result = await client.query(query, values);\n // Inject event_id from column into event object (single source of truth)\n return result.rows.map((row) => ({ ...row.event_data, eventId: row.event_id }));\n } finally {\n client.release();\n }\n }\n\n /**\n * Load all events for a specific run (both workflow and step events)\n */\n async loadAllRunEvents(\n workflowSlug: string,\n runId: string\n ): Promise<Event[]> {\n const client = await this.pool.connect();\n try {\n // Union query to get all events sorted by timestamp\n const query = `\n SELECT event_data, timestamp_us, event_id FROM ${this.schema}.workflow_events\n WHERE workflow_slug = $1 AND run_id = $2\n UNION ALL\n SELECT event_data, timestamp_us, event_id FROM ${this.schema}.step_events\n WHERE workflow_slug = $1 AND run_id = $2\n ORDER BY timestamp_us ASC, event_id ASC\n `;\n\n const result = await client.query(query, [workflowSlug, runId]);\n // Inject event_id from column into event object (single source of truth)\n return result.rows.map((row) => ({ ...row.event_data, eventId: row.event_id }));\n } finally {\n client.release();\n }\n }\n\n /**\n * Load step events for state projection, grouped by stepId\n * Optimized batch query that avoids N+1 when checking multiple step states\n *\n * Excludes high-volume event types that don't affect state projection:\n * - LogEntry: Can be 1000+ per verbose step, doesn't affect status\n * - StepHeartbeat: Every 5s per running step, only updates lastHeartbeat\n * - StepRetrying: Informational only, doesn't change state machine\n */\n async loadStepEventsForProjection(\n workflowSlug: string,\n runId: string\n ): Promise<Map<string, StepEvent[]>> {\n const client = await this.pool.connect();\n try {\n const query = `\n SELECT event_data, event_id, step_id\n FROM ${this.schema}.step_events\n WHERE workflow_slug = $1 AND run_id = $2\n AND type NOT IN ('LogEntry', 'StepHeartbeat', 'StepRetrying')\n ORDER BY timestamp_us ASC, event_id ASC\n `;\n\n const result = await client.query(query, [workflowSlug, runId]);\n\n // Group events by stepId\n const eventsByStep = new Map<string, StepEvent[]>();\n for (const row of result.rows) {\n const stepId = row.step_id;\n const event = { ...row.event_data, eventId: row.event_id } as StepEvent;\n\n if (!eventsByStep.has(stepId)) {\n eventsByStep.set(stepId, []);\n }\n eventsByStep.get(stepId)!.push(event);\n }\n\n return eventsByStep;\n } finally {\n client.release();\n }\n }\n\n /**\n * Atomic step claiming using SELECT FOR UPDATE SKIP LOCKED\n * Returns true if claim succeeded, false if already claimed\n */\n async claimScheduledStep(\n workflowSlug: string,\n runId: string,\n stepId: string,\n workerId: string,\n eventToWrite: StepEvent\n ): Promise<boolean> {\n const client = await this.pool.connect();\n try {\n await client.query(\"BEGIN\");\n\n // Find the most recent StepScheduled event for this step\n const checkQuery = `\n SELECT event_data FROM ${this.schema}.step_events\n WHERE workflow_slug = $1 AND run_id = $2 AND step_id = $3\n ORDER BY timestamp_us DESC, event_id DESC\n LIMIT 1\n FOR UPDATE SKIP LOCKED\n `;\n\n const checkResult = await client.query(checkQuery, [workflowSlug, runId, stepId]);\n\n // If no rows returned, step is locked by another worker\n if (checkResult.rows.length === 0) {\n await client.query(\"ROLLBACK\");\n return false;\n }\n\n 
const latestEvent = checkResult.rows[0].event_data;\n\n // Verify step is in scheduled state (not started/completed/failed)\n if (\n latestEvent.type !== \"StepScheduled\" &&\n latestEvent.type !== \"StepReclaimed\" &&\n latestEvent.type !== \"StepRetrying\"\n ) {\n await client.query(\"ROLLBACK\");\n return false;\n }\n\n // Write the StepStarted event with normalized fields\n let workerId: string | null = null;\n let attemptNumber: number | null = null;\n let errorNameHash = '';\n let errorMessageHash = '';\n let errorStackExactHash = '';\n let errorStackNormalizedHash = '';\n let errorStackPortableHash = '';\n\n if (eventToWrite.type === \"StepStarted\") {\n workerId = eventToWrite.workerId;\n attemptNumber = eventToWrite.attemptNumber;\n }\n\n if (eventToWrite.type === \"StepFailed\") {\n errorNameHash = eventToWrite.errorFingerprints.nameHash;\n errorMessageHash = eventToWrite.errorFingerprints.messageHash;\n errorStackExactHash = eventToWrite.errorFingerprints.stackExactHash;\n errorStackNormalizedHash = eventToWrite.errorFingerprints.stackNormalizedHash;\n errorStackPortableHash = eventToWrite.errorFingerprints.stackPortableHash;\n }\n\n // Get versionId from workflow events for this run\n // Step events inherit the version from their workflow run\n const versionResult = await client.query<{ version_id: string }>(\n `SELECT version_id FROM ${this.schema}.workflow_events\n WHERE workflow_slug = $1 AND run_id = $2\n AND type IN ('WorkflowStarted', 'RunSubmitted')\n AND version_id IS NOT NULL\n ORDER BY timestamp_us DESC, event_id DESC\n LIMIT 1`,\n [eventToWrite.workflowSlug, eventToWrite.runId]\n );\n\n const versionId = versionResult.rows[0]?.version_id ?? null;\n\n await client.query(\n `INSERT INTO ${this.schema}.step_events (\n event_id, workflow_slug, run_id, step_id, timestamp_us, category, type, event_data,\n worker_id, attempt_number, available_at_us, export_output,\n error_name_hash, error_message_hash, error_stack_exact_hash,\n error_stack_normalized_hash, error_stack_portable_hash, version_id\n )\n VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12, $13, $14, $15, $16, $17, $18)`,\n [\n eventToWrite.eventId,\n eventToWrite.workflowSlug,\n eventToWrite.runId,\n eventToWrite.stepId,\n eventToWrite.timestampUs,\n eventToWrite.category,\n eventToWrite.type,\n JSON.stringify(stripEventIdFromJson(eventToWrite)),\n workerId,\n attemptNumber,\n null, // available_at_us\n null, // export_output\n errorNameHash,\n errorMessageHash,\n errorStackExactHash,\n errorStackNormalizedHash,\n errorStackPortableHash,\n versionId,\n ]\n );\n\n await client.query(\"COMMIT\");\n return true;\n } catch (error) {\n await client.query(\"ROLLBACK\");\n throw error;\n } finally {\n client.release();\n }\n }\n\n /**\n * Get all scheduled steps across workflows\n * Filters by available_at_us for delayed scheduling support\n */\n async listScheduledSteps(\n options?: {\n workflowSlugs?: string[];\n limit?: number;\n }\n ): Promise<Array<{ workflowSlug: string; runId: string; stepId: string }>> {\n const client = await this.pool.connect();\n try {\n const currentTimeUs = Date.now() * 1000;\n\n // Find steps where the latest event is StepScheduled, StepReclaimed, or StepRetrying\n // and available_at_us is in the past (or null for immediate availability)\n const scheduledTypes = [\"StepScheduled\", \"StepReclaimed\", \"StepRetrying\"];\n\n let query = `\n WITH latest_step_events AS (\n SELECT DISTINCT ON (workflow_slug, run_id, step_id)\n workflow_slug, run_id, step_id, type, available_at_us\n FROM 
${this.schema}.step_events\n ${options?.workflowSlugs ? \"WHERE workflow_slug = ANY($1)\" : \"\"}\n ORDER BY workflow_slug, run_id, step_id, timestamp_us DESC, event_id DESC\n )\n SELECT workflow_slug, run_id, step_id\n FROM latest_step_events\n WHERE type = ANY($${options?.workflowSlugs ? \"2\" : \"1\"})\n AND (available_at_us IS NULL OR available_at_us <= $${options?.workflowSlugs ? \"3\" : \"2\"})\n ${options?.limit ? `LIMIT $${options?.workflowSlugs ? \"4\" : \"3\"}` : \"\"}\n `;\n\n const params: any[] = [];\n if (options?.workflowSlugs) {\n params.push(options.workflowSlugs);\n }\n params.push(scheduledTypes);\n params.push(currentTimeUs);\n if (options?.limit) {\n params.push(options.limit);\n }\n\n const result = await client.query(query, params);\n return result.rows.map((row) => ({\n workflowSlug: row.workflow_slug,\n runId: row.run_id,\n stepId: row.step_id,\n }));\n } finally {\n client.release();\n }\n }\n\n /**\n * Find stale steps (last heartbeat older than threshold)\n * Uses normalized worker_id column for efficient querying\n */\n async findStaleSteps(\n staleThresholdUs: number\n ): Promise<Array<{ workflowSlug: string; runId: string; stepId: string; workerId: string }>> {\n const client = await this.pool.connect();\n try {\n const currentTimeUs = Date.now() * 1000;\n\n // Find steps where latest event is StepStarted or StepHeartbeat\n // and the timestamp is older than threshold\n // Uses normalized worker_id column instead of JSONB extraction\n const query = `\n WITH latest_step_events AS (\n SELECT DISTINCT ON (workflow_slug, run_id, step_id)\n workflow_slug, run_id, step_id, type, timestamp_us, worker_id\n FROM ${this.schema}.step_events\n WHERE type IN ('StepStarted', 'StepHeartbeat')\n ORDER BY workflow_slug, run_id, step_id, timestamp_us DESC, event_id DESC\n )\n SELECT workflow_slug, run_id, step_id, worker_id\n FROM latest_step_events\n WHERE timestamp_us < $1 AND worker_id IS NOT NULL\n `;\n\n const result = await client.query(query, [currentTimeUs - staleThresholdUs]);\n return result.rows.map((row) => ({\n workflowSlug: row.workflow_slug,\n runId: row.run_id,\n stepId: row.step_id,\n workerId: row.worker_id,\n }));\n } finally {\n client.release();\n }\n }\n\n /**\n * Save step output\n */\n async saveStepOutput(\n workflowSlug: string,\n runId: string,\n stepId: string,\n attemptNumber: number,\n output: any\n ): Promise<void> {\n const client = await this.pool.connect();\n try {\n await client.query(\n `INSERT INTO ${this.schema}.step_outputs (workflow_slug, run_id, step_id, attempt_number, output)\n VALUES ($1, $2, $3, $4, $5)\n ON CONFLICT (workflow_slug, run_id, step_id, attempt_number)\n DO UPDATE SET output = EXCLUDED.output`,\n [workflowSlug, runId, stepId, attemptNumber, JSON.stringify(output)]\n );\n } finally {\n client.release();\n }\n }\n\n /**\n * Load step output\n */\n async loadStepOutput(\n workflowSlug: string,\n runId: string,\n stepId: string,\n attemptNumber: number\n ): Promise<any | null> {\n const client = await this.pool.connect();\n try {\n const result = await client.query(\n `SELECT output FROM ${this.schema}.step_outputs\n WHERE workflow_slug = $1 AND run_id = $2 AND step_id = $3 AND attempt_number = $4`,\n [workflowSlug, runId, stepId, attemptNumber]\n );\n\n return result.rows.length > 0 ? 
result.rows[0].output : null;\n } finally {\n client.release();\n }\n }\n\n /**\n * Register workflow metadata (upsert)\n */\n async upsertWorkflowMetadata(\n slug: string,\n name: string,\n location: string | undefined,\n inputSchemaJSON: any | undefined\n ): Promise<void> {\n const client = await this.pool.connect();\n try {\n await client.query(\n `INSERT INTO ${this.schema}.workflow_metadata (slug, name, description, input_schema_json, tags, updated_at)\n VALUES ($1, $2, $3, $4, $5, NOW())\n ON CONFLICT (slug)\n DO UPDATE SET\n name = EXCLUDED.name,\n description = EXCLUDED.description,\n input_schema_json = EXCLUDED.input_schema_json,\n updated_at = NOW()`,\n [\n slug,\n name,\n location || null,\n inputSchemaJSON ? JSON.stringify(inputSchemaJSON) : null,\n [], // tags not in WorkflowRegistration schema\n ]\n );\n } finally {\n client.release();\n }\n }\n\n /**\n * Register step definition (upsert)\n */\n async upsertStepDefinition(\n workflowSlug: string,\n step: StepDefinition\n ): Promise<void> {\n const client = await this.pool.connect();\n try {\n await client.query(\n `INSERT INTO ${this.schema}.step_definitions (\n workflow_slug, id, dependencies, export_output, input_schema_json,\n timeout_ms, max_retries, retry_delay_ms\n )\n VALUES ($1, $2, $3, $4, $5, $6, $7, $8)\n ON CONFLICT (workflow_slug, id)\n DO UPDATE SET\n dependencies = EXCLUDED.dependencies,\n export_output = EXCLUDED.export_output`,\n [\n workflowSlug,\n step.id,\n JSON.stringify(step.dependencies),\n step.exportOutput,\n null, // inputSchemaJSON not in StepDefinition schema\n null, // timeoutMs not in StepDefinition schema\n null, // maxRetries not in StepDefinition schema\n null, // retryDelayMs not in StepDefinition schema\n ]\n );\n } finally {\n client.release();\n }\n }\n\n /**\n * Get workflow metadata by slug\n */\n async getWorkflowMetadata(\n slug: string\n ): Promise<WorkflowMetadata | null> {\n const client = await this.pool.connect();\n try {\n const result = await client.query(\n `SELECT slug, name, description, input_schema_json\n FROM ${this.schema}.workflow_metadata\n WHERE slug = $1`,\n [slug]\n );\n\n if (result.rows.length === 0) {\n return null;\n }\n\n const row = result.rows[0];\n return {\n slug: row.slug,\n name: row.name,\n location: row.description, // using description as location\n inputSchemaJSON: row.input_schema_json,\n };\n } finally {\n client.release();\n }\n }\n\n /**\n * List all workflow metadata\n */\n async listWorkflowMetadata(): Promise<WorkflowMetadata[]> {\n const client = await this.pool.connect();\n try {\n const result = await client.query(\n `SELECT slug, name, description, input_schema_json\n FROM ${this.schema}.workflow_metadata\n ORDER BY name ASC`\n );\n\n return result.rows.map((row) => ({\n slug: row.slug,\n name: row.name,\n location: row.description, // using description as location\n inputSchemaJSON: row.input_schema_json,\n }));\n } finally {\n client.release();\n }\n }\n\n /**\n * Get step definitions for a workflow\n */\n async getWorkflowSteps(\n workflowSlug: string\n ): Promise<StepDefinition[]> {\n const client = await this.pool.connect();\n try {\n const result = await client.query(\n `SELECT id, dependencies, export_output\n FROM ${this.schema}.step_definitions\n WHERE workflow_slug = $1\n ORDER BY id ASC`,\n [workflowSlug]\n );\n\n return result.rows.map((row) => ({\n id: row.id,\n name: row.id, // StepDefinition has name field, use id as name\n dependencies: row.dependencies,\n exportOutput: row.export_output,\n }));\n } finally {\n 
client.release();\n }\n }\n\n /**\n * Get a specific workflow version by versionId\n * Uses ROW_NUMBER window function to compute version numbers (1 = oldest, N = newest)\n */\n async getWorkflowVersion(\n workflowSlug: string,\n versionId: string\n ): Promise<{\n workflow_slug: string;\n version_id: string;\n version_number: number;\n created_at: Date;\n step_manifest: string[];\n total_steps: number;\n git_commit: string | null;\n git_dirty: boolean | null;\n git_branch: string | null;\n } | null> {\n const client = await this.pool.connect();\n try {\n const result = await client.query(\n `WITH numbered_versions AS (\n SELECT *,\n ROW_NUMBER() OVER (PARTITION BY workflow_slug ORDER BY created_at ASC) as version_number\n FROM ${this.schema}.workflow_versions\n WHERE workflow_slug = $1\n )\n SELECT * FROM numbered_versions WHERE version_id = $2`,\n [workflowSlug, versionId]\n );\n\n if (result.rows.length === 0) return null;\n\n return result.rows[0];\n } finally {\n client.release();\n }\n }\n\n /**\n * Get the current (most recent) workflow version\n * Uses ROW_NUMBER window function to compute version numbers (1 = oldest, N = newest)\n */\n async getCurrentWorkflowVersion(\n workflowSlug: string\n ): Promise<{\n workflow_slug: string;\n version_id: string;\n version_number: number;\n created_at: Date;\n step_manifest: string[];\n total_steps: number;\n git_commit: string | null;\n git_dirty: boolean | null;\n git_branch: string | null;\n } | null> {\n const client = await this.pool.connect();\n try {\n const result = await client.query(\n `WITH numbered_versions AS (\n SELECT *,\n ROW_NUMBER() OVER (PARTITION BY workflow_slug ORDER BY created_at ASC) as version_number\n FROM ${this.schema}.workflow_versions\n WHERE workflow_slug = $1\n )\n SELECT * FROM numbered_versions\n ORDER BY created_at DESC\n LIMIT 1`,\n [workflowSlug]\n );\n\n if (result.rows.length === 0) return null;\n\n return result.rows[0];\n } finally {\n client.release();\n }\n }\n\n /**\n * List all workflow versions, ordered by created_at DESC (newest first)\n * Uses ROW_NUMBER window function to compute version numbers (1 = oldest, N = newest)\n */\n async listWorkflowVersions(\n workflowSlug: string,\n limit: number\n ): Promise<Array<{\n workflow_slug: string;\n version_id: string;\n version_number: number;\n created_at: Date;\n step_manifest: string[];\n total_steps: number;\n git_commit: string | null;\n git_dirty: boolean | null;\n git_branch: string | null;\n }>> {\n const client = await this.pool.connect();\n try {\n const result = await client.query(\n `WITH numbered_versions AS (\n SELECT *,\n ROW_NUMBER() OVER (PARTITION BY workflow_slug ORDER BY created_at ASC) as version_number\n FROM ${this.schema}.workflow_versions\n WHERE workflow_slug = $1\n )\n SELECT * FROM numbered_versions\n ORDER BY created_at DESC\n LIMIT $2`,\n [workflowSlug, limit]\n );\n\n return result.rows;\n } finally {\n client.release();\n }\n }\n\n /**\n * Save idempotency key (returns existing runId if key already exists)\n */\n async saveIdempotencyKey(\n hash: string,\n runId: string\n ): Promise<string> {\n const client = await this.pool.connect();\n try {\n const result = await client.query(\n `INSERT INTO ${this.schema}.idempotency_keys (hash, run_id)\n VALUES ($1, $2)\n ON CONFLICT (hash)\n DO UPDATE SET hash = EXCLUDED.hash\n RETURNING run_id`,\n [hash, runId]\n );\n\n return result.rows[0].run_id;\n } finally {\n client.release();\n }\n }\n\n /**\n * Get all distinct run IDs for a workflow\n */\n async listRunIds(\n workflowSlug: 
string\n ): Promise<string[]> {\n const client = await this.pool.connect();\n try {\n // Get distinct run IDs from both workflow and step events\n const result = await client.query(\n `SELECT DISTINCT run_id FROM ${this.schema}.workflow_events WHERE workflow_slug = $1\n UNION\n SELECT DISTINCT run_id FROM ${this.schema}.step_events WHERE workflow_slug = $1\n ORDER BY run_id DESC`,\n [workflowSlug]\n );\n\n return result.rows.map((row) => row.run_id);\n } finally {\n client.release();\n }\n }\n\n /**\n * Get all workflows that have active (incomplete) runs\n */\n async listActiveWorkflows(): Promise<string[]> {\n const client = await this.pool.connect();\n try {\n // Find workflows that have runs with incomplete steps or pending workflow events\n const result = await client.query(`\n SELECT DISTINCT workflow_slug FROM (\n SELECT DISTINCT workflow_slug FROM ${this.schema}.workflow_events\n WHERE type IN ('RunSubmitted', 'WorkflowStarted', 'WorkflowResumed')\n UNION\n SELECT DISTINCT workflow_slug FROM ${this.schema}.step_events\n WHERE type IN ('StepScheduled', 'StepStarted', 'StepReclaimed', 'StepRetrying')\n ) AS active\n ORDER BY workflow_slug ASC\n `);\n\n return result.rows.map((row) => row.workflow_slug);\n } finally {\n client.release();\n }\n }\n\n /**\n * Check if a run exists\n */\n async runExists(\n workflowSlug: string,\n runId: string\n ): Promise<boolean> {\n const client = await this.pool.connect();\n try {\n const result = await client.query(\n `SELECT 1 FROM ${this.schema}.workflow_events WHERE workflow_slug = $1 AND run_id = $2 LIMIT 1`,\n [workflowSlug, runId]\n );\n\n return result.rows.length > 0;\n } finally {\n client.release();\n }\n }\n\n /**\n * Get queue depth metrics using a single SQL query.\n * Computes run and step counts entirely in SQL using window functions.\n */\n async getQueueDepthAggregation(workflowSlug?: string): Promise<{\n pendingRuns: number;\n runningRuns: number;\n scheduledSteps: number;\n runningSteps: number;\n oldestScheduledStepUs: number | null;\n oldestPendingRunUs: number | null;\n }> {\n const client = await this.pool.connect();\n try {\n const result = await client.query<{\n pending_runs: string;\n running_runs: string;\n scheduled_steps: string;\n running_steps: string;\n oldest_scheduled_step_us: string | null;\n oldest_pending_run_us: string | null;\n }>(\n `WITH\n -- Get the latest workflow event per run to determine run status\n run_status AS (\n SELECT DISTINCT ON (workflow_slug, run_id)\n workflow_slug,\n run_id,\n type,\n timestamp_us AS created_at\n FROM ${this.schema}.workflow_events\n WHERE ($1::text IS NULL OR workflow_slug = $1)\n ORDER BY workflow_slug, run_id, timestamp_us DESC, event_id DESC\n ),\n -- Filter to only active (pending/running) runs\n active_runs AS (\n SELECT\n workflow_slug,\n run_id,\n CASE WHEN type IN ('RunSubmitted', 'WorkflowRetryStarted') THEN 'pending' ELSE 'running' END AS status,\n created_at\n FROM run_status\n WHERE type IN ('RunSubmitted', 'WorkflowRetryStarted', 'WorkflowStarted', 'WorkflowResumed')\n ),\n -- Get latest step event per step (excluding LogEntry which doesn't change state)\n latest_step_events AS (\n SELECT DISTINCT ON (se.workflow_slug, se.run_id, se.step_id)\n se.type,\n se.available_at_us\n FROM ${this.schema}.step_events se\n INNER JOIN active_runs ar\n ON ar.workflow_slug = se.workflow_slug\n AND ar.run_id = se.run_id\n AND ar.status = 'running'\n WHERE se.type != 'LogEntry'\n ORDER BY se.workflow_slug, se.run_id, se.step_id, se.timestamp_us DESC, se.event_id DESC\n ),\n -- 
Aggregate step counts\n step_counts AS (\n SELECT\n COUNT(*) FILTER (WHERE type IN ('StepScheduled', 'StepReclaimed')) AS scheduled_steps,\n COUNT(*) FILTER (WHERE type IN ('StepStarted', 'StepHeartbeat')) AS running_steps,\n MIN(available_at_us) FILTER (WHERE type IN ('StepScheduled', 'StepReclaimed')) AS oldest_scheduled_step_us\n FROM latest_step_events\n ),\n -- Aggregate run counts\n run_counts AS (\n SELECT\n COUNT(*) FILTER (WHERE status = 'pending') AS pending_runs,\n COUNT(*) FILTER (WHERE status = 'running') AS running_runs,\n MIN(created_at) FILTER (WHERE status = 'pending') AS oldest_pending_run_us\n FROM active_runs\n )\n SELECT\n COALESCE(rc.pending_runs, 0) AS pending_runs,\n COALESCE(rc.running_runs, 0) AS running_runs,\n COALESCE(sc.scheduled_steps, 0) AS scheduled_steps,\n COALESCE(sc.running_steps, 0) AS running_steps,\n sc.oldest_scheduled_step_us,\n rc.oldest_pending_run_us\n FROM run_counts rc, step_counts sc`,\n [workflowSlug ?? null]\n );\n\n const row = result.rows[0];\n return {\n pendingRuns: parseInt(row?.pending_runs ?? \"0\", 10),\n runningRuns: parseInt(row?.running_runs ?? \"0\", 10),\n scheduledSteps: parseInt(row?.scheduled_steps ?? \"0\", 10),\n runningSteps: parseInt(row?.running_steps ?? \"0\", 10),\n oldestScheduledStepUs: row?.oldest_scheduled_step_us\n ? parseInt(row.oldest_scheduled_step_us, 10)\n : null,\n oldestPendingRunUs: row?.oldest_pending_run_us\n ? parseInt(row.oldest_pending_run_us, 10)\n : null,\n };\n } finally {\n client.release();\n }\n }\n\n /**\n * List runs with filtering, sorting, and pagination - single query.\n * Returns run identifiers with basic info for filtering.\n */\n async listRunsFiltered(options?: {\n workflowSlug?: string;\n status?: string[];\n limit?: number;\n }): Promise<\n Array<{\n workflowSlug: string;\n runId: string;\n status: string;\n createdAt: number;\n tags: string[] | null;\n }>\n > {\n const client = await this.pool.connect();\n try {\n // Map status values to the event types that represent them\n const statusToEvents: Record<string, string[]> = {\n pending: [\"RunSubmitted\", \"WorkflowRetryStarted\"],\n running: [\"WorkflowStarted\", \"WorkflowResumed\"],\n completed: [\"WorkflowCompleted\"],\n failed: [\"WorkflowFailed\"],\n cancelled: [\"WorkflowCancelled\"],\n };\n\n // Build list of event types to filter on\n let eventTypeFilter: string[] | null = null;\n if (options?.status && options.status.length > 0) {\n eventTypeFilter = options.status.flatMap(\n (s) => statusToEvents[s] || []\n );\n }\n\n const result = await client.query<{\n workflow_slug: string;\n run_id: string;\n status: string;\n created_at: string;\n tags: string[] | null;\n }>(\n `WITH\n -- Get first event (RunSubmitted) for each run to get createdAt and tags\n run_submitted AS (\n SELECT DISTINCT ON (workflow_slug, run_id)\n workflow_slug,\n run_id,\n timestamp_us AS created_at,\n event_data->'tags' AS tags\n FROM ${this.schema}.workflow_events\n WHERE type = 'RunSubmitted'\n AND ($1::text IS NULL OR workflow_slug = $1)\n ORDER BY workflow_slug, run_id, timestamp_us ASC\n ),\n -- Get latest status-determining event for each run\n latest_status AS (\n SELECT DISTINCT ON (workflow_slug, run_id)\n workflow_slug,\n run_id,\n CASE\n WHEN type IN ('RunSubmitted', 'WorkflowRetryStarted') THEN 'pending'\n WHEN type IN ('WorkflowStarted', 'WorkflowResumed') THEN 'running'\n WHEN type = 'WorkflowCompleted' THEN 'completed'\n WHEN type = 'WorkflowFailed' THEN 'failed'\n WHEN type = 'WorkflowCancelled' THEN 'cancelled'\n END AS status,\n type\n 
FROM ${this.schema}.workflow_events\n WHERE ($1::text IS NULL OR workflow_slug = $1)\n ORDER BY workflow_slug, run_id, timestamp_us DESC, event_id DESC\n )\n SELECT\n ls.workflow_slug,\n ls.run_id,\n ls.status,\n rs.created_at,\n rs.tags\n FROM latest_status ls\n JOIN run_submitted rs ON ls.workflow_slug = rs.workflow_slug AND ls.run_id = rs.run_id\n WHERE ($2::text[] IS NULL OR ls.type = ANY($2))\n ORDER BY rs.created_at DESC\n LIMIT $3`,\n [\n options?.workflowSlug ?? null,\n eventTypeFilter,\n options?.limit ?? null,\n ]\n );\n\n return result.rows.map((row) => ({\n workflowSlug: row.workflow_slug,\n runId: row.run_id,\n status: row.status,\n createdAt: parseInt(row.created_at, 10),\n tags: row.tags,\n }));\n } finally {\n client.release();\n }\n }\n\n /**\n * Load all workflow events for multiple runs in a single query.\n */\n async loadWorkflowEventsForRuns(\n runs: Array<{ workflowSlug: string; runId: string }>\n ): Promise<Map<string, WorkflowEvent[]>> {\n if (runs.length === 0) {\n return new Map();\n }\n\n const client = await this.pool.connect();\n try {\n // Build VALUES clause for the runs\n const values: any[] = [];\n const valuePlaceholders: string[] = [];\n runs.forEach((run, i) => {\n values.push(run.workflowSlug, run.runId);\n valuePlaceholders.push(`($${i * 2 + 1}, $${i * 2 + 2})`);\n });\n\n const result = await client.query<{\n workflow_slug: string;\n run_id: string;\n event_data: any;\n event_id: string;\n }>(\n `SELECT workflow_slug, run_id, event_data, event_id\n FROM ${this.schema}.workflow_events\n WHERE (workflow_slug, run_id) IN (VALUES ${valuePlaceholders.join(\", \")})\n ORDER BY workflow_slug, run_id, timestamp_us ASC, event_id ASC`,\n values\n );\n\n // Group events by run\n const eventsByRun = new Map<string, WorkflowEvent[]>();\n for (const row of result.rows) {\n const key = `${row.workflow_slug}:${row.run_id}`;\n const events = eventsByRun.get(key) || [];\n events.push({ ...row.event_data, eventId: row.event_id });\n eventsByRun.set(key, events);\n }\n\n return eventsByRun;\n } finally {\n client.release();\n }\n }\n\n /**\n * Get queue depth by workflow using a single SQL query.\n * Returns aggregated counts per workflow.\n */\n async getQueueDepthByWorkflowAggregation(): Promise<\n Array<{\n workflowSlug: string;\n pendingRuns: number;\n scheduledSteps: number;\n oldestPendingItemUs: number | null;\n }>\n > {\n const client = await this.pool.connect();\n try {\n const result = await client.query<{\n workflow_slug: string;\n pending_runs: string;\n scheduled_steps: string;\n oldest_pending_item_us: string | null;\n }>(\n `WITH\n -- Get the latest workflow event per run to determine run status\n run_status AS (\n SELECT DISTINCT ON (workflow_slug, run_id)\n workflow_slug,\n run_id,\n type,\n timestamp_us AS created_at\n FROM ${this.schema}.workflow_events\n ORDER BY workflow_slug, run_id, timestamp_us DESC, event_id DESC\n ),\n -- Filter to only active (pending/running) runs\n active_runs AS (\n SELECT\n workflow_slug,\n run_id,\n CASE WHEN type IN ('RunSubmitted', 'WorkflowRetryStarted') THEN 'pending' ELSE 'running' END AS status,\n created_at\n FROM run_status\n WHERE type IN ('RunSubmitted', 'WorkflowRetryStarted', 'WorkflowStarted', 'WorkflowResumed')\n ),\n -- Get latest step event per step (excluding LogEntry)\n latest_step_events AS (\n SELECT DISTINCT ON (se.workflow_slug, se.run_id, se.step_id)\n se.workflow_slug,\n se.type,\n se.available_at_us\n FROM ${this.schema}.step_events se\n INNER JOIN active_runs ar\n ON ar.workflow_slug = 
se.workflow_slug\n AND ar.run_id = se.run_id\n AND ar.status = 'running'\n WHERE se.type != 'LogEntry'\n ORDER BY se.workflow_slug, se.run_id, se.step_id, se.timestamp_us DESC, se.event_id DESC\n ),\n -- Aggregate step counts by workflow\n step_counts_by_workflow AS (\n SELECT\n workflow_slug,\n COUNT(*) FILTER (WHERE type IN ('StepScheduled', 'StepReclaimed')) AS scheduled_steps,\n MIN(available_at_us) FILTER (WHERE type IN ('StepScheduled', 'StepReclaimed')) AS oldest_scheduled_us\n FROM latest_step_events\n GROUP BY workflow_slug\n ),\n -- Aggregate run counts by workflow\n run_counts_by_workflow AS (\n SELECT\n workflow_slug,\n COUNT(*) FILTER (WHERE status = 'pending') AS pending_runs,\n MIN(created_at) FILTER (WHERE status = 'pending') AS oldest_pending_run_us\n FROM active_runs\n GROUP BY workflow_slug\n )\n SELECT\n COALESCE(r.workflow_slug, s.workflow_slug) AS workflow_slug,\n COALESCE(r.pending_runs, 0) AS pending_runs,\n COALESCE(s.scheduled_steps, 0) AS scheduled_steps,\n LEAST(r.oldest_pending_run_us, s.oldest_scheduled_us) AS oldest_pending_item_us\n FROM run_counts_by_workflow r\n FULL OUTER JOIN step_counts_by_workflow s ON r.workflow_slug = s.workflow_slug\n WHERE COALESCE(r.pending_runs, 0) > 0 OR COALESCE(s.scheduled_steps, 0) > 0`\n );\n\n return result.rows.map((row) => ({\n workflowSlug: row.workflow_slug,\n pendingRuns: parseInt(row.pending_runs, 10),\n scheduledSteps: parseInt(row.scheduled_steps, 10),\n oldestPendingItemUs: row.oldest_pending_item_us\n ? parseInt(row.oldest_pending_item_us, 10)\n : null,\n }));\n } finally {\n client.release();\n }\n }\n}\n\n/**\n * Create a connection pool\n */\nexport function createPool(connectionString: string): pg.Pool {\n return new Pool({ connectionString });\n}\n",
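The concurrency-critical piece of the backend source above is the claim path: instead of advisory locks or a compare-and-swap status column, it locks the newest event row for a step and lets rival workers skip past it. A minimal sketch of that SELECT ... FOR UPDATE SKIP LOCKED pattern, assuming the default cascadeflow schema and the step_events layout created by the migrations below; the INSERT of the claiming event is elided:

import { Pool } from "pg";

// Sketch of the claim used by claimScheduledStep: lock the latest event row
// for one step, verify it is still claimable, and commit inside the same
// transaction. SKIP LOCKED makes a racing worker see zero rows instead of
// blocking, so exactly one claimant proceeds.
async function tryClaim(
  pool: Pool,
  workflowSlug: string,
  runId: string,
  stepId: string
): Promise<boolean> {
  const client = await pool.connect();
  try {
    await client.query("BEGIN");
    const latest = await client.query(
      `SELECT type FROM cascadeflow.step_events
        WHERE workflow_slug = $1 AND run_id = $2 AND step_id = $3
        ORDER BY timestamp_us DESC, event_id DESC
        LIMIT 1
        FOR UPDATE SKIP LOCKED`,
      [workflowSlug, runId, stepId]
    );
    // Zero rows means either no events exist yet or another transaction
    // holds the lock; a non-scheduled latest event means the step was
    // already claimed, completed, or failed.
    if (latest.rows.length === 0 || latest.rows[0].type !== "StepScheduled") {
      await client.query("ROLLBACK");
      return false;
    }
    // A real claimant would INSERT its StepStarted event here, still inside
    // the open transaction, before committing.
    await client.query("COMMIT");
    return true;
  } catch (error) {
    await client.query("ROLLBACK");
    throw error;
  } finally {
    client.release();
  }
}

Because appends only ever add newer rows, a committed claim flips the type check for every later claimant; the row lock only has to arbitrate workers racing on the same scheduled event.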
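The other pattern that recurs through the read queries above (listScheduledSteps, findStaleSteps, both queue-depth aggregations) is state projection via DISTINCT ON: the newest event per entity is treated as its current state. Stripped to its core, under the same table-layout assumption:

import { Pool } from "pg";

// Stripped-down version of the "latest event wins" projection used by the
// worker-loop and dashboard queries above. DISTINCT ON keeps exactly one
// row per (workflow_slug, run_id, step_id): the first row in the ORDER BY,
// i.e. the newest event, with event_id as a deterministic tiebreaker for
// events sharing a microsecond timestamp.
async function latestStepTypes(pool: Pool): Promise<Map<string, string>> {
  const result = await pool.query(
    `SELECT DISTINCT ON (workflow_slug, run_id, step_id)
            workflow_slug, run_id, step_id, type
       FROM cascadeflow.step_events
      ORDER BY workflow_slug, run_id, step_id, timestamp_us DESC, event_id DESC`
  );
  const states = new Map<string, string>();
  for (const row of result.rows) {
    states.set(`${row.workflow_slug}:${row.run_id}:${row.step_id}`, row.type);
  }
  return states;
}

The DESC indexes added in migrations 006 and 007 below exist precisely so these DISTINCT ON scans read index order forward instead of walking ASC indexes backward.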
  "import type { Pool } from \"pg\";\n\n/**\n * Migration 000: Create schema\n *\n * Creates the cascadeflow schema to isolate workflow tables from other application tables\n */\nasync function migration000_createSchema(pool: Pool, schema: string): Promise<void> {\n const client = await pool.connect();\n try {\n await client.query(`CREATE SCHEMA IF NOT EXISTS ${schema}`);\n console.log(`[Migration 000] Schema '${schema}' created successfully`);\n } catch (error) {\n console.error(`[Migration 000] Error creating schema '${schema}':`, error);\n throw error;\n } finally {\n client.release();\n }\n}\n\n/**\n * Migration 001: Create core tables\n *\n * Creates all tables needed for the backend-postgres implementation:\n * - workflow_events: Workflow-level events (event sourcing)\n * - step_events: Step-level events (event sourcing)\n * - workflow_metadata: Workflow registry metadata\n * - step_definitions: Step definitions from workflow registry\n * - step_outputs: Serialized step outputs\n * - idempotency_keys: Deduplication for run submissions\n */\nasync function migration001_createTables(pool: Pool, schema: string): Promise<void> {\n const client = await pool.connect();\n try {\n // Workflow events table - stores WorkflowEvent types\n await client.query(`\n CREATE TABLE IF NOT EXISTS ${schema}.workflow_events (\n id SERIAL PRIMARY KEY,\n event_id TEXT NOT NULL,\n workflow_slug TEXT NOT NULL,\n run_id TEXT NOT NULL,\n timestamp_us BIGINT NOT NULL,\n category TEXT NOT NULL DEFAULT 'workflow',\n type TEXT NOT NULL,\n event_data JSONB NOT NULL,\n created_at TIMESTAMP WITH TIME ZONE DEFAULT NOW()\n )\n `);\n\n // Step events table - stores StepEvent types\n await client.query(`\n CREATE TABLE IF NOT EXISTS ${schema}.step_events (\n id SERIAL PRIMARY KEY,\n event_id TEXT NOT NULL,\n workflow_slug TEXT NOT NULL,\n run_id TEXT NOT NULL,\n step_id TEXT NOT NULL,\n timestamp_us BIGINT NOT NULL,\n category TEXT NOT NULL DEFAULT 'step',\n type TEXT NOT NULL,\n event_data JSONB NOT NULL,\n created_at TIMESTAMP WITH TIME ZONE DEFAULT NOW()\n )\n `);\n\n // Workflow metadata table - registry\n await client.query(`\n CREATE TABLE IF NOT EXISTS ${schema}.workflow_metadata (\n slug TEXT PRIMARY KEY,\n name TEXT NOT NULL,\n description TEXT,\n input_schema_json JSONB,\n tags TEXT[],\n created_at TIMESTAMP WITH TIME ZONE DEFAULT NOW(),\n updated_at TIMESTAMP WITH TIME ZONE DEFAULT NOW()\n )\n `);\n\n // Step definitions table - registry\n await client.query(`\n CREATE TABLE IF NOT EXISTS ${schema}.step_definitions (\n workflow_slug TEXT NOT NULL,\n id TEXT NOT NULL,\n dependencies JSONB NOT NULL DEFAULT '{}',\n export_output BOOLEAN NOT NULL DEFAULT false,\n input_schema_json JSONB,\n timeout_ms INTEGER,\n max_retries INTEGER,\n retry_delay_ms INTEGER,\n created_at TIMESTAMP WITH TIME ZONE DEFAULT NOW(),\n PRIMARY KEY (workflow_slug, id),\n FOREIGN KEY (workflow_slug) REFERENCES ${schema}.workflow_metadata(slug) ON DELETE CASCADE\n )\n `);\n\n // Step outputs table - stores serialized outputs\n await client.query(`\n CREATE TABLE IF NOT EXISTS ${schema}.step_outputs (\n workflow_slug TEXT NOT NULL,\n run_id TEXT NOT NULL,\n step_id TEXT NOT NULL,\n attempt_number INTEGER NOT NULL,\n output JSONB NOT NULL,\n created_at TIMESTAMP WITH TIME ZONE DEFAULT NOW(),\n PRIMARY KEY (workflow_slug, run_id, step_id, attempt_number)\n )\n `);\n\n // Idempotency keys table - deduplication\n await client.query(`\n CREATE TABLE IF NOT EXISTS ${schema}.idempotency_keys (\n hash TEXT PRIMARY KEY,\n run_id TEXT NOT NULL,\n 
created_at TIMESTAMP WITH TIME ZONE DEFAULT NOW()\n )\n `);\n\n console.log(\"[Migration 001] Tables created successfully\");\n } catch (error) {\n console.error(\"[Migration 001] Error creating tables:\", error);\n throw error;\n } finally {\n client.release();\n }\n}\n\n/**\n * Migration 002: Add normalized columns to event tables\n *\n * Extracts frequently-queried fields from JSONB to top-level columns for better indexing:\n * - Step events: worker_id, attempt_number, available_at_us, export_output\n * - Workflow events: workflow_attempt_number, available_at_us, priority, timeout_us, idempotency_key\n */\nasync function migration002_addNormalizedColumns(pool: Pool, schema: string): Promise<void> {\n const client = await pool.connect();\n try {\n // Add normalized columns to step_events\n await client.query(`\n ALTER TABLE ${schema}.step_events\n ADD COLUMN IF NOT EXISTS worker_id TEXT,\n ADD COLUMN IF NOT EXISTS attempt_number INTEGER,\n ADD COLUMN IF NOT EXISTS available_at_us BIGINT,\n ADD COLUMN IF NOT EXISTS export_output BOOLEAN\n `);\n\n // Add normalized columns to workflow_events\n await client.query(`\n ALTER TABLE ${schema}.workflow_events\n ADD COLUMN IF NOT EXISTS workflow_attempt_number INTEGER,\n ADD COLUMN IF NOT EXISTS available_at_us BIGINT,\n ADD COLUMN IF NOT EXISTS priority INTEGER,\n ADD COLUMN IF NOT EXISTS timeout_us BIGINT,\n ADD COLUMN IF NOT EXISTS idempotency_key TEXT\n `);\n\n console.log(\"[Migration 002] Normalized columns added successfully\");\n } catch (error) {\n console.error(\"[Migration 002] Error adding normalized columns:\", error);\n throw error;\n } finally {\n client.release();\n }\n}\n\n/**\n * Migration 003: Create indexes for performance\n *\n * Creates indexes for common query patterns:\n * - Event ordering by timestamp\n * - Step claiming queries\n * - Queue state projection\n * - Registry lookups\n * - Normalized column queries (stale detection, priority, scheduling)\n */\nasync function migration003_createIndexes(pool: Pool, schema: string): Promise<void> {\n const client = await pool.connect();\n try {\n // Workflow events indexes\n await client.query(`\n CREATE INDEX IF NOT EXISTS idx_workflow_events_lookup\n ON ${schema}.workflow_events (workflow_slug, run_id, timestamp_us)\n `);\n\n await client.query(`\n CREATE INDEX IF NOT EXISTS idx_workflow_events_type\n ON ${schema}.workflow_events (workflow_slug, run_id, type)\n `);\n\n // Step events indexes - critical for step claiming and state queries\n await client.query(`\n CREATE INDEX IF NOT EXISTS idx_step_events_lookup\n ON ${schema}.step_events (workflow_slug, run_id, step_id, timestamp_us)\n `);\n\n await client.query(`\n CREATE INDEX IF NOT EXISTS idx_step_events_run\n ON ${schema}.step_events (workflow_slug, run_id, timestamp_us)\n `);\n\n await client.query(`\n CREATE INDEX IF NOT EXISTS idx_step_events_type\n ON ${schema}.step_events (workflow_slug, run_id, type)\n `);\n\n // Composite index for scheduled step queries (step claiming)\n await client.query(`\n CREATE INDEX IF NOT EXISTS idx_step_events_scheduled\n ON ${schema}.step_events (workflow_slug, run_id, step_id, type, timestamp_us)\n `);\n\n // Step definitions lookup\n await client.query(`\n CREATE INDEX IF NOT EXISTS idx_step_definitions_workflow\n ON ${schema}.step_definitions (workflow_slug)\n `);\n\n // Normalized column indexes for step_events\n // For stale step detection (worker_id + timestamp for reclamation)\n await client.query(`\n CREATE INDEX IF NOT EXISTS idx_step_events_worker_timestamp\n ON 
${schema}.step_events (worker_id, timestamp_us)\n WHERE worker_id IS NOT NULL\n `);\n\n // For delayed scheduling (available_at_us filtering)\n await client.query(`\n CREATE INDEX IF NOT EXISTS idx_step_events_available_at\n ON ${schema}.step_events (type, available_at_us)\n WHERE available_at_us IS NOT NULL\n `);\n\n // For attempt number filtering\n await client.query(`\n CREATE INDEX IF NOT EXISTS idx_step_events_attempt\n ON ${schema}.step_events (workflow_slug, run_id, step_id, attempt_number)\n WHERE attempt_number IS NOT NULL\n `);\n\n // Normalized column indexes for workflow_events\n // For priority queue ordering\n await client.query(`\n CREATE INDEX IF NOT EXISTS idx_workflow_events_priority\n ON ${schema}.workflow_events (priority DESC, available_at_us)\n WHERE priority IS NOT NULL\n `);\n\n // For idempotency key lookups\n await client.query(`\n CREATE INDEX IF NOT EXISTS idx_workflow_events_idempotency\n ON ${schema}.workflow_events (idempotency_key)\n WHERE idempotency_key IS NOT NULL\n `);\n\n // For timeout enforcement\n await client.query(`\n CREATE INDEX IF NOT EXISTS idx_workflow_events_timeout\n ON ${schema}.workflow_events (workflow_slug, run_id, timeout_us)\n WHERE timeout_us IS NOT NULL\n `);\n\n console.log(\"[Migration 003] Indexes created successfully\");\n } catch (error) {\n console.error(\"[Migration 003] Error creating indexes:\", error);\n throw error;\n } finally {\n client.release();\n }\n}\n\n/**\n * Migration 004: Add error fingerprint columns for efficient error tracking\n *\n * Adds normalized columns for composable error fingerprints to step_events table.\n * These allow flexible grouping strategies without recomputing hashes.\n */\nasync function migration004_addErrorFingerprints(pool: Pool, schema: string): Promise<void> {\n const client = await pool.connect();\n try {\n // Add error fingerprint columns to step_events\n await client.query(`\n ALTER TABLE ${schema}.step_events\n ADD COLUMN IF NOT EXISTS error_name_hash TEXT NOT NULL DEFAULT '',\n ADD COLUMN IF NOT EXISTS error_message_hash TEXT NOT NULL DEFAULT '',\n ADD COLUMN IF NOT EXISTS error_stack_exact_hash TEXT NOT NULL DEFAULT '',\n ADD COLUMN IF NOT EXISTS error_stack_normalized_hash TEXT NOT NULL DEFAULT '',\n ADD COLUMN IF NOT EXISTS error_stack_portable_hash TEXT NOT NULL DEFAULT ''\n `);\n\n // Create composite indexes for error queries (partial index on StepFailed only)\n // Index for \"exact\" grouping strategy\n await client.query(`\n CREATE INDEX IF NOT EXISTS idx_error_fp_exact\n ON ${schema}.step_events(error_name_hash, error_message_hash, error_stack_exact_hash)\n WHERE type = 'StepFailed'\n `);\n\n // Index for \"normalized\" grouping strategy\n await client.query(`\n CREATE INDEX IF NOT EXISTS idx_error_fp_normalized\n ON ${schema}.step_events(error_name_hash, error_message_hash, error_stack_normalized_hash)\n WHERE type = 'StepFailed'\n `);\n\n // Index for \"portable\" grouping strategy\n await client.query(`\n CREATE INDEX IF NOT EXISTS idx_error_fp_portable\n ON ${schema}.step_events(error_name_hash, error_message_hash, error_stack_portable_hash)\n WHERE type = 'StepFailed'\n `);\n\n console.log(\"[Migration 004] Error fingerprint columns and indexes added successfully\");\n } catch (error) {\n console.error(\"[Migration 004] Error adding error fingerprints:\", error);\n throw error;\n } finally {\n client.release();\n }\n}\n\n/**\n * Migration 005: Add workflow versioning support\n *\n * Creates workflow_versions table and adds version_id columns to event tables.\n * This 
enables tracking which steps existed at the time of execution and\n * detecting workflow definition changes across runs.\n */\nasync function migration005_addWorkflowVersioning(pool: Pool, schema: string): Promise<void> {\n const client = await pool.connect();\n try {\n // Create workflow_versions table\n await client.query(`\n CREATE TABLE IF NOT EXISTS ${schema}.workflow_versions (\n workflow_slug TEXT NOT NULL,\n version_id TEXT NOT NULL,\n created_at TIMESTAMP WITH TIME ZONE DEFAULT NOW(),\n step_manifest TEXT[] NOT NULL,\n total_steps INTEGER NOT NULL,\n git_commit TEXT,\n git_dirty BOOLEAN,\n git_branch TEXT,\n PRIMARY KEY (workflow_slug, version_id),\n FOREIGN KEY (workflow_slug) REFERENCES ${schema}.workflow_metadata(slug) ON DELETE CASCADE\n )\n `);\n\n await client.query(`\n CREATE INDEX IF NOT EXISTS idx_workflow_versions_slug\n ON ${schema}.workflow_versions(workflow_slug)\n `);\n\n await client.query(`\n CREATE INDEX IF NOT EXISTS idx_workflow_versions_created\n ON ${schema}.workflow_versions(workflow_slug, created_at DESC)\n `);\n\n // Add version_id column to workflow_events (NOT NULL)\n await client.query(`\n ALTER TABLE ${schema}.workflow_events\n ADD COLUMN IF NOT EXISTS version_id TEXT NOT NULL\n `);\n\n await client.query(`\n CREATE INDEX IF NOT EXISTS idx_workflow_events_version\n ON ${schema}.workflow_events(workflow_slug, run_id, version_id)\n `);\n\n // Add version_id column to step_events (NOT NULL)\n await client.query(`\n ALTER TABLE ${schema}.step_events\n ADD COLUMN IF NOT EXISTS version_id TEXT NOT NULL\n `);\n\n await client.query(`\n CREATE INDEX IF NOT EXISTS idx_step_events_version\n ON ${schema}.step_events(workflow_slug, version_id)\n `);\n\n console.log(\"[Migration 005] Workflow versioning support added successfully\");\n } catch (error) {\n console.error(\"[Migration 005] Error adding workflow versioning:\", error);\n throw error;\n } finally {\n client.release();\n }\n}\n\n/**\n * Migration 006: Add DESC indexes for optimized DISTINCT ON queries\n *\n * Our dashboard and list queries use DISTINCT ON with ORDER BY ... DESC\n * to find the latest event per entity. These indexes enable efficient\n * forward scans instead of backward scans on ASC indexes.\n */\nasync function migration006_addDescIndexes(pool: Pool, schema: string): Promise<void> {\n const client = await pool.connect();\n try {\n // Index for workflow_events: used by listRunsFiltered, getQueueDepthAggregation\n // Pattern: DISTINCT ON (workflow_slug, run_id) ORDER BY ... timestamp_us DESC, event_id DESC\n await client.query(`\n CREATE INDEX IF NOT EXISTS idx_workflow_events_latest\n ON ${schema}.workflow_events (workflow_slug, run_id, timestamp_us DESC, event_id DESC)\n `);\n\n // Index for step_events: used by getQueueDepthAggregation for step status counts\n // Pattern: DISTINCT ON (workflow_slug, run_id, step_id) ORDER BY ... 
timestamp_us DESC, event_id DESC\n await client.query(`\n CREATE INDEX IF NOT EXISTS idx_step_events_latest\n ON ${schema}.step_events (workflow_slug, run_id, step_id, timestamp_us DESC, event_id DESC)\n `);\n\n // Index for workflow_events filtered by type: used by listRunsFiltered to find RunSubmitted events\n // Pattern: WHERE type = 'RunSubmitted' ORDER BY workflow_slug, run_id, timestamp_us ASC\n await client.query(`\n CREATE INDEX IF NOT EXISTS idx_workflow_events_by_type\n ON ${schema}.workflow_events (type, workflow_slug, run_id, timestamp_us ASC)\n `);\n\n console.log(\"[Migration 006] Optimized query indexes added successfully\");\n } catch (error) {\n console.error(\"[Migration 006] Error adding DESC indexes:\", error);\n throw error;\n } finally {\n client.release();\n }\n}\n\n/**\n * Migration 007: Add worker loop indexes\n *\n * Adds partial indexes optimized for worker loop queries:\n * - listScheduledSteps: Find steps in scheduled/reclaimed/retrying state\n * - findStaleSteps: Find steps with stale heartbeats\n * - listActiveWorkflows: Find workflows with active runs\n */\nasync function migration007_addWorkerIndexes(pool: Pool, schema: string): Promise<void> {\n const client = await pool.connect();\n try {\n // For listScheduledSteps - DISTINCT ON with type filter\n // Partial index for scheduled step states only\n await client.query(`\n CREATE INDEX IF NOT EXISTS idx_step_events_scheduled_status\n ON ${schema}.step_events (type, workflow_slug, run_id, step_id, timestamp_us DESC, event_id DESC)\n WHERE type IN ('StepScheduled', 'StepReclaimed', 'StepRetrying')\n `);\n\n // For findStaleSteps - DISTINCT ON with type filter for heartbeat detection\n await client.query(`\n CREATE INDEX IF NOT EXISTS idx_step_events_heartbeat_status\n ON ${schema}.step_events (type, workflow_slug, run_id, step_id, timestamp_us DESC, event_id DESC)\n WHERE type IN ('StepStarted', 'StepHeartbeat')\n `);\n\n // For listActiveWorkflows - type filter on workflow_events\n await client.query(`\n CREATE INDEX IF NOT EXISTS idx_workflow_events_active_types\n ON ${schema}.workflow_events (type)\n WHERE type IN ('RunSubmitted', 'WorkflowStarted', 'WorkflowResumed')\n `);\n\n // For listActiveWorkflows - type filter on step_events\n await client.query(`\n CREATE INDEX IF NOT EXISTS idx_step_events_active_types\n ON ${schema}.step_events (type)\n WHERE type IN ('StepScheduled', 'StepStarted', 'StepReclaimed', 'StepRetrying')\n `);\n\n console.log(\"[Migration 007] Worker loop indexes added successfully\");\n } catch (error) {\n console.error(\"[Migration 007] Error adding worker indexes:\", error);\n throw error;\n } finally {\n client.release();\n }\n}\n\n/**\n * Run all migrations in order\n * Each migration is idempotent (safe to rerun)\n */\nexport async function runMigrations(pool: Pool, schema: string = 'cascadeflow'): Promise<void> {\n console.log(`[Migrations] Starting database migrations in schema '${schema}'...`);\n\n try {\n await migration000_createSchema(pool, schema);\n await migration001_createTables(pool, schema);\n await migration002_addNormalizedColumns(pool, schema);\n await migration003_createIndexes(pool, schema);\n await migration004_addErrorFingerprints(pool, schema);\n await migration005_addWorkflowVersioning(pool, schema);\n await migration006_addDescIndexes(pool, schema);\n await migration007_addWorkerIndexes(pool, schema);\n\n console.log(\"[Migrations] All migrations completed successfully\");\n } catch (error) {\n console.error(\"[Migrations] Migration failed:\", error);\n 
throw error;\n }\n}\n"
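Every statement in the chain is guarded by IF NOT EXISTS, so rerunning the whole sequence on each boot is safe. A minimal bootstrap sketch; the import path for runMigrations is an assumption (the published bundle may expose it elsewhere), as is the DATABASE_URL fallback:

import { Pool } from "pg";
// Illustrative path: runMigrations is exported by the migrations module
// shown above; where the published package re-exports it is an assumption.
import { runMigrations } from "@cascade-flow/backend-postgres/dist/migrations";

async function bootstrap(): Promise<void> {
  const pool = new Pool({
    connectionString: process.env.DATABASE_URL ?? "postgres://localhost:5432/app",
  });
  // Order matters only on first run (000 creates the schema the rest
  // target); reruns are no-ops thanks to the IF NOT EXISTS guards.
  await runMigrations(pool, "cascadeflow");
  await pool.end();
}

bootstrap().catch((error) => {
  console.error(error);
  process.exit(1);
});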
package/package.json CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "@cascade-flow/backend-postgres",
-  "version": "0.2.13",
+  "version": "0.2.14",
   "main": "./dist/index.js",
   "module": "./dist/index.js",
   "types": "./dist/index.d.ts",
@@ -18,8 +18,8 @@
     "test": "bun test"
   },
   "dependencies": {
-    "@cascade-flow/backend-interface": "0.2.6",
-    "@cascade-flow/runner": "0.2.10",
+    "@cascade-flow/backend-interface": "0.2.7",
+    "@cascade-flow/runner": "0.2.11",
     "pg": "^8.13.1"
   },
   "devDependencies": {