@workflow/world-postgres 4.0.1-beta.2 → 4.1.0-beta.10

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (52)
  1. package/LICENSE.md +201 -21
  2. package/README.md +33 -0
  3. package/bin/setup.js +12 -0
  4. package/dist/boss.d.ts +15 -0
  5. package/dist/boss.d.ts.map +1 -0
  6. package/dist/boss.js +18 -0
  7. package/dist/boss.js.map +1 -0
  8. package/dist/cli.d.ts +3 -0
  9. package/dist/cli.d.ts.map +1 -0
  10. package/dist/cli.js +44 -0
  11. package/dist/cli.js.map +1 -0
  12. package/dist/config.d.ts +6 -0
  13. package/dist/config.d.ts.map +1 -0
  14. package/dist/config.js +2 -0
  15. package/dist/config.js.map +1 -0
  16. package/dist/drizzle/index.d.ts +8 -0
  17. package/dist/drizzle/index.d.ts.map +1 -0
  18. package/dist/drizzle/index.js +7 -0
  19. package/dist/drizzle/index.js.map +1 -0
  20. package/dist/drizzle/schema.d.ts +843 -0
  21. package/dist/drizzle/schema.d.ts.map +1 -0
  22. package/dist/drizzle/schema.js +88 -0
  23. package/dist/drizzle/schema.js.map +1 -0
  24. package/dist/index.d.ts +8 -0
  25. package/dist/index.d.ts.map +1 -0
  26. package/dist/index.js +40 -0
  27. package/dist/index.js.map +1 -0
  28. package/dist/queue.d.ts +17 -0
  29. package/dist/queue.d.ts.map +1 -0
  30. package/dist/queue.js +103 -0
  31. package/dist/queue.js.map +1 -0
  32. package/dist/storage.d.ts +7 -0
  33. package/dist/storage.d.ts.map +1 -0
  34. package/dist/storage.js +585 -0
  35. package/dist/storage.js.map +1 -0
  36. package/dist/streamer.d.ts +5 -0
  37. package/dist/streamer.d.ts.map +1 -0
  38. package/dist/streamer.js +157 -0
  39. package/dist/streamer.js.map +1 -0
  40. package/dist/util.d.ts +6 -0
  41. package/dist/util.d.ts.map +1 -0
  42. package/dist/util.js +20 -0
  43. package/dist/util.js.map +1 -0
  44. package/dist/zod.d.ts +3 -0
  45. package/dist/zod.d.ts.map +1 -0
  46. package/dist/zod.js +10 -0
  47. package/dist/zod.js.map +1 -0
  48. package/package.json +32 -17
  49. package/src/drizzle/migrations/0000_cultured_the_anarchist.sql +84 -0
  50. package/src/drizzle/migrations/0001_update_error_schema.sql +7 -0
  51. package/src/drizzle/migrations/meta/0000_snapshot.json +499 -0
  52. package/src/drizzle/migrations/meta/_journal.json +13 -0
package/dist/streamer.js ADDED
@@ -0,0 +1,157 @@
+ import { EventEmitter } from 'node:events';
+ import { and, eq } from 'drizzle-orm';
+ import { monotonicFactory } from 'ulid';
+ import * as z from 'zod';
+ import { Schema } from './drizzle/index.js';
+ import { Mutex } from './util.js';
+ const StreamPublishMessage = z.object({
+ streamId: z.string(),
+ chunkId: z.templateLiteral(['chnk_', z.string()]),
+ });
+ class Rc {
+ resource;
+ refCount = 0;
+ constructor(resource) {
+ this.resource = resource;
+ }
+ acquire() {
+ this.refCount++;
+ return {
+ ...this.resource,
+ [Symbol.dispose]: () => {
+ this.release();
+ },
+ };
+ }
+ release() {
+ this.refCount--;
+ if (this.refCount <= 0) {
+ this.resource.drop();
+ }
+ }
+ }
+ export function createStreamer(postgres, drizzle) {
+ const ulid = monotonicFactory();
+ const events = new EventEmitter();
+ const { streams } = Schema;
+ const genChunkId = () => `chnk_${ulid()}`;
+ const mutexes = new Map();
+ const getMutex = (key) => {
+ let mutex = mutexes.get(key);
+ if (!mutex) {
+ mutex = new Rc({
+ mutex: new Mutex(),
+ drop: () => mutexes.delete(key),
+ });
+ mutexes.set(key, mutex);
+ }
+ return mutex.acquire();
+ };
+ const STREAM_TOPIC = 'workflow_event_chunk';
+ postgres.listen(STREAM_TOPIC, async (msg) => {
+ const parsed = await Promise.resolve(msg)
+ .then(JSON.parse)
+ .then(StreamPublishMessage.parse);
+ const key = `strm:${parsed.streamId}`;
+ if (!events.listenerCount(key)) {
+ return;
+ }
+ const resource = getMutex(key);
+ await resource.mutex.andThen(async () => {
+ const [value] = await drizzle
+ .select({ eof: streams.eof, data: streams.chunkData })
+ .from(streams)
+ .where(and(eq(streams.streamId, parsed.streamId), eq(streams.chunkId, parsed.chunkId)))
+ .limit(1);
+ if (!value)
+ return;
+ const { data, eof } = value;
+ events.emit(key, { id: parsed.chunkId, data, eof });
+ });
+ });
+ return {
+ async writeToStream(name, chunk) {
+ const chunkId = genChunkId();
+ await drizzle.insert(streams).values({
+ chunkId,
+ streamId: name,
+ chunkData: !Buffer.isBuffer(chunk) ? Buffer.from(chunk) : chunk,
+ eof: false,
+ });
+ postgres.notify(STREAM_TOPIC, JSON.stringify(StreamPublishMessage.encode({
+ chunkId,
+ streamId: name,
+ })));
+ },
+ async closeStream(name) {
+ const chunkId = genChunkId();
+ await drizzle.insert(streams).values({
+ chunkId,
+ streamId: name,
+ chunkData: Buffer.from([]),
+ eof: true,
+ });
+ postgres.notify('workflow_event_chunk', JSON.stringify(StreamPublishMessage.encode({
+ streamId: name,
+ chunkId,
+ })));
+ },
+ async readFromStream(name, startIndex) {
+ const cleanups = [];
+ return new ReadableStream({
+ async start(controller) {
+ // an empty string is always < than any string,
+ // so `'' < ulid()` and `ulid() < ulid()` (maintaining order)
+ let lastChunkId = '';
+ let offset = startIndex ?? 0;
+ let buffer = [];
+ function enqueue(msg) {
+ if (lastChunkId >= msg.id) {
+ // already sent or out of order
+ return;
+ }
+ if (offset > 0) {
+ offset--;
+ return;
+ }
+ if (msg.data.byteLength) {
+ controller.enqueue(new Uint8Array(msg.data));
+ }
+ if (msg.eof) {
+ controller.close();
+ }
+ lastChunkId = msg.id;
+ }
+ function onData(data) {
+ if (buffer) {
+ buffer.push(data);
+ return;
+ }
+ enqueue(data);
+ }
+ events.on(`strm:${name}`, onData);
+ cleanups.push(() => {
+ events.off(`strm:${name}`, onData);
+ });
+ const chunks = await drizzle
+ .select({
+ id: streams.chunkId,
+ eof: streams.eof,
+ data: streams.chunkData,
+ })
+ .from(streams)
+ .where(and(eq(streams.streamId, name)))
+ .orderBy(streams.chunkId);
+ for (const chunk of [...chunks, ...(buffer ?? [])]) {
+ enqueue(chunk);
+ }
+ buffer = null;
+ },
+ cancel() {
+ cleanups.forEach((fn) => fn());
+ },
+ });
+ },
+ };
+ }
+ //# sourceMappingURL=streamer.js.map
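
For orientation, a hedged usage sketch of the streamer above (illustrative only; the postgres.js/drizzle client setup and the stream name are assumptions, and the workflow tables are assumed to already exist):

// Sketch only: wire createStreamer to a postgres.js client and a drizzle instance
// bound to the same database (the workflow schema/migrations must already be applied).
import postgres from 'postgres';
import { drizzle } from 'drizzle-orm/postgres-js';
import { createStreamer } from './streamer.js';

const sql = postgres(process.env.DATABASE_URL ?? '');
const db = drizzle(sql);
const streamer = createStreamer(sql, db);

await streamer.writeToStream('strm_example', Buffer.from('hello '));
await streamer.writeToStream('strm_example', Buffer.from('world'));
await streamer.closeStream('strm_example');

// readFromStream returns a web ReadableStream of Uint8Array chunks: stored chunks are
// replayed first, then live chunks arriving via NOTIFY, until an eof chunk closes it.
const readable = await streamer.readFromStream('strm_example', 0);
const reader = readable.getReader();
for (;;) {
  const { done, value } = await reader.read();
  if (done) break;
  process.stdout.write(Buffer.from(value));
}
await sql.end();
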
package/dist/streamer.js.map ADDED
@@ -0,0 +1 @@
+ {"version":3,"file":"streamer.js","sourceRoot":"","sources":["../src/streamer.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,YAAY,EAAE,MAAM,aAAa,CAAC;AAE3C,OAAO,EAAE,GAAG,EAAE,EAAE,EAAE,MAAM,aAAa,CAAC;AAEtC,OAAO,EAAE,gBAAgB,EAAE,MAAM,MAAM,CAAC;AACxC,OAAO,KAAK,CAAC,MAAM,KAAK,CAAC;AACzB,OAAO,EAAgB,MAAM,EAAE,MAAM,oBAAoB,CAAC;AAC1D,OAAO,EAAE,KAAK,EAAE,MAAM,WAAW,CAAC;AAElC,MAAM,oBAAoB,GAAG,CAAC,CAAC,MAAM,CAAC;IACpC,QAAQ,EAAE,CAAC,CAAC,MAAM,EAAE;IACpB,OAAO,EAAE,CAAC,CAAC,eAAe,CAAC,CAAC,OAAO,EAAE,CAAC,CAAC,MAAM,EAAE,CAAC,CAAC;CAClD,CAAC,CAAC;AAQH,MAAM,EAAE;IAEc;IADZ,QAAQ,GAAG,CAAC,CAAC;IACrB,YAAoB,QAAW;QAAX,aAAQ,GAAR,QAAQ,CAAG;IAAG,CAAC;IACnC,OAAO;QACL,IAAI,CAAC,QAAQ,EAAE,CAAC;QAChB,OAAO;YACL,GAAG,IAAI,CAAC,QAAQ;YAChB,CAAC,MAAM,CAAC,OAAO,CAAC,EAAE,GAAG,EAAE;gBACrB,IAAI,CAAC,OAAO,EAAE,CAAC;YACjB,CAAC;SACF,CAAC;IACJ,CAAC;IACD,OAAO;QACL,IAAI,CAAC,QAAQ,EAAE,CAAC;QAChB,IAAI,IAAI,CAAC,QAAQ,IAAI,CAAC,EAAE,CAAC;YACvB,IAAI,CAAC,QAAQ,CAAC,IAAI,EAAE,CAAC;QACvB,CAAC;IACH,CAAC;CACF;AAED,MAAM,UAAU,cAAc,CAAC,QAAa,EAAE,OAAgB;IAC5D,MAAM,IAAI,GAAG,gBAAgB,EAAE,CAAC;IAChC,MAAM,MAAM,GAAG,IAAI,YAAY,EAE3B,CAAC;IACL,MAAM,EAAE,OAAO,EAAE,GAAG,MAAM,CAAC;IAC3B,MAAM,UAAU,GAAG,GAAG,EAAE,CAAC,QAAQ,IAAI,EAAE,EAAW,CAAC;IACnD,MAAM,OAAO,GAAG,IAAI,GAAG,EAA8C,CAAC;IACtE,MAAM,QAAQ,GAAG,CAAC,GAAW,EAAE,EAAE;QAC/B,IAAI,KAAK,GAAG,OAAO,CAAC,GAAG,CAAC,GAAG,CAAC,CAAC;QAC7B,IAAI,CAAC,KAAK,EAAE,CAAC;YACX,KAAK,GAAG,IAAI,EAAE,CAAC;gBACb,KAAK,EAAE,IAAI,KAAK,EAAE;gBAClB,IAAI,EAAE,GAAG,EAAE,CAAC,OAAO,CAAC,MAAM,CAAC,GAAG,CAAC;aAChC,CAAC,CAAC;YACH,OAAO,CAAC,GAAG,CAAC,GAAG,EAAE,KAAK,CAAC,CAAC;QAC1B,CAAC;QACD,OAAO,KAAK,CAAC,OAAO,EAAE,CAAC;IACzB,CAAC,CAAC;IAEF,MAAM,YAAY,GAAG,sBAAsB,CAAC;IAC5C,QAAQ,CAAC,MAAM,CAAC,YAAY,EAAE,KAAK,EAAE,GAAG,EAAE,EAAE;QAC1C,MAAM,MAAM,GAAG,MAAM,OAAO,CAAC,OAAO,CAAC,GAAG,CAAC;aACtC,IAAI,CAAC,IAAI,CAAC,KAAK,CAAC;aAChB,IAAI,CAAC,oBAAoB,CAAC,KAAK,CAAC,CAAC;QAEpC,MAAM,GAAG,GAAG,QAAQ,MAAM,CAAC,QAAQ,EAAW,CAAC;QAC/C,IAAI,CAAC,MAAM,CAAC,aAAa,CAAC,GAAG,CAAC,EAAE,CAAC;YAC/B,OAAO;QACT,CAAC;QAED,MAAM,QAAQ,GAAG,QAAQ,CAAC,GAAG,CAAC,CAAC;QAC/B,MAAM,QAAQ,CAAC,KAAK,CAAC,OAAO,CAAC,KAAK,IAAI,EAAE;YACtC,MAAM,CAAC,KAAK,CAAC,GAAG,MAAM,OAAO;iBAC1B,MAAM,CAAC,EAAE,GAAG,EAAE,OAAO,CAAC,GAAG,EAAE,IAAI,EAAE,OAAO,CAAC,SAAS,EAAE,CAAC;iBACrD,IAAI,CAAC,OAAO,CAAC;iBACb,KAAK,CACJ,GAAG,CACD,EAAE,CAAC,OAAO,CAAC,QAAQ,EAAE,MAAM,CAAC,QAAQ,CAAC,EACrC,EAAE,CAAC,OAAO,CAAC,OAAO,EAAE,MAAM,CAAC,OAAO,CAAC,CACpC,CACF;iBACA,KAAK,CAAC,CAAC,CAAC,CAAC;YACZ,IAAI,CAAC,KAAK;gBAAE,OAAO;YACnB,MAAM,EAAE,IAAI,EAAE,GAAG,EAAE,GAAG,KAAK,CAAC;YAC5B,MAAM,CAAC,IAAI,CAAC,GAAG,EAAE,EAAE,EAAE,EAAE,MAAM,CAAC,OAAO,EAAE,IAAI,EAAE,GAAG,EAAE,CAAC,CAAC;QACtD,CAAC,CAAC,CAAC;IACL,CAAC,CAAC,CAAC;IAEH,OAAO;QACL,KAAK,CAAC,aAAa,CAAC,IAAI,EAAE,KAAK;YAC7B,MAAM,OAAO,GAAG,UAAU,EAAE,CAAC;YAC7B,MAAM,OAAO,CAAC,MAAM,CAAC,OAAO,CAAC,CAAC,MAAM,CAAC;gBACnC,OAAO;gBACP,QAAQ,EAAE,IAAI;gBACd,SAAS,EAAE,CAAC,MAAM,CAAC,QAAQ,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,KAAK;gBAC/D,GAAG,EAAE,KAAK;aACX,CAAC,CAAC;YACH,QAAQ,CAAC,MAAM,CACb,YAAY,EACZ,IAAI,CAAC,SAAS,CACZ,oBAAoB,CAAC,MAAM,CAAC;gBAC1B,OAAO;gBACP,QAAQ,EAAE,IAAI;aACf,CAAC,CACH,CACF,CAAC;QACJ,CAAC;QACD,KAAK,CAAC,WAAW,CAAC,IAAY;YAC5B,MAAM,OAAO,GAAG,UAAU,EAAE,CAAC;YAC7B,MAAM,OAAO,CAAC,MAAM,CAAC,OAAO,CAAC,CAAC,MAAM,CAAC;gBACnC,OAAO;gBACP,QAAQ,EAAE,IAAI;gBACd,SAAS,EAAE,MAAM,CAAC,IAAI,CAAC,EAAE,CAAC;gBAC1B,GAAG,EAAE,IAAI;aACV,CAAC,CAAC;YACH,QAAQ,CAAC,MAAM,CACb,sBAAsB,EACtB,IAAI,CAAC,SAAS,CACZ,oBAAoB,CAAC,MAAM,CAAC;gBAC1B,QAAQ,EAAE,IAAI;gBACd,OAAO;aACR,CAAC,CACH,CACF,CAAC;QACJ,CAAC;QACD,KAAK,CAAC,cAAc,CAClB,IAAY,EACZ,UAAmB;YAEnB,MAAM,QAAQ,GAAmB,EAAE
,CAAC;YAEpC,OAAO,IAAI,cAAc,CAAa;gBACpC,KAAK,CAAC,KAAK,CAAC,UAAU;oBACpB,+CAA+C;oBAC/C,6DAA6D;oBAC7D,IAAI,WAAW,GAAG,EAAE,CAAC;oBACrB,IAAI,MAAM,GAAG,UAAU,IAAI,CAAC,CAAC;oBAC7B,IAAI,MAAM,GAAG,EAA+B,CAAC;oBAE7C,SAAS,OAAO,CAAC,GAIhB;wBACC,IAAI,WAAW,IAAI,GAAG,CAAC,EAAE,EAAE,CAAC;4BAC1B,+BAA+B;4BAC/B,OAAO;wBACT,CAAC;wBAED,IAAI,MAAM,GAAG,CAAC,EAAE,CAAC;4BACf,MAAM,EAAE,CAAC;4BACT,OAAO;wBACT,CAAC;wBAED,IAAI,GAAG,CAAC,IAAI,CAAC,UAAU,EAAE,CAAC;4BACxB,UAAU,CAAC,OAAO,CAAC,IAAI,UAAU,CAAC,GAAG,CAAC,IAAI,CAAC,CAAC,CAAC;wBAC/C,CAAC;wBACD,IAAI,GAAG,CAAC,GAAG,EAAE,CAAC;4BACZ,UAAU,CAAC,KAAK,EAAE,CAAC;wBACrB,CAAC;wBACD,WAAW,GAAG,GAAG,CAAC,EAAE,CAAC;oBACvB,CAAC;oBAED,SAAS,MAAM,CAAC,IAAsB;wBACpC,IAAI,MAAM,EAAE,CAAC;4BACX,MAAM,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;4BAClB,OAAO;wBACT,CAAC;wBACD,OAAO,CAAC,IAAI,CAAC,CAAC;oBAChB,CAAC;oBACD,MAAM,CAAC,EAAE,CAAC,QAAQ,IAAI,EAAE,EAAE,MAAM,CAAC,CAAC;oBAClC,QAAQ,CAAC,IAAI,CAAC,GAAG,EAAE;wBACjB,MAAM,CAAC,GAAG,CAAC,QAAQ,IAAI,EAAE,EAAE,MAAM,CAAC,CAAC;oBACrC,CAAC,CAAC,CAAC;oBAEH,MAAM,MAAM,GAAG,MAAM,OAAO;yBACzB,MAAM,CAAC;wBACN,EAAE,EAAE,OAAO,CAAC,OAAO;wBACnB,GAAG,EAAE,OAAO,CAAC,GAAG;wBAChB,IAAI,EAAE,OAAO,CAAC,SAAS;qBACxB,CAAC;yBACD,IAAI,CAAC,OAAO,CAAC;yBACb,KAAK,CAAC,GAAG,CAAC,EAAE,CAAC,OAAO,CAAC,QAAQ,EAAE,IAAI,CAAC,CAAC,CAAC;yBACtC,OAAO,CAAC,OAAO,CAAC,OAAO,CAAC,CAAC;oBAE5B,KAAK,MAAM,KAAK,IAAI,CAAC,GAAG,MAAM,EAAE,GAAG,CAAC,MAAM,IAAI,EAAE,CAAC,CAAC,EAAE,CAAC;wBACnD,OAAO,CAAC,KAAK,CAAC,CAAC;oBACjB,CAAC;oBACD,MAAM,GAAG,IAAI,CAAC;gBAChB,CAAC;gBACD,MAAM;oBACJ,QAAQ,CAAC,OAAO,CAAC,CAAC,EAAE,EAAE,EAAE,CAAC,EAAE,EAAE,CAAC,CAAC;gBACjC,CAAC;aACF,CAAC,CAAC;QACL,CAAC;KACF,CAAC;AACJ,CAAC"}
package/dist/util.d.ts ADDED
@@ -0,0 +1,6 @@
+ export declare class Mutex {
+ promise: Promise<unknown>;
+ andThen<T>(fn: () => Promise<T> | T): Promise<T>;
+ }
+ export declare function compact<T extends object>(obj: T): { [key in keyof T]: null extends T[key] ? NonNullable<T[key]> | undefined : T[key]; };
+ //# sourceMappingURL=util.d.ts.map
package/dist/util.d.ts.map ADDED
@@ -0,0 +1 @@
+ {"version":3,"file":"util.d.ts","sourceRoot":"","sources":["../src/util.ts"],"names":[],"mappings":"AAAA,qBAAa,KAAK;IAChB,OAAO,EAAE,OAAO,CAAC,OAAO,CAAC,CAAqB;IAC9C,OAAO,CAAC,CAAC,EAAE,EAAE,EAAE,MAAM,OAAO,CAAC,CAAC,CAAC,GAAG,CAAC,GAAG,OAAO,CAAC,CAAC,CAAC;CAOjD;AAED,wBAAgB,OAAO,CAAC,CAAC,SAAS,MAAM,EAAE,GAAG,EAAE,CAAC,MAE3C,GAAG,gFAYP"}
package/dist/util.js ADDED
@@ -0,0 +1,20 @@
+ export class Mutex {
+ promise = Promise.resolve();
+ andThen(fn) {
+ this.promise = this.promise.then(() => fn(), () => fn());
+ return this.promise;
+ }
+ }
+ export function compact(obj) {
+ const value = {};
+ for (const key in obj) {
+ if (obj[key] !== null) {
+ value[key] = obj[key];
+ }
+ else {
+ value[key] = undefined;
+ }
+ }
+ return value;
+ }
+ //# sourceMappingURL=util.js.map
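
A quick sketch of how these helpers behave (illustrative, not part of the package): andThen chains each callback onto an internal promise, so calls run strictly one at a time in submission order even when the callers don't await each other, and compact rewrites null properties to undefined.

import { Mutex, compact } from './util.js';

const mutex = new Mutex();
const order = [];
// Neither of the first two calls is awaited, yet 'slow' still finishes before 'fast'
// because each callback is chained onto the previous one's promise.
void mutex.andThen(async () => {
  await new Promise((resolve) => setTimeout(resolve, 50));
  order.push('slow');
});
void mutex.andThen(async () => {
  order.push('fast');
});
await mutex.andThen(async () => order.push('last'));
console.log(order); // ['slow', 'fast', 'last']

console.log(compact({ a: 1, b: null })); // { a: 1, b: undefined }
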
package/dist/util.js.map ADDED
@@ -0,0 +1 @@
+ {"version":3,"file":"util.js","sourceRoot":"","sources":["../src/util.ts"],"names":[],"mappings":"AAAA,MAAM,OAAO,KAAK;IAChB,OAAO,GAAqB,OAAO,CAAC,OAAO,EAAE,CAAC;IAC9C,OAAO,CAAI,EAAwB;QACjC,IAAI,CAAC,OAAO,GAAG,IAAI,CAAC,OAAO,CAAC,IAAI,CAC9B,GAAG,EAAE,CAAC,EAAE,EAAE,EACV,GAAG,EAAE,CAAC,EAAE,EAAE,CACX,CAAC;QACF,OAAO,IAAI,CAAC,OAAqB,CAAC;IACpC,CAAC;CACF;AAED,MAAM,UAAU,OAAO,CAAmB,GAAM;IAC9C,MAAM,KAAK,GAAG,EAIb,CAAC;IACF,KAAK,MAAM,GAAG,IAAI,GAAG,EAAE,CAAC;QACtB,IAAI,GAAG,CAAC,GAAG,CAAC,KAAK,IAAI,EAAE,CAAC;YACtB,KAAK,CAAC,GAAG,CAAC,GAAG,GAAG,CAAC,GAAG,CAAQ,CAAC;QAC/B,CAAC;aAAM,CAAC;YACN,KAAK,CAAC,GAAG,CAAC,GAAG,SAAgB,CAAC;QAChC,CAAC;IACH,CAAC;IACD,OAAO,KAAK,CAAC;AACf,CAAC"}
package/dist/zod.d.ts ADDED
@@ -0,0 +1,3 @@
+ import * as z from 'zod';
+ export declare const Base64Buffer: z.ZodCodec<z.ZodBase64, z.ZodCustom<Buffer<ArrayBufferLike>, Buffer<ArrayBufferLike>>>;
+ //# sourceMappingURL=zod.d.ts.map
package/dist/zod.d.ts.map ADDED
@@ -0,0 +1 @@
+ {"version":3,"file":"zod.d.ts","sourceRoot":"","sources":["../src/zod.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,CAAC,MAAM,KAAK,CAAC;AAEzB,eAAO,MAAM,YAAY,wFAOvB,CAAC"}
package/dist/zod.js ADDED
@@ -0,0 +1,10 @@
+ import * as z from 'zod';
+ export const Base64Buffer = z.codec(z.base64(), z.instanceof(Buffer), {
+ decode(b64) {
+ return Buffer.from(b64, 'base64');
+ },
+ encode(buf) {
+ return buf.toString('base64');
+ },
+ });
+ //# sourceMappingURL=zod.js.map
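
A small round-trip sketch for this codec, assuming Zod 4.1's codec helpers (z.decode / z.encode); the base64 input is arbitrary:

import * as z from 'zod';
import { Base64Buffer } from './zod.js';

// decode: base64 string -> Buffer; encode: Buffer -> base64 string
const buf = z.decode(Base64Buffer, 'aGVsbG8='); // Buffer containing "hello"
const b64 = z.encode(Base64Buffer, buf);        // "aGVsbG8="
console.log(buf.toString('utf8'), b64);
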
package/dist/zod.js.map ADDED
@@ -0,0 +1 @@
+ {"version":3,"file":"zod.js","sourceRoot":"","sources":["../src/zod.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,CAAC,MAAM,KAAK,CAAC;AAEzB,MAAM,CAAC,MAAM,YAAY,GAAG,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,MAAM,EAAE,EAAE,CAAC,CAAC,UAAU,CAAC,MAAM,CAAC,EAAE;IACpE,MAAM,CAAC,GAAG;QACR,OAAO,MAAM,CAAC,IAAI,CAAC,GAAG,EAAE,QAAQ,CAAC,CAAC;IACpC,CAAC;IACD,MAAM,CAAC,GAAG;QACR,OAAO,GAAG,CAAC,QAAQ,CAAC,QAAQ,CAAC,CAAC;IAChC,CAAC;CACF,CAAC,CAAC"}
package/package.json CHANGED
@@ -1,16 +1,21 @@
  {
  "name": "@workflow/world-postgres",
- "version": "4.0.1-beta.2",
+ "version": "4.1.0-beta.10",
  "description": "A reference World implementation based on PostgreSQL",
  "type": "module",
  "main": "dist/index.js",
+ "bin": {
+ "workflow-postgres-setup": "./bin/setup.js"
+ },
  "files": [
- "dist"
+ "dist",
+ "bin",
+ "src/drizzle/migrations"
  ],
  "publishConfig": {
  "access": "public"
  },
- "license": "MIT",
+ "license": "Apache-2.0",
  "repository": {
  "type": "git",
  "url": "https://github.com/vercel/workflow.git",
@@ -20,27 +25,37 @@
  ".": {
  "types": "./dist/index.d.ts",
  "default": "./dist/index.js"
- }
+ },
+ "./schema": {
+ "types": "./dist/drizzle/schema.d.ts",
+ "default": "./dist/drizzle/schema.js"
+ },
+ "./cli": {
+ "types": "./dist/cli.d.ts",
+ "default": "./dist/cli.js"
+ },
+ "./migrations/*.sql": "./src/drizzle/migrations/*.sql"
  },
  "dependencies": {
  "@vercel/queue": "0.0.0-alpha.23",
- "drizzle-orm": "^0.31.2",
- "pg-boss": "^11.0.7",
- "postgres": "^3.4.7",
- "ulid": "^3.0.1",
+ "dotenv": "16.4.5",
+ "drizzle-orm": "0.44.7",
+ "pg-boss": "11.0.7",
+ "postgres": "3.4.7",
+ "ulid": "3.0.1",
  "zod": "4.1.11",
- "@workflow/errors": "4.0.1-beta.1",
- "@workflow/world": "4.0.1-beta.2",
- "@workflow/world-local": "4.0.1-beta.2"
+ "@workflow/errors": "4.0.1-beta.5",
+ "@workflow/world-local": "5.0.0-beta.8",
+ "@workflow/world": "4.0.1-beta.5"
  },
  "devDependencies": {
- "@testcontainers/postgresql": "^11.7.1",
- "@types/node": "24.6.2",
- "drizzle-kit": "^0.22.7",
+ "@testcontainers/postgresql": "11.7.1",
+ "@types/node": "22.19.0",
+ "drizzle-kit": "0.31.6",
  "vitest": "^3.2.4",
+ "@workflow/errors": "4.0.1-beta.5",
  "@workflow/tsconfig": "4.0.1-beta.0",
- "@workflow/errors": "4.0.1-beta.1",
- "@workflow/world-testing": "4.0.1-beta.3"
+ "@workflow/world-testing": "4.0.1-beta.15"
  },
  "keywords": [],
  "author": "",
@@ -50,6 +65,6 @@
  "clean": "tsc --build --clean && rm -rf dist",
  "test": "vitest run",
  "typecheck": "tsc --noEmit",
- "db:push": "pnpm exec drizzle-kit --version && drizzle-kit push"
+ "db:push": "node dist/cli.js"
  }
  }
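
Beyond the version bump and the MIT → Apache-2.0 license change, the notable package.json changes are the new bin entry (a workflow-postgres-setup command), the wider files list, pinned dependency versions, and new exports subpaths for the Drizzle schema, the CLI, and the raw SQL migrations. A hedged sketch of what consumers can now resolve (paths follow the exports map above; what each module actually exports is an assumption):

// Subpath resolution enabled by the exports map above; module shapes are assumptions.
import * as schema from '@workflow/world-postgres/schema';

// Raw migration SQL can be resolved by filename (name taken from the files list above):
const migrationUrl = import.meta.resolve(
  '@workflow/world-postgres/migrations/0000_cultured_the_anarchist.sql'
);
console.log(migrationUrl, Object.keys(schema));
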
package/src/drizzle/migrations/0000_cultured_the_anarchist.sql ADDED
@@ -0,0 +1,84 @@
+ CREATE SCHEMA "workflow";
+ --> statement-breakpoint
+ DO $$ BEGIN
+ CREATE TYPE "public"."step_status" AS ENUM('pending', 'running', 'completed', 'failed', 'cancelled');
+ EXCEPTION
+ WHEN duplicate_object THEN null;
+ END $$;
+ --> statement-breakpoint
+ DO $$ BEGIN
+ CREATE TYPE "public"."status" AS ENUM('pending', 'running', 'completed', 'failed', 'paused', 'cancelled');
+ EXCEPTION
+ WHEN duplicate_object THEN null;
+ END $$;
+ --> statement-breakpoint
+ CREATE TABLE IF NOT EXISTS "workflow"."workflow_events" (
+ "id" varchar PRIMARY KEY NOT NULL,
+ "type" varchar NOT NULL,
+ "correlation_id" varchar,
+ "created_at" timestamp DEFAULT now() NOT NULL,
+ "run_id" varchar NOT NULL,
+ "payload" jsonb
+ );
+ --> statement-breakpoint
+ CREATE TABLE IF NOT EXISTS "workflow"."workflow_hooks" (
+ "run_id" varchar NOT NULL,
+ "hook_id" varchar PRIMARY KEY NOT NULL,
+ "token" varchar NOT NULL,
+ "owner_id" varchar NOT NULL,
+ "project_id" varchar NOT NULL,
+ "environment" varchar NOT NULL,
+ "created_at" timestamp DEFAULT now() NOT NULL,
+ "metadata" jsonb
+ );
+ --> statement-breakpoint
+ CREATE TABLE IF NOT EXISTS "workflow"."workflow_runs" (
+ "id" varchar PRIMARY KEY NOT NULL,
+ "output" jsonb,
+ "deployment_id" varchar NOT NULL,
+ "status" "status" NOT NULL,
+ "name" varchar NOT NULL,
+ "execution_context" jsonb,
+ "input" jsonb NOT NULL,
+ "error" text,
+ "error_code" varchar,
+ "created_at" timestamp DEFAULT now() NOT NULL,
+ "updated_at" timestamp DEFAULT now() NOT NULL,
+ "completed_at" timestamp,
+ "started_at" timestamp
+ );
+ --> statement-breakpoint
+ CREATE TABLE IF NOT EXISTS "workflow"."workflow_steps" (
+ "run_id" varchar NOT NULL,
+ "step_id" varchar PRIMARY KEY NOT NULL,
+ "step_name" varchar NOT NULL,
+ "status" "step_status" NOT NULL,
+ "input" jsonb NOT NULL,
+ "output" jsonb,
+ "error" text,
+ "error_code" varchar,
+ "attempt" integer NOT NULL,
+ "started_at" timestamp,
+ "completed_at" timestamp,
+ "created_at" timestamp DEFAULT now() NOT NULL,
+ "updated_at" timestamp DEFAULT now() NOT NULL,
+ "retry_after" timestamp
+ );
+ --> statement-breakpoint
+ CREATE TABLE IF NOT EXISTS "workflow"."workflow_stream_chunks" (
+ "id" varchar NOT NULL,
+ "stream_id" varchar NOT NULL,
+ "data" "bytea" NOT NULL,
+ "created_at" timestamp DEFAULT now() NOT NULL,
+ "eof" boolean NOT NULL,
+ CONSTRAINT "workflow_stream_chunks_stream_id_id_pk" PRIMARY KEY("stream_id","id")
+ );
+ --> statement-breakpoint
+ CREATE INDEX IF NOT EXISTS "workflow_events_run_id_index" ON "workflow"."workflow_events" USING btree ("run_id");--> statement-breakpoint
+ CREATE INDEX IF NOT EXISTS "workflow_events_correlation_id_index" ON "workflow"."workflow_events" USING btree ("correlation_id");--> statement-breakpoint
+ CREATE INDEX IF NOT EXISTS "workflow_hooks_run_id_index" ON "workflow"."workflow_hooks" USING btree ("run_id");--> statement-breakpoint
+ CREATE INDEX IF NOT EXISTS "workflow_hooks_token_index" ON "workflow"."workflow_hooks" USING btree ("token");--> statement-breakpoint
+ CREATE INDEX IF NOT EXISTS "workflow_runs_name_index" ON "workflow"."workflow_runs" USING btree ("name");--> statement-breakpoint
+ CREATE INDEX IF NOT EXISTS "workflow_runs_status_index" ON "workflow"."workflow_runs" USING btree ("status");--> statement-breakpoint
+ CREATE INDEX IF NOT EXISTS "workflow_steps_run_id_index" ON "workflow"."workflow_steps" USING btree ("run_id");--> statement-breakpoint
+ CREATE INDEX IF NOT EXISTS "workflow_steps_status_index" ON "workflow"."workflow_steps" USING btree ("status");
package/src/drizzle/migrations/0001_update_error_schema.sql ADDED
@@ -0,0 +1,7 @@
+ -- Drop deprecated error columns from workflow_runs table
+ -- Error data is now stored as JSON in the error column
+ ALTER TABLE "workflow_runs" DROP COLUMN IF EXISTS "error_code";
+ --> statement-breakpoint
+
+ -- Drop deprecated error columns from workflow_steps table
+ ALTER TABLE "workflow_steps" DROP COLUMN IF EXISTS "error_code";