@stepflowjs/storage-postgres 0.0.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/index.d.ts +32 -0
- package/dist/index.js +709 -0
- package/dist/index.js.map +1 -0
- package/package.json +61 -0
package/dist/index.d.ts
ADDED
|
@@ -0,0 +1,32 @@
|
|
|
1
|
+
import { StorageAdapter, QueueOperations, ExecutionOperations, EventOperations, LeaderOperations, RealtimeOperations } from '@stepflowjs/core/storage';
|
|
2
|
+
|
|
3
|
+
/**
 * Connection settings for {@link PostgresStorageAdapter}. Supply either a
 * full `connectionString` or the discrete host/port/database/user/password
 * fields; unset fields fall back to the pg driver's defaults.
 */
interface PostgresStorageOptions {
    /** Full Postgres connection URI; used alongside the discrete fields below. */
    connectionString?: string;
    host?: string;
    port?: number;
    database?: string;
    user?: string;
    password?: string;
    /** Schema holding all adapter tables. The implementation defaults to "stepflow". */
    schema?: string;
    /** Maximum pool size (pg Pool `max`). The implementation defaults to 20. */
    poolSize?: number;
}
/**
 * PostgreSQL-backed StorageAdapter. Implements the queue (jobs table with
 * `FOR UPDATE SKIP LOCKED` pops), execution/step-result persistence, event
 * waiters, leader locks (advisory locks plus a bookkeeping table), and
 * realtime pub/sub via LISTEN/NOTIFY on a dedicated client connection.
 */
declare class PostgresStorageAdapter implements StorageAdapter {
    private pool;
    /** Dedicated connection used only for LISTEN; null until connect(). */
    private listenClient;
    private schema;
    /** channel name -> set of subscriber callbacks. */
    private listeners;
    private connected;
    constructor(options: PostgresStorageOptions);
    /** Verifies connectivity, runs migrations, and opens the LISTEN client. */
    connect(): Promise<void>;
    /** Closes the LISTEN client and the pool; clears all subscriptions. */
    disconnect(): Promise<void>;
    /** True when connect() succeeded and a `SELECT 1` round-trip works. */
    healthCheck(): Promise<boolean>;
    private runMigrations;
    queue: QueueOperations;
    execution: ExecutionOperations;
    events: EventOperations;
    leader: LeaderOperations;
    realtime: RealtimeOperations;
    private hashLockId;
}

export { PostgresStorageAdapter, type PostgresStorageOptions, PostgresStorageAdapter as default };
|
package/dist/index.js
ADDED
|
@@ -0,0 +1,709 @@
|
|
|
1
|
+
// src/index.ts
|
|
2
|
+
import pg from "pg";
|
|
3
|
+
var { Pool, Client } = pg;
|
|
4
|
+
var PostgresStorageAdapter = class {
  pool;
  // Dedicated connection for LISTEN/NOTIFY; LISTEN is session-scoped, so it
  // cannot live on an arbitrary pooled connection.
  listenClient = null;
  schema;
  // channel name -> set of subscriber callbacks
  listeners = /* @__PURE__ */ new Map();
  connected = false;

  /**
   * @param options connection settings; either `connectionString` or the
   *   discrete host/port/database/user/password fields, plus optional
   *   `schema` (default "stepflow") and `poolSize` (default 20).
   */
  constructor(options) {
    const schema = options.schema ?? "stepflow";
    // The schema name is interpolated directly into SQL text throughout this
    // adapter; restrict it to a plain identifier so configuration can never
    // inject SQL.
    if (!/^[A-Za-z_][A-Za-z0-9_]*$/.test(schema)) {
      throw new Error(`Invalid Postgres schema name: ${schema}`);
    }
    this.schema = schema;
    this.pool = new Pool({
      connectionString: options.connectionString,
      host: options.host,
      port: options.port,
      database: options.database,
      user: options.user,
      password: options.password,
      max: options.poolSize ?? 20
    });
  }
  // ============================================================================
  // Connection Lifecycle
  // ============================================================================
  /**
   * Verifies connectivity, creates schema/tables, and opens the dedicated
   * LISTEN client that fans incoming notifications out to subscribers.
   */
  async connect() {
    // Fail fast if the database is unreachable.
    const client = await this.pool.connect();
    try {
      await client.query("SELECT 1");
    } finally {
      client.release();
    }
    await this.runMigrations();
    // Mirror the pool's connection settings for the LISTEN session.
    this.listenClient = new Client({
      connectionString: this.pool.options.connectionString,
      host: this.pool.options.host,
      port: this.pool.options.port,
      database: this.pool.options.database,
      user: this.pool.options.user,
      password: this.pool.options.password
    });
    await this.listenClient.connect();
    this.listenClient.on("notification", (msg) => {
      if (msg.channel && msg.payload) {
        const callbacks = this.listeners.get(msg.channel);
        if (callbacks) {
          try {
            const data = JSON.parse(msg.payload);
            callbacks.forEach((cb) => cb(data));
          } catch (error) {
            // A malformed payload only loses this one notification; keep the
            // LISTEN connection alive.
            console.error("Failed to parse notification payload:", error);
          }
        }
      }
    });
    this.connected = true;
  }
  /** Tears down the LISTEN client and the pool and drops all subscriptions. */
  async disconnect() {
    if (this.listenClient) {
      await this.listenClient.end();
      this.listenClient = null;
    }
    await this.pool.end();
    this.listeners.clear();
    this.connected = false;
  }
  /** True when connect() succeeded and the database answers a round-trip. */
  async healthCheck() {
    if (!this.connected) return false;
    try {
      const client = await this.pool.connect();
      try {
        await client.query("SELECT 1");
        return true;
      } finally {
        client.release();
      }
    } catch {
      return false;
    }
  }
  // ============================================================================
  // Schema Migrations
  // ============================================================================
  /** Idempotently creates the schema, tables, and indexes the adapter uses. */
  async runMigrations() {
    const client = await this.pool.connect();
    try {
      await client.query(`CREATE SCHEMA IF NOT EXISTS ${this.schema}`);
      await client.query(`
        CREATE TABLE IF NOT EXISTS ${this.schema}.jobs (
          id UUID PRIMARY KEY,
          workflow_id TEXT NOT NULL,
          event_name TEXT NOT NULL,
          payload JSONB NOT NULL,
          metadata JSONB NOT NULL DEFAULT '{}',
          priority INTEGER NOT NULL DEFAULT 0,
          attempts INTEGER NOT NULL DEFAULT 0,
          max_attempts INTEGER NOT NULL DEFAULT 3,
          scheduled_for TIMESTAMPTZ,
          locked_until TIMESTAMPTZ,
          locked_by TEXT,
          created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
          updated_at TIMESTAMPTZ NOT NULL DEFAULT NOW()
        )
      `);
      await client.query(`
        CREATE INDEX IF NOT EXISTS idx_jobs_workflow_id
        ON ${this.schema}.jobs(workflow_id)
      `);
      await client.query(`
        CREATE INDEX IF NOT EXISTS idx_jobs_scheduled_for
        ON ${this.schema}.jobs(scheduled_for)
        WHERE scheduled_for IS NOT NULL
      `);
      await client.query(`
        CREATE INDEX IF NOT EXISTS idx_jobs_priority_created
        ON ${this.schema}.jobs(priority DESC, created_at ASC)
      `);
      await client.query(`
        CREATE TABLE IF NOT EXISTS ${this.schema}.executions (
          id UUID PRIMARY KEY,
          run_id TEXT NOT NULL,
          workflow_id TEXT NOT NULL,
          workflow_version TEXT,
          event_name TEXT NOT NULL,
          payload JSONB NOT NULL,
          status TEXT NOT NULL,
          result JSONB,
          error JSONB,
          metadata JSONB NOT NULL DEFAULT '{}',
          attempt INTEGER NOT NULL DEFAULT 1,
          started_at TIMESTAMPTZ NOT NULL,
          completed_at TIMESTAMPTZ,
          timeline JSONB NOT NULL DEFAULT '[]',
          idempotency_key TEXT
        )
      `);
      await client.query(`
        CREATE INDEX IF NOT EXISTS idx_executions_run_id
        ON ${this.schema}.executions(run_id)
      `);
      await client.query(`
        CREATE INDEX IF NOT EXISTS idx_executions_workflow_id
        ON ${this.schema}.executions(workflow_id)
      `);
      await client.query(`
        CREATE INDEX IF NOT EXISTS idx_executions_status
        ON ${this.schema}.executions(status)
      `);
      await client.query(`
        CREATE INDEX IF NOT EXISTS idx_executions_idempotency
        ON ${this.schema}.executions(workflow_id, idempotency_key)
        WHERE idempotency_key IS NOT NULL
      `);
      await client.query(`
        CREATE TABLE IF NOT EXISTS ${this.schema}.step_results (
          id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
          execution_id UUID NOT NULL REFERENCES ${this.schema}.executions(id) ON DELETE CASCADE,
          step_name TEXT NOT NULL,
          result JSONB NOT NULL,
          started_at TIMESTAMPTZ NOT NULL,
          completed_at TIMESTAMPTZ NOT NULL,
          duration_ms INTEGER NOT NULL,
          UNIQUE(execution_id, step_name)
        )
      `);
      await client.query(`
        CREATE INDEX IF NOT EXISTS idx_step_results_execution_id
        ON ${this.schema}.step_results(execution_id)
      `);
      await client.query(`
        CREATE TABLE IF NOT EXISTS ${this.schema}.event_waiters (
          id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
          event_id TEXT NOT NULL,
          execution_id UUID NOT NULL REFERENCES ${this.schema}.executions(id) ON DELETE CASCADE,
          timeout_at TIMESTAMPTZ,
          created_at TIMESTAMPTZ NOT NULL DEFAULT NOW()
        )
      `);
      await client.query(`
        CREATE INDEX IF NOT EXISTS idx_event_waiters_event_id
        ON ${this.schema}.event_waiters(event_id)
      `);
      await client.query(`
        CREATE TABLE IF NOT EXISTS ${this.schema}.leader_locks (
          lock_id TEXT PRIMARY KEY,
          owner TEXT NOT NULL,
          expires_at TIMESTAMPTZ NOT NULL,
          acquired_at TIMESTAMPTZ NOT NULL DEFAULT NOW()
        )
      `);
    } finally {
      client.release();
    }
  }
  // ============================================================================
  // Internal helpers (shared by queue / execution operations)
  // ============================================================================
  /** Inserts a job row; `scheduledFor` is the effective schedule for this insert. */
  async _insertJob(job, scheduledFor) {
    await this.pool.query(
      `INSERT INTO ${this.schema}.jobs (
        id, workflow_id, event_name, payload, metadata,
        priority, attempts, max_attempts, scheduled_for, created_at
      ) VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10)`,
      [
        job.id,
        job.workflowId,
        job.eventName,
        JSON.stringify(job.payload),
        JSON.stringify(job.metadata),
        job.priority,
        job.attempts,
        job.maxAttempts,
        scheduledFor,
        job.createdAt
      ]
    );
  }
  /** Maps a snake_case jobs-table row to the camelCase job shape. */
  _mapJobRow(row) {
    return {
      id: row.id,
      workflowId: row.workflow_id,
      eventName: row.event_name,
      payload: row.payload,
      metadata: row.metadata,
      priority: row.priority,
      attempts: row.attempts,
      maxAttempts: row.max_attempts,
      scheduledFor: row.scheduled_for,
      createdAt: row.created_at
    };
  }
  /**
   * Builds the shared SELECT that aggregates step_results into a `steps`
   * JSON array per execution. `where` goes before GROUP BY, `tail` after.
   */
  _executionSelectSql(where, tail = "") {
    return `SELECT e.*,
        COALESCE(
          json_agg(
            json_build_object(
              'name', sr.step_name,
              'status', 'completed',
              'result', sr.result->'data',
              'startedAt', sr.started_at,
              'completedAt', sr.completed_at,
              'durationMs', sr.duration_ms
            )
            ORDER BY sr.started_at
          ) FILTER (WHERE sr.id IS NOT NULL),
          '[]'
        ) as steps
      FROM ${this.schema}.executions e
      LEFT JOIN ${this.schema}.step_results sr ON sr.execution_id = e.id
      ${where}
      GROUP BY e.id
      ${tail}`;
  }
  /** Maps an executions-table row (with aggregated `steps`) to camelCase. */
  _mapExecutionRow(row) {
    return {
      id: row.id,
      runId: row.run_id,
      workflowId: row.workflow_id,
      workflowVersion: row.workflow_version,
      eventName: row.event_name,
      payload: row.payload,
      status: row.status,
      result: row.result,
      error: row.error,
      steps: row.steps,
      metadata: row.metadata,
      attempt: row.attempt,
      startedAt: row.started_at,
      completedAt: row.completed_at,
      timeline: row.timeline
    };
  }
  // ============================================================================
  // Queue Operations
  // ============================================================================
  queue = {
    push: async (job) => {
      await this._insertJob(job, job.scheduledFor);
      return job.id;
    },
    /**
     * Atomically claims the highest-priority ready job using
     * FOR UPDATE SKIP LOCKED so concurrent workers never double-claim.
     */
    pop: async (options) => {
      const workerId = options?.workerId ?? `worker-${process.pid}`;
      const lockDuration = options?.lockDuration ?? 3e4;
      // lockDuration is bound as a parameter (the original interpolated it
      // into the INTERVAL literal) so caller input can never alter SQL text.
      const result = await this.pool.query(
        `UPDATE ${this.schema}.jobs
         SET locked_until = NOW() + ($2::int * INTERVAL '1 millisecond'),
             locked_by = $1,
             updated_at = NOW()
         WHERE id = (
           SELECT id FROM ${this.schema}.jobs
           WHERE (scheduled_for IS NULL OR scheduled_for <= NOW())
             AND (locked_until IS NULL OR locked_until < NOW())
           ORDER BY priority DESC, created_at ASC
           FOR UPDATE SKIP LOCKED
           LIMIT 1
         )
         RETURNING *`,
        [workerId, lockDuration]
      );
      if (result.rows.length === 0) {
        return null;
      }
      return this._mapJobRow(result.rows[0]);
    },
    /** Acknowledges (permanently removes) a completed job. */
    ack: async (jobId) => {
      await this.pool.query(`DELETE FROM ${this.schema}.jobs WHERE id = $1`, [
        jobId
      ]);
    },
    /** Returns a job to the queue, bumping attempts; optional retry delay. */
    nack: async (jobId, options) => {
      const delay = options?.delay ?? 0;
      const scheduledFor = delay > 0 ? new Date(Date.now() + delay) : null;
      await this.pool.query(
        `UPDATE ${this.schema}.jobs
         SET attempts = attempts + 1,
             scheduled_for = $2,
             locked_until = NULL,
             locked_by = NULL,
             updated_at = NOW()
         WHERE id = $1`,
        [jobId, scheduledFor]
      );
    },
    /** Enqueues a job to become ready at `executeAt`. */
    schedule: async (job, executeAt) => {
      await this._insertJob(job, executeAt);
      return job.id;
    },
    /** Lists all jobs whose schedule lies in the future, soonest first. */
    getDelayed: async () => {
      const result = await this.pool.query(
        `SELECT * FROM ${this.schema}.jobs
         WHERE scheduled_for IS NOT NULL AND scheduled_for > NOW()
         ORDER BY scheduled_for ASC`
      );
      return result.rows.map((row) => this._mapJobRow(row));
    }
  };
  // ============================================================================
  // Execution Operations
  // ============================================================================
  execution = {
    /** Persists a new execution record and broadcasts it on its channel. */
    create: async (execution) => {
      await this.pool.query(
        `INSERT INTO ${this.schema}.executions (
          id, run_id, workflow_id, workflow_version, event_name,
          payload, status, result, error, metadata, attempt,
          started_at, completed_at, timeline, idempotency_key
        ) VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12, $13, $14, $15)`,
        [
          execution.id,
          execution.runId,
          execution.workflowId,
          execution.workflowVersion,
          execution.eventName,
          JSON.stringify(execution.payload),
          execution.status,
          execution.result ? JSON.stringify(execution.result) : null,
          execution.error ? JSON.stringify(execution.error) : null,
          JSON.stringify(execution.metadata),
          execution.attempt,
          execution.startedAt,
          execution.completedAt,
          JSON.stringify(execution.timeline),
          execution.metadata.idempotencyKey
        ]
      );
      await this.realtime.publish(`execution:${execution.id}`, execution);
      return execution.id;
    },
    /** Fetches one execution (with its step results) or null. */
    get: async (executionId) => {
      const result = await this.pool.query(
        this._executionSelectSql("WHERE e.id = $1"),
        [executionId]
      );
      if (result.rows.length === 0) {
        return null;
      }
      return this._mapExecutionRow(result.rows[0]);
    },
    /** Looks up an execution by (workflow, idempotency key), or null. */
    getByIdempotencyKey: async (workflowId, idempotencyKey) => {
      const result = await this.pool.query(
        this._executionSelectSql(
          "WHERE e.workflow_id = $1 AND e.idempotency_key = $2",
          "LIMIT 1"
        ),
        [workflowId, idempotencyKey]
      );
      if (result.rows.length === 0) {
        return null;
      }
      return this._mapExecutionRow(result.rows[0]);
    },
    /**
     * Applies a partial update (only supplied fields) and republishes the
     * resulting execution state. Metadata is merged (jsonb ||), not replaced.
     */
    update: async (executionId, updates) => {
      const setClauses = [];
      const values = [];
      let paramIndex = 1;
      if (updates.status !== void 0) {
        setClauses.push(`status = $${paramIndex++}`);
        values.push(updates.status);
      }
      if (updates.result !== void 0) {
        setClauses.push(`result = $${paramIndex++}`);
        values.push(JSON.stringify(updates.result));
      }
      if (updates.error !== void 0) {
        setClauses.push(`error = $${paramIndex++}`);
        values.push(JSON.stringify(updates.error));
      }
      if (updates.metadata !== void 0) {
        setClauses.push(`metadata = metadata || $${paramIndex++}::jsonb`);
        values.push(JSON.stringify(updates.metadata));
      }
      if (updates.completedAt !== void 0) {
        setClauses.push(`completed_at = $${paramIndex++}`);
        values.push(updates.completedAt);
      }
      if (updates.timeline !== void 0) {
        setClauses.push(`timeline = $${paramIndex++}`);
        values.push(JSON.stringify(updates.timeline));
      }
      if (setClauses.length === 0) {
        return;
      }
      values.push(executionId);
      await this.pool.query(
        `UPDATE ${this.schema}.executions
         SET ${setClauses.join(", ")}
         WHERE id = $${paramIndex}`,
        values
      );
      const execution = await this.execution.get(executionId);
      if (execution) {
        await this.realtime.publish(`execution:${executionId}`, execution);
      }
    },
    /** Lists executions filtered by workflowId/status/runId, newest first. */
    list: async (options) => {
      const whereClauses = [];
      const values = [];
      let paramIndex = 1;
      if (options.workflowId) {
        whereClauses.push(`e.workflow_id = $${paramIndex++}`);
        values.push(options.workflowId);
      }
      if (options.status) {
        whereClauses.push(`e.status = $${paramIndex++}`);
        values.push(options.status);
      }
      if (options.runId) {
        whereClauses.push(`e.run_id = $${paramIndex++}`);
        values.push(options.runId);
      }
      const whereClause = whereClauses.length > 0 ? `WHERE ${whereClauses.join(" AND ")}` : "";
      const limit = options.limit ?? 50;
      const offset = options.offset ?? 0;
      const result = await this.pool.query(
        this._executionSelectSql(
          whereClause,
          `ORDER BY e.started_at DESC
           LIMIT $${paramIndex++} OFFSET $${paramIndex++}`
        ),
        [...values, limit, offset]
      );
      return result.rows.map((row) => this._mapExecutionRow(row));
    },
    /** Returns the stored result of one step for one execution, or null. */
    getStepResult: async (executionId, stepName) => {
      const result = await this.pool.query(
        `SELECT result FROM ${this.schema}.step_results
         WHERE execution_id = $1 AND step_name = $2`,
        [executionId, stepName]
      );
      if (result.rows.length === 0) {
        return null;
      }
      return result.rows[0].result;
    },
    /** Upserts a step result (re-runs overwrite the previous attempt). */
    saveStepResult: async (executionId, stepName, result) => {
      await this.pool.query(
        `INSERT INTO ${this.schema}.step_results (
          execution_id, step_name, result, started_at, completed_at, duration_ms
        ) VALUES ($1, $2, $3, $4, $5, $6)
        ON CONFLICT (execution_id, step_name)
        DO UPDATE SET
          result = EXCLUDED.result,
          started_at = EXCLUDED.started_at,
          completed_at = EXCLUDED.completed_at,
          duration_ms = EXCLUDED.duration_ms`,
        [
          executionId,
          stepName,
          JSON.stringify(result),
          result.startedAt,
          result.completedAt,
          result.durationMs
        ]
      );
    }
  };
  // ============================================================================
  // Event Operations
  // ============================================================================
  events = {
    /**
     * Delivers `data` to every waiter registered for `eventId`, consuming the
     * waiters in the process. Returns the number of waiters notified.
     */
    publish: async (eventId, data) => {
      const result = await this.pool.query(
        `DELETE FROM ${this.schema}.event_waiters
         WHERE event_id = $1
         RETURNING execution_id`,
        [eventId]
      );
      for (const row of result.rows) {
        await this.realtime.publish(
          `event:${eventId}:${row.execution_id}`,
          data
        );
      }
      return result.rows.length;
    },
    /** Registers an execution as waiting for `eventId` (optional deadline). */
    subscribe: async (eventId, executionId, timeout) => {
      await this.pool.query(
        `INSERT INTO ${this.schema}.event_waiters (event_id, execution_id, timeout_at)
         VALUES ($1, $2, $3)`,
        [eventId, executionId, timeout]
      );
    },
    /** Lists the executions currently waiting on `eventId`. */
    getWaiters: async (eventId) => {
      const result = await this.pool.query(
        `SELECT execution_id, timeout_at FROM ${this.schema}.event_waiters
         WHERE event_id = $1`,
        [eventId]
      );
      return result.rows.map((row) => ({
        executionId: row.execution_id,
        timeoutAt: row.timeout_at
      }));
    }
  };
  // ============================================================================
  // Leader Operations (using pg_advisory_lock)
  // ============================================================================
  // NOTE(review): pg advisory locks are session-scoped, but these queries run
  // on arbitrary pooled connections — release() may execute on a different
  // session than the one that acquired the lock, in which case the unlock is
  // a no-op and the lock is only freed when the holding connection closes.
  // A dedicated client would be needed for strict mutual exclusion; confirm
  // before relying on these semantics.
  leader = {
    /** Tries to take the advisory lock for `lockId`; records owner/TTL on success. */
    acquire: async (lockId, ttlSeconds) => {
      const lockKey = this.hashLockId(lockId);
      const result = await this.pool.query(
        "SELECT pg_try_advisory_lock($1) as acquired",
        [lockKey]
      );
      const acquired = result.rows[0].acquired;
      if (acquired) {
        const owner = `${process.pid}-${Date.now()}`;
        const expiresAt = new Date(Date.now() + ttlSeconds * 1e3);
        await this.pool.query(
          `INSERT INTO ${this.schema}.leader_locks (lock_id, owner, expires_at)
           VALUES ($1, $2, $3)
           ON CONFLICT (lock_id)
           DO UPDATE SET owner = EXCLUDED.owner, expires_at = EXCLUDED.expires_at, acquired_at = NOW()`,
          [lockId, owner, expiresAt]
        );
      }
      return acquired;
    },
    /** Releases the advisory lock and clears the bookkeeping row. */
    release: async (lockId) => {
      const lockKey = this.hashLockId(lockId);
      await this.pool.query("SELECT pg_advisory_unlock($1)", [lockKey]);
      await this.pool.query(
        `DELETE FROM ${this.schema}.leader_locks WHERE lock_id = $1`,
        [lockId]
      );
    },
    /** Extends the recorded TTL; true if the lock row existed. */
    renew: async (lockId, ttlSeconds) => {
      const expiresAt = new Date(Date.now() + ttlSeconds * 1e3);
      const result = await this.pool.query(
        `UPDATE ${this.schema}.leader_locks
         SET expires_at = $2
         WHERE lock_id = $1
         RETURNING lock_id`,
        [lockId, expiresAt]
      );
      return result.rows.length > 0;
    }
  };
  // ============================================================================
  // Realtime Operations (using LISTEN/NOTIFY)
  // ============================================================================
  realtime = {
    /**
     * Registers `callback` on `channel`, issuing LISTEN on first subscriber.
     * Returns an unsubscribe function that UNLISTENs when the last
     * subscriber leaves.
     */
    subscribe: (channel, callback) => {
      if (!this.listeners.has(channel)) {
        this.listeners.set(channel, /* @__PURE__ */ new Set());
        if (this.listenClient) {
          this.listenClient.query(`LISTEN "${channel}"`).catch(
            (error) => console.error(`Failed to LISTEN to ${channel}:`, error)
          );
        }
      }
      this.listeners.get(channel).add(callback);
      return () => {
        const callbacks = this.listeners.get(channel);
        if (callbacks) {
          callbacks.delete(callback);
          if (callbacks.size === 0) {
            this.listeners.delete(channel);
            if (this.listenClient) {
              this.listenClient.query(`UNLISTEN "${channel}"`).catch(
                (error) => console.error(`Failed to UNLISTEN from ${channel}:`, error)
              );
            }
          }
        }
      };
    },
    /** Broadcasts `data` (JSON-serialized) to all listeners on `channel`. */
    publish: async (channel, data) => {
      const payload = JSON.stringify(data);
      // BUGFIX: the NOTIFY statement does not accept bind parameters, so the
      // previous `NOTIFY "${channel}", $1` form fails at parse time.
      // pg_notify() is the parameterizable equivalent and also sidesteps
      // channel-name quoting/injection concerns.
      await this.pool.query("SELECT pg_notify($1, $2)", [channel, payload]);
    }
  };
  // ============================================================================
  // Helper Methods
  // ============================================================================
  /**
   * Folds a string into a signed 32-bit integer (the classic 31x string
   * hash) for use as a pg advisory-lock key.
   */
  hashLockId(lockId) {
    let hash = 0;
    for (let i = 0; i < lockId.length; i++) {
      const char = lockId.charCodeAt(i);
      hash = (hash << 5) - hash + char;
      // Force the accumulator back into 32-bit signed range.
      hash = hash & hash;
    }
    return hash;
  }
};
|
|
704
|
+
var index_default = PostgresStorageAdapter;
|
|
705
|
+
export {
|
|
706
|
+
PostgresStorageAdapter,
|
|
707
|
+
index_default as default
|
|
708
|
+
};
|
|
709
|
+
//# sourceMappingURL=index.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"sources":["../src/index.ts"],"sourcesContent":["// ============================================================================\n// PostgreSQL Storage Adapter\n// Production-ready storage with ACID guarantees\n// ============================================================================\n\nimport pg from \"pg\";\nimport type {\n StorageAdapter,\n QueueOperations,\n ExecutionOperations,\n EventOperations,\n LeaderOperations,\n RealtimeOperations,\n} from \"@stepflowjs/core/storage\";\nimport type {\n QueueJob,\n Execution,\n StepResult,\n EventWaiter,\n ListOptions,\n PopOptions,\n NackOptions,\n Unsubscribe,\n} from \"@stepflowjs/core\";\n\nconst { Pool, Client } = pg;\n\n// ============================================================================\n// Configuration Types\n// ============================================================================\n\nexport interface PostgresStorageOptions {\n connectionString?: string;\n host?: string;\n port?: number;\n database?: string;\n user?: string;\n password?: string;\n schema?: string;\n poolSize?: number;\n}\n\n// ============================================================================\n// PostgreSQL Storage Adapter\n// ============================================================================\n\nexport class PostgresStorageAdapter implements StorageAdapter {\n private pool: pg.Pool;\n private listenClient: pg.Client | null = null;\n private schema: string;\n private listeners: Map<string, Set<(data: unknown) => void>> = new Map();\n private connected = false;\n\n constructor(options: PostgresStorageOptions) {\n this.schema = options.schema ?? \"stepflow\";\n\n this.pool = new Pool({\n connectionString: options.connectionString,\n host: options.host,\n port: options.port,\n database: options.database,\n user: options.user,\n password: options.password,\n max: options.poolSize ?? 
20,\n });\n }\n\n // ============================================================================\n // Connection Lifecycle\n // ============================================================================\n\n async connect(): Promise<void> {\n // Test connection\n const client = await this.pool.connect();\n try {\n await client.query(\"SELECT 1\");\n } finally {\n client.release();\n }\n\n // Run migrations\n await this.runMigrations();\n\n // Setup LISTEN client for realtime\n this.listenClient = new Client({\n connectionString: this.pool.options.connectionString,\n host: this.pool.options.host,\n port: this.pool.options.port,\n database: this.pool.options.database,\n user: this.pool.options.user,\n password: this.pool.options.password,\n });\n\n await this.listenClient.connect();\n\n // Handle notifications\n this.listenClient.on(\"notification\", (msg) => {\n if (msg.channel && msg.payload) {\n const callbacks = this.listeners.get(msg.channel);\n if (callbacks) {\n try {\n const data = JSON.parse(msg.payload);\n callbacks.forEach((cb) => cb(data));\n } catch (error) {\n console.error(\"Failed to parse notification payload:\", error);\n }\n }\n }\n });\n\n this.connected = true;\n }\n\n async disconnect(): Promise<void> {\n if (this.listenClient) {\n await this.listenClient.end();\n this.listenClient = null;\n }\n await this.pool.end();\n this.listeners.clear();\n this.connected = false;\n }\n\n async healthCheck(): Promise<boolean> {\n if (!this.connected) return false;\n try {\n const client = await this.pool.connect();\n try {\n await client.query(\"SELECT 1\");\n return true;\n } finally {\n client.release();\n }\n } catch {\n return false;\n }\n }\n\n // ============================================================================\n // Schema Migrations\n // ============================================================================\n\n private async runMigrations(): Promise<void> {\n const client = await this.pool.connect();\n try {\n // Create schema\n 
await client.query(`CREATE SCHEMA IF NOT EXISTS ${this.schema}`);\n\n // Jobs table\n await client.query(`\n CREATE TABLE IF NOT EXISTS ${this.schema}.jobs (\n id UUID PRIMARY KEY,\n workflow_id TEXT NOT NULL,\n event_name TEXT NOT NULL,\n payload JSONB NOT NULL,\n metadata JSONB NOT NULL DEFAULT '{}',\n priority INTEGER NOT NULL DEFAULT 0,\n attempts INTEGER NOT NULL DEFAULT 0,\n max_attempts INTEGER NOT NULL DEFAULT 3,\n scheduled_for TIMESTAMPTZ,\n locked_until TIMESTAMPTZ,\n locked_by TEXT,\n created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),\n updated_at TIMESTAMPTZ NOT NULL DEFAULT NOW()\n )\n `);\n\n await client.query(`\n CREATE INDEX IF NOT EXISTS idx_jobs_workflow_id \n ON ${this.schema}.jobs(workflow_id)\n `);\n\n await client.query(`\n CREATE INDEX IF NOT EXISTS idx_jobs_scheduled_for \n ON ${this.schema}.jobs(scheduled_for) \n WHERE scheduled_for IS NOT NULL\n `);\n\n await client.query(`\n CREATE INDEX IF NOT EXISTS idx_jobs_priority_created \n ON ${this.schema}.jobs(priority DESC, created_at ASC)\n `);\n\n // Executions table\n await client.query(`\n CREATE TABLE IF NOT EXISTS ${this.schema}.executions (\n id UUID PRIMARY KEY,\n run_id TEXT NOT NULL,\n workflow_id TEXT NOT NULL,\n workflow_version TEXT,\n event_name TEXT NOT NULL,\n payload JSONB NOT NULL,\n status TEXT NOT NULL,\n result JSONB,\n error JSONB,\n metadata JSONB NOT NULL DEFAULT '{}',\n attempt INTEGER NOT NULL DEFAULT 1,\n started_at TIMESTAMPTZ NOT NULL,\n completed_at TIMESTAMPTZ,\n timeline JSONB NOT NULL DEFAULT '[]',\n idempotency_key TEXT\n )\n `);\n\n await client.query(`\n CREATE INDEX IF NOT EXISTS idx_executions_run_id \n ON ${this.schema}.executions(run_id)\n `);\n\n await client.query(`\n CREATE INDEX IF NOT EXISTS idx_executions_workflow_id \n ON ${this.schema}.executions(workflow_id)\n `);\n\n await client.query(`\n CREATE INDEX IF NOT EXISTS idx_executions_status \n ON ${this.schema}.executions(status)\n `);\n\n await client.query(`\n CREATE INDEX IF NOT EXISTS 
idx_executions_idempotency \n ON ${this.schema}.executions(workflow_id, idempotency_key) \n WHERE idempotency_key IS NOT NULL\n `);\n\n // Step results table\n await client.query(`\n CREATE TABLE IF NOT EXISTS ${this.schema}.step_results (\n id UUID PRIMARY KEY DEFAULT gen_random_uuid(),\n execution_id UUID NOT NULL REFERENCES ${this.schema}.executions(id) ON DELETE CASCADE,\n step_name TEXT NOT NULL,\n result JSONB NOT NULL,\n started_at TIMESTAMPTZ NOT NULL,\n completed_at TIMESTAMPTZ NOT NULL,\n duration_ms INTEGER NOT NULL,\n UNIQUE(execution_id, step_name)\n )\n `);\n\n await client.query(`\n CREATE INDEX IF NOT EXISTS idx_step_results_execution_id \n ON ${this.schema}.step_results(execution_id)\n `);\n\n // Event waiters table\n await client.query(`\n CREATE TABLE IF NOT EXISTS ${this.schema}.event_waiters (\n id UUID PRIMARY KEY DEFAULT gen_random_uuid(),\n event_id TEXT NOT NULL,\n execution_id UUID NOT NULL REFERENCES ${this.schema}.executions(id) ON DELETE CASCADE,\n timeout_at TIMESTAMPTZ,\n created_at TIMESTAMPTZ NOT NULL DEFAULT NOW()\n )\n `);\n\n await client.query(`\n CREATE INDEX IF NOT EXISTS idx_event_waiters_event_id \n ON ${this.schema}.event_waiters(event_id)\n `);\n\n // Leader locks table\n await client.query(`\n CREATE TABLE IF NOT EXISTS ${this.schema}.leader_locks (\n lock_id TEXT PRIMARY KEY,\n owner TEXT NOT NULL,\n expires_at TIMESTAMPTZ NOT NULL,\n acquired_at TIMESTAMPTZ NOT NULL DEFAULT NOW()\n )\n `);\n } finally {\n client.release();\n }\n }\n\n // ============================================================================\n // Queue Operations\n // ============================================================================\n\n queue: QueueOperations = {\n push: async (job: QueueJob): Promise<string> => {\n await this.pool.query(\n `INSERT INTO ${this.schema}.jobs (\n id, workflow_id, event_name, payload, metadata, \n priority, attempts, max_attempts, scheduled_for, created_at\n ) VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, 
$10)`,\n [\n job.id,\n job.workflowId,\n job.eventName,\n JSON.stringify(job.payload),\n JSON.stringify(job.metadata),\n job.priority,\n job.attempts,\n job.maxAttempts,\n job.scheduledFor,\n job.createdAt,\n ],\n );\n return job.id;\n },\n\n pop: async (options?: PopOptions): Promise<QueueJob | null> => {\n const workerId = options?.workerId ?? `worker-${process.pid}`;\n const lockDuration = options?.lockDuration ?? 30000; // 30 seconds\n\n const result = await this.pool.query(\n `UPDATE ${this.schema}.jobs\n SET locked_until = NOW() + INTERVAL '${lockDuration} milliseconds',\n locked_by = $1,\n updated_at = NOW()\n WHERE id = (\n SELECT id FROM ${this.schema}.jobs\n WHERE (scheduled_for IS NULL OR scheduled_for <= NOW())\n AND (locked_until IS NULL OR locked_until < NOW())\n ORDER BY priority DESC, created_at ASC\n FOR UPDATE SKIP LOCKED\n LIMIT 1\n )\n RETURNING *`,\n [workerId],\n );\n\n if (result.rows.length === 0) {\n return null;\n }\n\n const row = result.rows[0];\n return {\n id: row.id,\n workflowId: row.workflow_id,\n eventName: row.event_name,\n payload: row.payload,\n metadata: row.metadata,\n priority: row.priority,\n attempts: row.attempts,\n maxAttempts: row.max_attempts,\n scheduledFor: row.scheduled_for,\n createdAt: row.created_at,\n };\n },\n\n ack: async (jobId: string): Promise<void> => {\n await this.pool.query(`DELETE FROM ${this.schema}.jobs WHERE id = $1`, [\n jobId,\n ]);\n },\n\n nack: async (jobId: string, options?: NackOptions): Promise<void> => {\n const delay = options?.delay ?? 0;\n const scheduledFor = delay > 0 ? 
new Date(Date.now() + delay) : null;\n\n await this.pool.query(\n `UPDATE ${this.schema}.jobs\n SET attempts = attempts + 1,\n scheduled_for = $2,\n locked_until = NULL,\n locked_by = NULL,\n updated_at = NOW()\n WHERE id = $1`,\n [jobId, scheduledFor],\n );\n },\n\n schedule: async (job: QueueJob, executeAt: Date): Promise<string> => {\n await this.pool.query(\n `INSERT INTO ${this.schema}.jobs (\n id, workflow_id, event_name, payload, metadata, \n priority, attempts, max_attempts, scheduled_for, created_at\n ) VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10)`,\n [\n job.id,\n job.workflowId,\n job.eventName,\n JSON.stringify(job.payload),\n JSON.stringify(job.metadata),\n job.priority,\n job.attempts,\n job.maxAttempts,\n executeAt,\n job.createdAt,\n ],\n );\n return job.id;\n },\n\n getDelayed: async (): Promise<QueueJob[]> => {\n const result = await this.pool.query(\n `SELECT * FROM ${this.schema}.jobs \n WHERE scheduled_for IS NOT NULL AND scheduled_for > NOW()\n ORDER BY scheduled_for ASC`,\n );\n\n return result.rows.map((row) => ({\n id: row.id,\n workflowId: row.workflow_id,\n eventName: row.event_name,\n payload: row.payload,\n metadata: row.metadata,\n priority: row.priority,\n attempts: row.attempts,\n maxAttempts: row.max_attempts,\n scheduledFor: row.scheduled_for,\n createdAt: row.created_at,\n }));\n },\n };\n\n // ============================================================================\n // Execution Operations\n // ============================================================================\n\n execution: ExecutionOperations = {\n create: async (execution: Execution): Promise<string> => {\n await this.pool.query(\n `INSERT INTO ${this.schema}.executions (\n id, run_id, workflow_id, workflow_version, event_name, \n payload, status, result, error, metadata, attempt, \n started_at, completed_at, timeline, idempotency_key\n ) VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12, $13, $14, $15)`,\n [\n execution.id,\n execution.runId,\n 
execution.workflowId,\n execution.workflowVersion,\n execution.eventName,\n JSON.stringify(execution.payload),\n execution.status,\n execution.result ? JSON.stringify(execution.result) : null,\n execution.error ? JSON.stringify(execution.error) : null,\n JSON.stringify(execution.metadata),\n execution.attempt,\n execution.startedAt,\n execution.completedAt,\n JSON.stringify(execution.timeline),\n execution.metadata.idempotencyKey as string | undefined,\n ],\n );\n\n // Emit realtime event\n await this.realtime.publish(`execution:${execution.id}`, execution);\n\n return execution.id;\n },\n\n get: async (executionId: string): Promise<Execution | null> => {\n const result = await this.pool.query(\n `SELECT e.*, \n COALESCE(\n json_agg(\n json_build_object(\n 'name', sr.step_name,\n 'status', 'completed',\n 'result', sr.result->'data',\n 'startedAt', sr.started_at,\n 'completedAt', sr.completed_at,\n 'durationMs', sr.duration_ms\n )\n ORDER BY sr.started_at\n ) FILTER (WHERE sr.id IS NOT NULL),\n '[]'\n ) as steps\n FROM ${this.schema}.executions e\n LEFT JOIN ${this.schema}.step_results sr ON sr.execution_id = e.id\n WHERE e.id = $1\n GROUP BY e.id`,\n [executionId],\n );\n\n if (result.rows.length === 0) {\n return null;\n }\n\n const row = result.rows[0];\n return {\n id: row.id,\n runId: row.run_id,\n workflowId: row.workflow_id,\n workflowVersion: row.workflow_version,\n eventName: row.event_name,\n payload: row.payload,\n status: row.status,\n result: row.result,\n error: row.error,\n steps: row.steps,\n metadata: row.metadata,\n attempt: row.attempt,\n startedAt: row.started_at,\n completedAt: row.completed_at,\n timeline: row.timeline,\n };\n },\n\n getByIdempotencyKey: async (\n workflowId: string,\n idempotencyKey: string,\n ): Promise<Execution | null> => {\n const result = await this.pool.query(\n `SELECT e.*, \n COALESCE(\n json_agg(\n json_build_object(\n 'name', sr.step_name,\n 'status', 'completed',\n 'result', sr.result->'data',\n 'startedAt', 
sr.started_at,\n 'completedAt', sr.completed_at,\n 'durationMs', sr.duration_ms\n )\n ORDER BY sr.started_at\n ) FILTER (WHERE sr.id IS NOT NULL),\n '[]'\n ) as steps\n FROM ${this.schema}.executions e\n LEFT JOIN ${this.schema}.step_results sr ON sr.execution_id = e.id\n WHERE e.workflow_id = $1 AND e.idempotency_key = $2\n GROUP BY e.id\n LIMIT 1`,\n [workflowId, idempotencyKey],\n );\n\n if (result.rows.length === 0) {\n return null;\n }\n\n const row = result.rows[0];\n return {\n id: row.id,\n runId: row.run_id,\n workflowId: row.workflow_id,\n workflowVersion: row.workflow_version,\n eventName: row.event_name,\n payload: row.payload,\n status: row.status,\n result: row.result,\n error: row.error,\n steps: row.steps,\n metadata: row.metadata,\n attempt: row.attempt,\n startedAt: row.started_at,\n completedAt: row.completed_at,\n timeline: row.timeline,\n };\n },\n\n update: async (\n executionId: string,\n updates: Partial<Execution>,\n ): Promise<void> => {\n const setClauses: string[] = [];\n const values: unknown[] = [];\n let paramIndex = 1;\n\n if (updates.status !== undefined) {\n setClauses.push(`status = $${paramIndex++}`);\n values.push(updates.status);\n }\n\n if (updates.result !== undefined) {\n setClauses.push(`result = $${paramIndex++}`);\n values.push(JSON.stringify(updates.result));\n }\n\n if (updates.error !== undefined) {\n setClauses.push(`error = $${paramIndex++}`);\n values.push(JSON.stringify(updates.error));\n }\n\n if (updates.metadata !== undefined) {\n setClauses.push(`metadata = metadata || $${paramIndex++}::jsonb`);\n values.push(JSON.stringify(updates.metadata));\n }\n\n if (updates.completedAt !== undefined) {\n setClauses.push(`completed_at = $${paramIndex++}`);\n values.push(updates.completedAt);\n }\n\n if (updates.timeline !== undefined) {\n setClauses.push(`timeline = $${paramIndex++}`);\n values.push(JSON.stringify(updates.timeline));\n }\n\n if (setClauses.length === 0) {\n return;\n }\n\n values.push(executionId);\n\n 
await this.pool.query(\n `UPDATE ${this.schema}.executions \n SET ${setClauses.join(\", \")}\n WHERE id = $${paramIndex}`,\n values,\n );\n\n // Emit realtime event\n const execution = await this.execution.get(executionId);\n if (execution) {\n await this.realtime.publish(`execution:${executionId}`, execution);\n }\n },\n\n list: async (options: ListOptions): Promise<Execution[]> => {\n const whereClauses: string[] = [];\n const values: unknown[] = [];\n let paramIndex = 1;\n\n if (options.workflowId) {\n whereClauses.push(`e.workflow_id = $${paramIndex++}`);\n values.push(options.workflowId);\n }\n\n if (options.status) {\n whereClauses.push(`e.status = $${paramIndex++}`);\n values.push(options.status);\n }\n\n if (options.runId) {\n whereClauses.push(`e.run_id = $${paramIndex++}`);\n values.push(options.runId);\n }\n\n const whereClause =\n whereClauses.length > 0 ? `WHERE ${whereClauses.join(\" AND \")}` : \"\";\n\n const limit = options.limit ?? 50;\n const offset = options.offset ?? 
0;\n\n const result = await this.pool.query(\n `SELECT e.*, \n COALESCE(\n json_agg(\n json_build_object(\n 'name', sr.step_name,\n 'status', 'completed',\n 'result', sr.result->'data',\n 'startedAt', sr.started_at,\n 'completedAt', sr.completed_at,\n 'durationMs', sr.duration_ms\n )\n ORDER BY sr.started_at\n ) FILTER (WHERE sr.id IS NOT NULL),\n '[]'\n ) as steps\n FROM ${this.schema}.executions e\n LEFT JOIN ${this.schema}.step_results sr ON sr.execution_id = e.id\n ${whereClause}\n GROUP BY e.id\n ORDER BY e.started_at DESC\n LIMIT $${paramIndex++} OFFSET $${paramIndex++}`,\n [...values, limit, offset],\n );\n\n return result.rows.map((row) => ({\n id: row.id,\n runId: row.run_id,\n workflowId: row.workflow_id,\n workflowVersion: row.workflow_version,\n eventName: row.event_name,\n payload: row.payload,\n status: row.status,\n result: row.result,\n error: row.error,\n steps: row.steps,\n metadata: row.metadata,\n attempt: row.attempt,\n startedAt: row.started_at,\n completedAt: row.completed_at,\n timeline: row.timeline,\n }));\n },\n\n getStepResult: async <T = unknown>(\n executionId: string,\n stepName: string,\n ): Promise<StepResult<T> | null> => {\n const result = await this.pool.query(\n `SELECT result FROM ${this.schema}.step_results \n WHERE execution_id = $1 AND step_name = $2`,\n [executionId, stepName],\n );\n\n if (result.rows.length === 0) {\n return null;\n }\n\n return result.rows[0].result as StepResult<T>;\n },\n\n saveStepResult: async (\n executionId: string,\n stepName: string,\n result: StepResult,\n ): Promise<void> => {\n await this.pool.query(\n `INSERT INTO ${this.schema}.step_results (\n execution_id, step_name, result, started_at, completed_at, duration_ms\n ) VALUES ($1, $2, $3, $4, $5, $6)\n ON CONFLICT (execution_id, step_name) \n DO UPDATE SET \n result = EXCLUDED.result,\n started_at = EXCLUDED.started_at,\n completed_at = EXCLUDED.completed_at,\n duration_ms = EXCLUDED.duration_ms`,\n [\n executionId,\n stepName,\n 
JSON.stringify(result),\n result.startedAt,\n result.completedAt,\n result.durationMs,\n ],\n );\n },\n };\n\n // ============================================================================\n // Event Operations\n // ============================================================================\n\n events: EventOperations = {\n publish: async (eventId: string, data: unknown): Promise<number> => {\n // Get waiters\n const result = await this.pool.query(\n `DELETE FROM ${this.schema}.event_waiters \n WHERE event_id = $1 \n RETURNING execution_id`,\n [eventId],\n );\n\n // Notify each waiter via realtime\n for (const row of result.rows) {\n await this.realtime.publish(\n `event:${eventId}:${row.execution_id}`,\n data,\n );\n }\n\n return result.rows.length;\n },\n\n subscribe: async (\n eventId: string,\n executionId: string,\n timeout: Date,\n ): Promise<void> => {\n await this.pool.query(\n `INSERT INTO ${this.schema}.event_waiters (event_id, execution_id, timeout_at)\n VALUES ($1, $2, $3)`,\n [eventId, executionId, timeout],\n );\n },\n\n getWaiters: async (eventId: string): Promise<EventWaiter[]> => {\n const result = await this.pool.query(\n `SELECT execution_id, timeout_at FROM ${this.schema}.event_waiters \n WHERE event_id = $1`,\n [eventId],\n );\n\n return result.rows.map((row) => ({\n executionId: row.execution_id,\n timeoutAt: row.timeout_at,\n }));\n },\n };\n\n // ============================================================================\n // Leader Operations (using pg_advisory_lock)\n // ============================================================================\n\n leader: LeaderOperations = {\n acquire: async (lockId: string, ttlSeconds: number): Promise<boolean> => {\n // Use hash of lockId as advisory lock key\n const lockKey = this.hashLockId(lockId);\n\n // Try to acquire advisory lock\n const result = await this.pool.query(\n \"SELECT pg_try_advisory_lock($1) as acquired\",\n [lockKey],\n );\n\n const acquired = result.rows[0].acquired;\n\n if 
(acquired) {\n // Store lock metadata\n const owner = `${process.pid}-${Date.now()}`;\n const expiresAt = new Date(Date.now() + ttlSeconds * 1000);\n\n await this.pool.query(\n `INSERT INTO ${this.schema}.leader_locks (lock_id, owner, expires_at)\n VALUES ($1, $2, $3)\n ON CONFLICT (lock_id) \n DO UPDATE SET owner = EXCLUDED.owner, expires_at = EXCLUDED.expires_at, acquired_at = NOW()`,\n [lockId, owner, expiresAt],\n );\n }\n\n return acquired;\n },\n\n release: async (lockId: string): Promise<void> => {\n const lockKey = this.hashLockId(lockId);\n\n // Release advisory lock\n await this.pool.query(\"SELECT pg_advisory_unlock($1)\", [lockKey]);\n\n // Remove lock metadata\n await this.pool.query(\n `DELETE FROM ${this.schema}.leader_locks WHERE lock_id = $1`,\n [lockId],\n );\n },\n\n renew: async (lockId: string, ttlSeconds: number): Promise<boolean> => {\n const expiresAt = new Date(Date.now() + ttlSeconds * 1000);\n\n const result = await this.pool.query(\n `UPDATE ${this.schema}.leader_locks \n SET expires_at = $2 \n WHERE lock_id = $1 \n RETURNING lock_id`,\n [lockId, expiresAt],\n );\n\n return result.rows.length > 0;\n },\n };\n\n // ============================================================================\n // Realtime Operations (using LISTEN/NOTIFY)\n // ============================================================================\n\n realtime: RealtimeOperations = {\n subscribe: (\n channel: string,\n callback: (data: unknown) => void,\n ): Unsubscribe => {\n if (!this.listeners.has(channel)) {\n this.listeners.set(channel, new Set());\n\n // Start listening to this channel\n if (this.listenClient) {\n this.listenClient\n .query(`LISTEN \"${channel}\"`)\n .catch((error) =>\n console.error(`Failed to LISTEN to ${channel}:`, error),\n );\n }\n }\n\n this.listeners.get(channel)!.add(callback);\n\n return () => {\n const callbacks = this.listeners.get(channel);\n if (callbacks) {\n callbacks.delete(callback);\n if (callbacks.size === 0) {\n 
this.listeners.delete(channel);\n\n // Stop listening to this channel\n if (this.listenClient) {\n this.listenClient\n .query(`UNLISTEN \"${channel}\"`)\n .catch((error) =>\n console.error(`Failed to UNLISTEN from ${channel}:`, error),\n );\n }\n }\n }\n };\n },\n\n publish: async (channel: string, data: unknown): Promise<void> => {\n const payload = JSON.stringify(data);\n await this.pool.query(`NOTIFY \"${channel}\", $1`, [payload]);\n },\n };\n\n // ============================================================================\n // Helper Methods\n // ============================================================================\n\n private hashLockId(lockId: string): number {\n // Simple hash function to convert string to int32 for pg_advisory_lock\n let hash = 0;\n for (let i = 0; i < lockId.length; i++) {\n const char = lockId.charCodeAt(i);\n hash = (hash << 5) - hash + char;\n hash = hash & hash; // Convert to 32-bit integer\n }\n return hash;\n }\n}\n\n// Default export\nexport default 
PostgresStorageAdapter;\n"],"mappings":";AAKA,OAAO,QAAQ;AAoBf,IAAM,EAAE,MAAM,OAAO,IAAI;AAqBlB,IAAM,yBAAN,MAAuD;AAAA,EACpD;AAAA,EACA,eAAiC;AAAA,EACjC;AAAA,EACA,YAAuD,oBAAI,IAAI;AAAA,EAC/D,YAAY;AAAA,EAEpB,YAAY,SAAiC;AAC3C,SAAK,SAAS,QAAQ,UAAU;AAEhC,SAAK,OAAO,IAAI,KAAK;AAAA,MACnB,kBAAkB,QAAQ;AAAA,MAC1B,MAAM,QAAQ;AAAA,MACd,MAAM,QAAQ;AAAA,MACd,UAAU,QAAQ;AAAA,MAClB,MAAM,QAAQ;AAAA,MACd,UAAU,QAAQ;AAAA,MAClB,KAAK,QAAQ,YAAY;AAAA,IAC3B,CAAC;AAAA,EACH;AAAA;AAAA;AAAA;AAAA,EAMA,MAAM,UAAyB;AAE7B,UAAM,SAAS,MAAM,KAAK,KAAK,QAAQ;AACvC,QAAI;AACF,YAAM,OAAO,MAAM,UAAU;AAAA,IAC/B,UAAE;AACA,aAAO,QAAQ;AAAA,IACjB;AAGA,UAAM,KAAK,cAAc;AAGzB,SAAK,eAAe,IAAI,OAAO;AAAA,MAC7B,kBAAkB,KAAK,KAAK,QAAQ;AAAA,MACpC,MAAM,KAAK,KAAK,QAAQ;AAAA,MACxB,MAAM,KAAK,KAAK,QAAQ;AAAA,MACxB,UAAU,KAAK,KAAK,QAAQ;AAAA,MAC5B,MAAM,KAAK,KAAK,QAAQ;AAAA,MACxB,UAAU,KAAK,KAAK,QAAQ;AAAA,IAC9B,CAAC;AAED,UAAM,KAAK,aAAa,QAAQ;AAGhC,SAAK,aAAa,GAAG,gBAAgB,CAAC,QAAQ;AAC5C,UAAI,IAAI,WAAW,IAAI,SAAS;AAC9B,cAAM,YAAY,KAAK,UAAU,IAAI,IAAI,OAAO;AAChD,YAAI,WAAW;AACb,cAAI;AACF,kBAAM,OAAO,KAAK,MAAM,IAAI,OAAO;AACnC,sBAAU,QAAQ,CAAC,OAAO,GAAG,IAAI,CAAC;AAAA,UACpC,SAAS,OAAO;AACd,oBAAQ,MAAM,yCAAyC,KAAK;AAAA,UAC9D;AAAA,QACF;AAAA,MACF;AAAA,IACF,CAAC;AAED,SAAK,YAAY;AAAA,EACnB;AAAA,EAEA,MAAM,aAA4B;AAChC,QAAI,KAAK,cAAc;AACrB,YAAM,KAAK,aAAa,IAAI;AAC5B,WAAK,eAAe;AAAA,IACtB;AACA,UAAM,KAAK,KAAK,IAAI;AACpB,SAAK,UAAU,MAAM;AACrB,SAAK,YAAY;AAAA,EACnB;AAAA,EAEA,MAAM,cAAgC;AACpC,QAAI,CAAC,KAAK,UAAW,QAAO;AAC5B,QAAI;AACF,YAAM,SAAS,MAAM,KAAK,KAAK,QAAQ;AACvC,UAAI;AACF,cAAM,OAAO,MAAM,UAAU;AAC7B,eAAO;AAAA,MACT,UAAE;AACA,eAAO,QAAQ;AAAA,MACjB;AAAA,IACF,QAAQ;AACN,aAAO;AAAA,IACT;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAMA,MAAc,gBAA+B;AAC3C,UAAM,SAAS,MAAM,KAAK,KAAK,QAAQ;AACvC,QAAI;AAEF,YAAM,OAAO,MAAM,+BAA+B,KAAK,MAAM,EAAE;AAG/D,YAAM,OAAO,MAAM;AAAA,qCACY,KAAK,MAAM;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,OAezC;AAED,YAAM,OAAO,MAAM;AAAA;AAAA,aAEZ,KAAK,MAAM;AAAA,OACjB;AAED,YAAM,OAAO,MAAM;AAAA;AAAA,aAEZ,KAAK,MAAM;AAAA;AAAA,OAEjB;AAED,YAAM,OAAO,MAAM;AAAA;AAAA,aA
EZ,KAAK,MAAM;AAAA,OACjB;AAGD,YAAM,OAAO,MAAM;AAAA,qCACY,KAAK,MAAM;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,OAiBzC;AAED,YAAM,OAAO,MAAM;AAAA;AAAA,aAEZ,KAAK,MAAM;AAAA,OACjB;AAED,YAAM,OAAO,MAAM;AAAA;AAAA,aAEZ,KAAK,MAAM;AAAA,OACjB;AAED,YAAM,OAAO,MAAM;AAAA;AAAA,aAEZ,KAAK,MAAM;AAAA,OACjB;AAED,YAAM,OAAO,MAAM;AAAA;AAAA,aAEZ,KAAK,MAAM;AAAA;AAAA,OAEjB;AAGD,YAAM,OAAO,MAAM;AAAA,qCACY,KAAK,MAAM;AAAA;AAAA,kDAEE,KAAK,MAAM;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,OAQtD;AAED,YAAM,OAAO,MAAM;AAAA;AAAA,aAEZ,KAAK,MAAM;AAAA,OACjB;AAGD,YAAM,OAAO,MAAM;AAAA,qCACY,KAAK,MAAM;AAAA;AAAA;AAAA,kDAGE,KAAK,MAAM;AAAA;AAAA;AAAA;AAAA,OAItD;AAED,YAAM,OAAO,MAAM;AAAA;AAAA,aAEZ,KAAK,MAAM;AAAA,OACjB;AAGD,YAAM,OAAO,MAAM;AAAA,qCACY,KAAK,MAAM;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,OAMzC;AAAA,IACH,UAAE;AACA,aAAO,QAAQ;AAAA,IACjB;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAMA,QAAyB;AAAA,IACvB,MAAM,OAAO,QAAmC;AAC9C,YAAM,KAAK,KAAK;AAAA,QACd,eAAe,KAAK,MAAM;AAAA;AAAA;AAAA;AAAA,QAI1B;AAAA,UACE,IAAI;AAAA,UACJ,IAAI;AAAA,UACJ,IAAI;AAAA,UACJ,KAAK,UAAU,IAAI,OAAO;AAAA,UAC1B,KAAK,UAAU,IAAI,QAAQ;AAAA,UAC3B,IAAI;AAAA,UACJ,IAAI;AAAA,UACJ,IAAI;AAAA,UACJ,IAAI;AAAA,UACJ,IAAI;AAAA,QACN;AAAA,MACF;AACA,aAAO,IAAI;AAAA,IACb;AAAA,IAEA,KAAK,OAAO,YAAmD;AAC7D,YAAM,WAAW,SAAS,YAAY,UAAU,QAAQ,GAAG;AAC3D,YAAM,eAAe,SAAS,gBAAgB;AAE9C,YAAM,SAAS,MAAM,KAAK,KAAK;AAAA,QAC7B,UAAU,KAAK,MAAM;AAAA,gDACmB,YAAY;AAAA;AAAA;AAAA;AAAA,4BAIhC,KAAK,MAAM;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,QAQ/B,CAAC,QAAQ;AAAA,MACX;AAEA,UAAI,OAAO,KAAK,WAAW,GAAG;AAC5B,eAAO;AAAA,MACT;AAEA,YAAM,MAAM,OAAO,KAAK,CAAC;AACzB,aAAO;AAAA,QACL,IAAI,IAAI;AAAA,QACR,YAAY,IAAI;AAAA,QAChB,WAAW,IAAI;AAAA,QACf,SAAS,IAAI;AAAA,QACb,UAAU,IAAI;AAAA,QACd,UAAU,IAAI;AAAA,QACd,UAAU,IAAI;AAAA,QACd,aAAa,IAAI;AAAA,QACjB,cAAc,IAAI;AAAA,QAClB,WAAW,IAAI;AAAA,MACjB;AAAA,IACF;AAAA,IAEA,KAAK,OAAO,UAAiC;AAC3C,YAAM,KAAK,KAAK,MAAM,eAAe,KAAK,MAAM,uBAAuB;AAAA,QACrE;AAAA,MACF,CAAC;AAAA,IACH;AAAA,IAEA,MAAM,OAAO,OAAe,YAAyC;AACnE,YAAM,QAAQ,SAAS,SAAS;AAChC,YAAM,eAAe,QAAQ,IAAI,IAAI,KAAK,KAAK,IAAI,IAAI,
KAAK,IAAI;AAEhE,YAAM,KAAK,KAAK;AAAA,QACd,UAAU,KAAK,MAAM;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,QAOrB,CAAC,OAAO,YAAY;AAAA,MACtB;AAAA,IACF;AAAA,IAEA,UAAU,OAAO,KAAe,cAAqC;AACnE,YAAM,KAAK,KAAK;AAAA,QACd,eAAe,KAAK,MAAM;AAAA;AAAA;AAAA;AAAA,QAI1B;AAAA,UACE,IAAI;AAAA,UACJ,IAAI;AAAA,UACJ,IAAI;AAAA,UACJ,KAAK,UAAU,IAAI,OAAO;AAAA,UAC1B,KAAK,UAAU,IAAI,QAAQ;AAAA,UAC3B,IAAI;AAAA,UACJ,IAAI;AAAA,UACJ,IAAI;AAAA,UACJ;AAAA,UACA,IAAI;AAAA,QACN;AAAA,MACF;AACA,aAAO,IAAI;AAAA,IACb;AAAA,IAEA,YAAY,YAAiC;AAC3C,YAAM,SAAS,MAAM,KAAK,KAAK;AAAA,QAC7B,iBAAiB,KAAK,MAAM;AAAA;AAAA;AAAA,MAG9B;AAEA,aAAO,OAAO,KAAK,IAAI,CAAC,SAAS;AAAA,QAC/B,IAAI,IAAI;AAAA,QACR,YAAY,IAAI;AAAA,QAChB,WAAW,IAAI;AAAA,QACf,SAAS,IAAI;AAAA,QACb,UAAU,IAAI;AAAA,QACd,UAAU,IAAI;AAAA,QACd,UAAU,IAAI;AAAA,QACd,aAAa,IAAI;AAAA,QACjB,cAAc,IAAI;AAAA,QAClB,WAAW,IAAI;AAAA,MACjB,EAAE;AAAA,IACJ;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAMA,YAAiC;AAAA,IAC/B,QAAQ,OAAO,cAA0C;AACvD,YAAM,KAAK,KAAK;AAAA,QACd,eAAe,KAAK,MAAM;AAAA;AAAA;AAAA;AAAA;AAAA,QAK1B;AAAA,UACE,UAAU;AAAA,UACV,UAAU;AAAA,UACV,UAAU;AAAA,UACV,UAAU;AAAA,UACV,UAAU;AAAA,UACV,KAAK,UAAU,UAAU,OAAO;AAAA,UAChC,UAAU;AAAA,UACV,UAAU,SAAS,KAAK,UAAU,UAAU,MAAM,IAAI;AAAA,UACtD,UAAU,QAAQ,KAAK,UAAU,UAAU,KAAK,IAAI;AAAA,UACpD,KAAK,UAAU,UAAU,QAAQ;AAAA,UACjC,UAAU;AAAA,UACV,UAAU;AAAA,UACV,UAAU;AAAA,UACV,KAAK,UAAU,UAAU,QAAQ;AAAA,UACjC,UAAU,SAAS;AAAA,QACrB;AAAA,MACF;AAGA,YAAM,KAAK,SAAS,QAAQ,aAAa,UAAU,EAAE,IAAI,SAAS;AAElE,aAAO,UAAU;AAAA,IACnB;AAAA,IAEA,KAAK,OAAO,gBAAmD;AAC7D,YAAM,SAAS,MAAM,KAAK,KAAK;AAAA,QAC7B;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,gBAeQ,KAAK,MAAM;AAAA,qBACN,KAAK,MAAM;AAAA;AAAA;AAAA,QAGxB,CAAC,WAAW;AAAA,MACd;AAEA,UAAI,OAAO,KAAK,WAAW,GAAG;AAC5B,eAAO;AAAA,MACT;AAEA,YAAM,MAAM,OAAO,KAAK,CAAC;AACzB,aAAO;AAAA,QACL,IAAI,IAAI;AAAA,QACR,OAAO,IAAI;AAAA,QACX,YAAY,IAAI;AAAA,QAChB,iBAAiB,IAAI;AAAA,QACrB,WAAW,IAAI;AAAA,QACf,SAAS,IAAI;AAAA,QACb,QAAQ,IAAI;AAAA,QACZ,QAAQ,IAAI;AAAA,QACZ,OAAO,IAAI;AAAA,QACX,OAAO,IAAI;AAAA,QACX,UAAU,IAAI;AAAA,QACd,SAAS,IAAI;AAAA,QACb,WAAW,IAAI;AAAA,QACf,aAAa,
IAAI;AAAA,QACjB,UAAU,IAAI;AAAA,MAChB;AAAA,IACF;AAAA,IAEA,qBAAqB,OACnB,YACA,mBAC8B;AAC9B,YAAM,SAAS,MAAM,KAAK,KAAK;AAAA,QAC7B;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,gBAeQ,KAAK,MAAM;AAAA,qBACN,KAAK,MAAM;AAAA;AAAA;AAAA;AAAA,QAIxB,CAAC,YAAY,cAAc;AAAA,MAC7B;AAEA,UAAI,OAAO,KAAK,WAAW,GAAG;AAC5B,eAAO;AAAA,MACT;AAEA,YAAM,MAAM,OAAO,KAAK,CAAC;AACzB,aAAO;AAAA,QACL,IAAI,IAAI;AAAA,QACR,OAAO,IAAI;AAAA,QACX,YAAY,IAAI;AAAA,QAChB,iBAAiB,IAAI;AAAA,QACrB,WAAW,IAAI;AAAA,QACf,SAAS,IAAI;AAAA,QACb,QAAQ,IAAI;AAAA,QACZ,QAAQ,IAAI;AAAA,QACZ,OAAO,IAAI;AAAA,QACX,OAAO,IAAI;AAAA,QACX,UAAU,IAAI;AAAA,QACd,SAAS,IAAI;AAAA,QACb,WAAW,IAAI;AAAA,QACf,aAAa,IAAI;AAAA,QACjB,UAAU,IAAI;AAAA,MAChB;AAAA,IACF;AAAA,IAEA,QAAQ,OACN,aACA,YACkB;AAClB,YAAM,aAAuB,CAAC;AAC9B,YAAM,SAAoB,CAAC;AAC3B,UAAI,aAAa;AAEjB,UAAI,QAAQ,WAAW,QAAW;AAChC,mBAAW,KAAK,aAAa,YAAY,EAAE;AAC3C,eAAO,KAAK,QAAQ,MAAM;AAAA,MAC5B;AAEA,UAAI,QAAQ,WAAW,QAAW;AAChC,mBAAW,KAAK,aAAa,YAAY,EAAE;AAC3C,eAAO,KAAK,KAAK,UAAU,QAAQ,MAAM,CAAC;AAAA,MAC5C;AAEA,UAAI,QAAQ,UAAU,QAAW;AAC/B,mBAAW,KAAK,YAAY,YAAY,EAAE;AAC1C,eAAO,KAAK,KAAK,UAAU,QAAQ,KAAK,CAAC;AAAA,MAC3C;AAEA,UAAI,QAAQ,aAAa,QAAW;AAClC,mBAAW,KAAK,2BAA2B,YAAY,SAAS;AAChE,eAAO,KAAK,KAAK,UAAU,QAAQ,QAAQ,CAAC;AAAA,MAC9C;AAEA,UAAI,QAAQ,gBAAgB,QAAW;AACrC,mBAAW,KAAK,mBAAmB,YAAY,EAAE;AACjD,eAAO,KAAK,QAAQ,WAAW;AAAA,MACjC;AAEA,UAAI,QAAQ,aAAa,QAAW;AAClC,mBAAW,KAAK,eAAe,YAAY,EAAE;AAC7C,eAAO,KAAK,KAAK,UAAU,QAAQ,QAAQ,CAAC;AAAA,MAC9C;AAEA,UAAI,WAAW,WAAW,GAAG;AAC3B;AAAA,MACF;AAEA,aAAO,KAAK,WAAW;AAEvB,YAAM,KAAK,KAAK;AAAA,QACd,UAAU,KAAK,MAAM;AAAA,eACd,WAAW,KAAK,IAAI,CAAC;AAAA,uBACb,UAAU;AAAA,QACzB;AAAA,MACF;AAGA,YAAM,YAAY,MAAM,KAAK,UAAU,IAAI,WAAW;AACtD,UAAI,WAAW;AACb,cAAM,KAAK,SAAS,QAAQ,aAAa,WAAW,IAAI,SAAS;AAAA,MACnE;AAAA,IACF;AAAA,IAEA,MAAM,OAAO,YAA+C;AAC1D,YAAM,eAAyB,CAAC;AAChC,YAAM,SAAoB,CAAC;AAC3B,UAAI,aAAa;AAEjB,UAAI,QAAQ,YAAY;AACtB,qBAAa,KAAK,oBAAoB,YAAY,EAAE;AACpD,eAAO,KAAK,QAAQ,UAAU;AAAA,MAChC;AAEA,UAAI,QAAQ,QAAQ;AAClB,qBAAa,KAAK,eAAe,YAAY,EAAE;AAC/C,eAAO,KAAK,QAAQ,MAAM;AAAA,MAC5B;AAEA,UAAI
,QAAQ,OAAO;AACjB,qBAAa,KAAK,eAAe,YAAY,EAAE;AAC/C,eAAO,KAAK,QAAQ,KAAK;AAAA,MAC3B;AAEA,YAAM,cACJ,aAAa,SAAS,IAAI,SAAS,aAAa,KAAK,OAAO,CAAC,KAAK;AAEpE,YAAM,QAAQ,QAAQ,SAAS;AAC/B,YAAM,SAAS,QAAQ,UAAU;AAEjC,YAAM,SAAS,MAAM,KAAK,KAAK;AAAA,QAC7B;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,gBAeQ,KAAK,MAAM;AAAA,qBACN,KAAK,MAAM;AAAA,WACrB,WAAW;AAAA;AAAA;AAAA,kBAGJ,YAAY,YAAY,YAAY;AAAA,QAC9C,CAAC,GAAG,QAAQ,OAAO,MAAM;AAAA,MAC3B;AAEA,aAAO,OAAO,KAAK,IAAI,CAAC,SAAS;AAAA,QAC/B,IAAI,IAAI;AAAA,QACR,OAAO,IAAI;AAAA,QACX,YAAY,IAAI;AAAA,QAChB,iBAAiB,IAAI;AAAA,QACrB,WAAW,IAAI;AAAA,QACf,SAAS,IAAI;AAAA,QACb,QAAQ,IAAI;AAAA,QACZ,QAAQ,IAAI;AAAA,QACZ,OAAO,IAAI;AAAA,QACX,OAAO,IAAI;AAAA,QACX,UAAU,IAAI;AAAA,QACd,SAAS,IAAI;AAAA,QACb,WAAW,IAAI;AAAA,QACf,aAAa,IAAI;AAAA,QACjB,UAAU,IAAI;AAAA,MAChB,EAAE;AAAA,IACJ;AAAA,IAEA,eAAe,OACb,aACA,aACkC;AAClC,YAAM,SAAS,MAAM,KAAK,KAAK;AAAA,QAC7B,sBAAsB,KAAK,MAAM;AAAA;AAAA,QAEjC,CAAC,aAAa,QAAQ;AAAA,MACxB;AAEA,UAAI,OAAO,KAAK,WAAW,GAAG;AAC5B,eAAO;AAAA,MACT;AAEA,aAAO,OAAO,KAAK,CAAC,EAAE;AAAA,IACxB;AAAA,IAEA,gBAAgB,OACd,aACA,UACA,WACkB;AAClB,YAAM,KAAK,KAAK;AAAA,QACd,eAAe,KAAK,MAAM;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,QAS1B;AAAA,UACE;AAAA,UACA;AAAA,UACA,KAAK,UAAU,MAAM;AAAA,UACrB,OAAO;AAAA,UACP,OAAO;AAAA,UACP,OAAO;AAAA,QACT;AAAA,MACF;AAAA,IACF;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAMA,SAA0B;AAAA,IACxB,SAAS,OAAO,SAAiB,SAAmC;AAElE,YAAM,SAAS,MAAM,KAAK,KAAK;AAAA,QAC7B,eAAe,KAAK,MAAM;AAAA;AAAA;AAAA,QAG1B,CAAC,OAAO;AAAA,MACV;AAGA,iBAAW,OAAO,OAAO,MAAM;AAC7B,cAAM,KAAK,SAAS;AAAA,UAClB,SAAS,OAAO,IAAI,IAAI,YAAY;AAAA,UACpC;AAAA,QACF;AAAA,MACF;AAEA,aAAO,OAAO,KAAK;AAAA,IACrB;AAAA,IAEA,WAAW,OACT,SACA,aACA,YACkB;AAClB,YAAM,KAAK,KAAK;AAAA,QACd,eAAe,KAAK,MAAM;AAAA;AAAA,QAE1B,CAAC,SAAS,aAAa,OAAO;AAAA,MAChC;AAAA,IACF;AAAA,IAEA,YAAY,OAAO,YAA4C;AAC7D,YAAM,SAAS,MAAM,KAAK,KAAK;AAAA,QAC7B,wCAAwC,KAAK,MAAM;AAAA;AAAA,QAEnD,CAAC,OAAO;AAAA,MACV;AAEA,aAAO,OAAO,KAAK,IAAI,CAAC,SAAS;AAAA,QAC/B,aAAa,IAAI;AAAA,QACjB,WAAW,IAAI;AAAA,MACjB,EAAE;AAAA,IACJ;AAAA,EACF;AAAA;AAAA;AAAA;AAA
A,EAMA,SAA2B;AAAA,IACzB,SAAS,OAAO,QAAgB,eAAyC;AAEvE,YAAM,UAAU,KAAK,WAAW,MAAM;AAGtC,YAAM,SAAS,MAAM,KAAK,KAAK;AAAA,QAC7B;AAAA,QACA,CAAC,OAAO;AAAA,MACV;AAEA,YAAM,WAAW,OAAO,KAAK,CAAC,EAAE;AAEhC,UAAI,UAAU;AAEZ,cAAM,QAAQ,GAAG,QAAQ,GAAG,IAAI,KAAK,IAAI,CAAC;AAC1C,cAAM,YAAY,IAAI,KAAK,KAAK,IAAI,IAAI,aAAa,GAAI;AAEzD,cAAM,KAAK,KAAK;AAAA,UACd,eAAe,KAAK,MAAM;AAAA;AAAA;AAAA;AAAA,UAI1B,CAAC,QAAQ,OAAO,SAAS;AAAA,QAC3B;AAAA,MACF;AAEA,aAAO;AAAA,IACT;AAAA,IAEA,SAAS,OAAO,WAAkC;AAChD,YAAM,UAAU,KAAK,WAAW,MAAM;AAGtC,YAAM,KAAK,KAAK,MAAM,iCAAiC,CAAC,OAAO,CAAC;AAGhE,YAAM,KAAK,KAAK;AAAA,QACd,eAAe,KAAK,MAAM;AAAA,QAC1B,CAAC,MAAM;AAAA,MACT;AAAA,IACF;AAAA,IAEA,OAAO,OAAO,QAAgB,eAAyC;AACrE,YAAM,YAAY,IAAI,KAAK,KAAK,IAAI,IAAI,aAAa,GAAI;AAEzD,YAAM,SAAS,MAAM,KAAK,KAAK;AAAA,QAC7B,UAAU,KAAK,MAAM;AAAA;AAAA;AAAA;AAAA,QAIrB,CAAC,QAAQ,SAAS;AAAA,MACpB;AAEA,aAAO,OAAO,KAAK,SAAS;AAAA,IAC9B;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAMA,WAA+B;AAAA,IAC7B,WAAW,CACT,SACA,aACgB;AAChB,UAAI,CAAC,KAAK,UAAU,IAAI,OAAO,GAAG;AAChC,aAAK,UAAU,IAAI,SAAS,oBAAI,IAAI,CAAC;AAGrC,YAAI,KAAK,cAAc;AACrB,eAAK,aACF,MAAM,WAAW,OAAO,GAAG,EAC3B;AAAA,YAAM,CAAC,UACN,QAAQ,MAAM,uBAAuB,OAAO,KAAK,KAAK;AAAA,UACxD;AAAA,QACJ;AAAA,MACF;AAEA,WAAK,UAAU,IAAI,OAAO,EAAG,IAAI,QAAQ;AAEzC,aAAO,MAAM;AACX,cAAM,YAAY,KAAK,UAAU,IAAI,OAAO;AAC5C,YAAI,WAAW;AACb,oBAAU,OAAO,QAAQ;AACzB,cAAI,UAAU,SAAS,GAAG;AACxB,iBAAK,UAAU,OAAO,OAAO;AAG7B,gBAAI,KAAK,cAAc;AACrB,mBAAK,aACF,MAAM,aAAa,OAAO,GAAG,EAC7B;AAAA,gBAAM,CAAC,UACN,QAAQ,MAAM,2BAA2B,OAAO,KAAK,KAAK;AAAA,cAC5D;AAAA,YACJ;AAAA,UACF;AAAA,QACF;AAAA,MACF;AAAA,IACF;AAAA,IAEA,SAAS,OAAO,SAAiB,SAAiC;AAChE,YAAM,UAAU,KAAK,UAAU,IAAI;AACnC,YAAM,KAAK,KAAK,MAAM,WAAW,OAAO,SAAS,CAAC,OAAO,CAAC;AAAA,IAC5D;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAMQ,WAAW,QAAwB;AAEzC,QAAI,OAAO;AACX,aAAS,IAAI,GAAG,IAAI,OAAO,QAAQ,KAAK;AACtC,YAAM,OAAO,OAAO,WAAW,CAAC;AAChC,cAAQ,QAAQ,KAAK,OAAO;AAC5B,aAAO,OAAO;AAAA,IAChB;AACA,WAAO;AAAA,EACT;AACF;AAGA,IAAO,gBAAQ;","names":[]}
|
package/package.json
ADDED
|
@@ -0,0 +1,61 @@
|
|
|
1
|
+
{
|
|
2
|
+
"name": "@stepflowjs/storage-postgres",
|
|
3
|
+
"version": "0.0.1",
|
|
4
|
+
"description": "PostgreSQL storage adapter for Stepflow",
|
|
5
|
+
"type": "module",
|
|
6
|
+
"main": "./dist/index.js",
|
|
7
|
+
"module": "./dist/index.js",
|
|
8
|
+
"types": "./dist/index.d.ts",
|
|
9
|
+
"exports": {
|
|
10
|
+
".": {
|
|
11
|
+
"import": "./dist/index.js",
|
|
12
|
+
"types": "./dist/index.d.ts"
|
|
13
|
+
}
|
|
14
|
+
},
|
|
15
|
+
"files": [
|
|
16
|
+
"dist"
|
|
17
|
+
],
|
|
18
|
+
"dependencies": {
|
|
19
|
+
"pg": "^8.13.1",
|
|
20
|
+
"@stepflowjs/core": "0.0.1"
|
|
21
|
+
},
|
|
22
|
+
"devDependencies": {
|
|
23
|
+
"@types/pg": "^8.11.10",
|
|
24
|
+
"tsup": "^8.5.1",
|
|
25
|
+
"vitest": "^4.0.17",
|
|
26
|
+
"@stepflowjs/storage-tests": "0.1.0"
|
|
27
|
+
},
|
|
28
|
+
"peerDependencies": {
|
|
29
|
+
"typescript": "^5.0.0"
|
|
30
|
+
},
|
|
31
|
+
"license": "MIT",
|
|
32
|
+
"author": "Stepflow Contributors",
|
|
33
|
+
"repository": {
|
|
34
|
+
"type": "git",
|
|
35
|
+
"url": "https://stepflow-production.up.railway.app",
|
|
36
|
+
"directory": "packages/storage/postgres"
|
|
37
|
+
},
|
|
38
|
+
"homepage": "https://stepflow-production.up.railway.app",
|
|
39
|
+
"bugs": {
|
|
40
|
+
"url": "https://stepflow-production.up.railway.app"
|
|
41
|
+
},
|
|
42
|
+
"keywords": [
|
|
43
|
+
"stepflow",
|
|
44
|
+
"storage",
|
|
45
|
+
"postgres",
|
|
46
|
+
"postgresql",
|
|
47
|
+
"adapter",
|
|
48
|
+
"workflow",
|
|
49
|
+
"orchestration"
|
|
50
|
+
],
|
|
51
|
+
"publishConfig": {
|
|
52
|
+
"access": "public"
|
|
53
|
+
},
|
|
54
|
+
"scripts": {
|
|
55
|
+
"build": "tsup",
|
|
56
|
+
"dev": "tsup --watch",
|
|
57
|
+
"typecheck": "tsc --noEmit",
|
|
58
|
+
"test": "vitest",
|
|
59
|
+
"clean": "rm -rf dist"
|
|
60
|
+
}
|
|
61
|
+
}
|