@stepflowjs/storage-mssql 0.0.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/index.d.ts +34 -0
- package/dist/index.js +704 -0
- package/dist/index.js.map +1 -0
- package/package.json +61 -0
package/dist/index.d.ts
ADDED
|
@@ -0,0 +1,34 @@
|
|
|
1
|
+
import { StorageAdapter, QueueOperations, ExecutionOperations, EventOperations, LeaderOperations, RealtimeOperations } from '@stepflowjs/core/storage';
|
|
2
|
+
|
|
3
|
+
interface MssqlStorageOptions {
|
|
4
|
+
server: string;
|
|
5
|
+
database: string;
|
|
6
|
+
user: string;
|
|
7
|
+
password: string;
|
|
8
|
+
port?: number;
|
|
9
|
+
schema?: string;
|
|
10
|
+
poolSize?: number;
|
|
11
|
+
options?: {
|
|
12
|
+
encrypt?: boolean;
|
|
13
|
+
trustServerCertificate?: boolean;
|
|
14
|
+
};
|
|
15
|
+
}
|
|
16
|
+
declare class MssqlStorageAdapter implements StorageAdapter {
|
|
17
|
+
private pool;
|
|
18
|
+
private schema;
|
|
19
|
+
private emitter;
|
|
20
|
+
private connected;
|
|
21
|
+
private ownerId;
|
|
22
|
+
constructor(options: MssqlStorageOptions);
|
|
23
|
+
connect(): Promise<void>;
|
|
24
|
+
disconnect(): Promise<void>;
|
|
25
|
+
healthCheck(): Promise<boolean>;
|
|
26
|
+
private runMigrations;
|
|
27
|
+
queue: QueueOperations;
|
|
28
|
+
execution: ExecutionOperations;
|
|
29
|
+
events: EventOperations;
|
|
30
|
+
leader: LeaderOperations;
|
|
31
|
+
realtime: RealtimeOperations;
|
|
32
|
+
}
|
|
33
|
+
|
|
34
|
+
export { MssqlStorageAdapter, type MssqlStorageOptions, MssqlStorageAdapter as default };
|
package/dist/index.js
ADDED
|
@@ -0,0 +1,704 @@
|
|
|
1
|
+
// src/index.ts
|
|
2
|
+
import sql from "mssql";
|
|
3
|
+
import { EventEmitter } from "events";
|
|
4
|
+
var MssqlStorageAdapter = class {
|
|
5
|
+
pool;
|
|
6
|
+
schema;
|
|
7
|
+
emitter;
|
|
8
|
+
connected = false;
|
|
9
|
+
ownerId;
|
|
10
|
+
constructor(options) {
|
|
11
|
+
this.schema = options.schema ?? "stepflow";
|
|
12
|
+
this.ownerId = `${process.pid}-${Date.now()}`;
|
|
13
|
+
this.emitter = new EventEmitter();
|
|
14
|
+
this.emitter.setMaxListeners(0);
|
|
15
|
+
const config = {
|
|
16
|
+
server: options.server,
|
|
17
|
+
database: options.database,
|
|
18
|
+
user: options.user,
|
|
19
|
+
password: options.password,
|
|
20
|
+
port: options.port ?? 1433,
|
|
21
|
+
pool: {
|
|
22
|
+
max: options.poolSize ?? 20,
|
|
23
|
+
min: 0,
|
|
24
|
+
idleTimeoutMillis: 3e4
|
|
25
|
+
},
|
|
26
|
+
options: {
|
|
27
|
+
encrypt: options.options?.encrypt ?? true,
|
|
28
|
+
trustServerCertificate: options.options?.trustServerCertificate ?? false,
|
|
29
|
+
enableArithAbort: true
|
|
30
|
+
}
|
|
31
|
+
};
|
|
32
|
+
this.pool = new sql.ConnectionPool(config);
|
|
33
|
+
}
|
|
34
|
+
// ============================================================================
|
|
35
|
+
// Connection Lifecycle
|
|
36
|
+
// ============================================================================
|
|
37
|
+
async connect() {
|
|
38
|
+
await this.pool.connect();
|
|
39
|
+
await this.pool.request().query("SELECT 1");
|
|
40
|
+
await this.runMigrations();
|
|
41
|
+
this.connected = true;
|
|
42
|
+
}
|
|
43
|
+
async disconnect() {
|
|
44
|
+
try {
|
|
45
|
+
await this.pool.request().query(`EXEC sp_releaseapplock @Resource = 'stepflow_leader'`);
|
|
46
|
+
} catch {
|
|
47
|
+
}
|
|
48
|
+
await this.pool.close();
|
|
49
|
+
this.emitter.removeAllListeners();
|
|
50
|
+
this.connected = false;
|
|
51
|
+
}
|
|
52
|
+
async healthCheck() {
|
|
53
|
+
if (!this.connected) return false;
|
|
54
|
+
try {
|
|
55
|
+
await this.pool.request().query("SELECT 1");
|
|
56
|
+
return true;
|
|
57
|
+
} catch {
|
|
58
|
+
return false;
|
|
59
|
+
}
|
|
60
|
+
}
|
|
61
|
+
// ============================================================================
|
|
62
|
+
// Schema Migrations
|
|
63
|
+
// ============================================================================
|
|
64
|
+
async runMigrations() {
|
|
65
|
+
await this.pool.request().query(`
|
|
66
|
+
IF NOT EXISTS (SELECT * FROM sys.schemas WHERE name = '${this.schema}')
|
|
67
|
+
BEGIN
|
|
68
|
+
EXEC('CREATE SCHEMA ${this.schema}')
|
|
69
|
+
END
|
|
70
|
+
`);
|
|
71
|
+
await this.pool.request().query(`
|
|
72
|
+
IF NOT EXISTS (SELECT * FROM sys.tables WHERE name = 'jobs' AND schema_id = SCHEMA_ID('${this.schema}'))
|
|
73
|
+
BEGIN
|
|
74
|
+
CREATE TABLE ${this.schema}.jobs (
|
|
75
|
+
id UNIQUEIDENTIFIER PRIMARY KEY,
|
|
76
|
+
workflow_id NVARCHAR(255) NOT NULL,
|
|
77
|
+
event_name NVARCHAR(255) NOT NULL,
|
|
78
|
+
payload NVARCHAR(MAX) NOT NULL,
|
|
79
|
+
metadata NVARCHAR(MAX) NOT NULL DEFAULT '{}',
|
|
80
|
+
priority INT NOT NULL DEFAULT 0,
|
|
81
|
+
attempts INT NOT NULL DEFAULT 0,
|
|
82
|
+
max_attempts INT NOT NULL DEFAULT 3,
|
|
83
|
+
scheduled_for DATETIME2,
|
|
84
|
+
locked_until DATETIME2,
|
|
85
|
+
locked_by NVARCHAR(255),
|
|
86
|
+
created_at DATETIME2 NOT NULL DEFAULT GETUTCDATE(),
|
|
87
|
+
updated_at DATETIME2 NOT NULL DEFAULT GETUTCDATE()
|
|
88
|
+
)
|
|
89
|
+
END
|
|
90
|
+
`);
|
|
91
|
+
await this.pool.request().query(`
|
|
92
|
+
IF NOT EXISTS (SELECT * FROM sys.indexes WHERE name = 'idx_jobs_workflow_id' AND object_id = OBJECT_ID('${this.schema}.jobs'))
|
|
93
|
+
BEGIN
|
|
94
|
+
CREATE INDEX idx_jobs_workflow_id ON ${this.schema}.jobs(workflow_id)
|
|
95
|
+
END
|
|
96
|
+
`);
|
|
97
|
+
await this.pool.request().query(`
|
|
98
|
+
IF NOT EXISTS (SELECT * FROM sys.indexes WHERE name = 'idx_jobs_scheduled_for' AND object_id = OBJECT_ID('${this.schema}.jobs'))
|
|
99
|
+
BEGIN
|
|
100
|
+
CREATE INDEX idx_jobs_scheduled_for ON ${this.schema}.jobs(scheduled_for) WHERE scheduled_for IS NOT NULL
|
|
101
|
+
END
|
|
102
|
+
`);
|
|
103
|
+
await this.pool.request().query(`
|
|
104
|
+
IF NOT EXISTS (SELECT * FROM sys.indexes WHERE name = 'idx_jobs_priority_created' AND object_id = OBJECT_ID('${this.schema}.jobs'))
|
|
105
|
+
BEGIN
|
|
106
|
+
CREATE INDEX idx_jobs_priority_created ON ${this.schema}.jobs(priority DESC, created_at ASC)
|
|
107
|
+
END
|
|
108
|
+
`);
|
|
109
|
+
await this.pool.request().query(`
|
|
110
|
+
IF NOT EXISTS (SELECT * FROM sys.tables WHERE name = 'executions' AND schema_id = SCHEMA_ID('${this.schema}'))
|
|
111
|
+
BEGIN
|
|
112
|
+
CREATE TABLE ${this.schema}.executions (
|
|
113
|
+
id UNIQUEIDENTIFIER PRIMARY KEY,
|
|
114
|
+
run_id NVARCHAR(255) NOT NULL,
|
|
115
|
+
workflow_id NVARCHAR(255) NOT NULL,
|
|
116
|
+
workflow_version NVARCHAR(255),
|
|
117
|
+
event_name NVARCHAR(255) NOT NULL,
|
|
118
|
+
payload NVARCHAR(MAX) NOT NULL,
|
|
119
|
+
status NVARCHAR(50) NOT NULL,
|
|
120
|
+
result NVARCHAR(MAX),
|
|
121
|
+
error NVARCHAR(MAX),
|
|
122
|
+
metadata NVARCHAR(MAX) NOT NULL DEFAULT '{}',
|
|
123
|
+
attempt INT NOT NULL DEFAULT 1,
|
|
124
|
+
started_at DATETIME2 NOT NULL,
|
|
125
|
+
completed_at DATETIME2,
|
|
126
|
+
timeline NVARCHAR(MAX) NOT NULL DEFAULT '[]',
|
|
127
|
+
idempotency_key NVARCHAR(255)
|
|
128
|
+
)
|
|
129
|
+
END
|
|
130
|
+
`);
|
|
131
|
+
await this.pool.request().query(`
|
|
132
|
+
IF NOT EXISTS (SELECT * FROM sys.indexes WHERE name = 'idx_executions_run_id' AND object_id = OBJECT_ID('${this.schema}.executions'))
|
|
133
|
+
BEGIN
|
|
134
|
+
CREATE INDEX idx_executions_run_id ON ${this.schema}.executions(run_id)
|
|
135
|
+
END
|
|
136
|
+
`);
|
|
137
|
+
await this.pool.request().query(`
|
|
138
|
+
IF NOT EXISTS (SELECT * FROM sys.indexes WHERE name = 'idx_executions_workflow_id' AND object_id = OBJECT_ID('${this.schema}.executions'))
|
|
139
|
+
BEGIN
|
|
140
|
+
CREATE INDEX idx_executions_workflow_id ON ${this.schema}.executions(workflow_id)
|
|
141
|
+
END
|
|
142
|
+
`);
|
|
143
|
+
await this.pool.request().query(`
|
|
144
|
+
IF NOT EXISTS (SELECT * FROM sys.indexes WHERE name = 'idx_executions_status' AND object_id = OBJECT_ID('${this.schema}.executions'))
|
|
145
|
+
BEGIN
|
|
146
|
+
CREATE INDEX idx_executions_status ON ${this.schema}.executions(status)
|
|
147
|
+
END
|
|
148
|
+
`);
|
|
149
|
+
await this.pool.request().query(`
|
|
150
|
+
IF NOT EXISTS (SELECT * FROM sys.indexes WHERE name = 'idx_executions_idempotency' AND object_id = OBJECT_ID('${this.schema}.executions'))
|
|
151
|
+
BEGIN
|
|
152
|
+
CREATE INDEX idx_executions_idempotency ON ${this.schema}.executions(workflow_id, idempotency_key) WHERE idempotency_key IS NOT NULL
|
|
153
|
+
END
|
|
154
|
+
`);
|
|
155
|
+
await this.pool.request().query(`
|
|
156
|
+
IF NOT EXISTS (SELECT * FROM sys.tables WHERE name = 'step_results' AND schema_id = SCHEMA_ID('${this.schema}'))
|
|
157
|
+
BEGIN
|
|
158
|
+
CREATE TABLE ${this.schema}.step_results (
|
|
159
|
+
id UNIQUEIDENTIFIER PRIMARY KEY DEFAULT NEWID(),
|
|
160
|
+
execution_id UNIQUEIDENTIFIER NOT NULL,
|
|
161
|
+
step_name NVARCHAR(255) NOT NULL,
|
|
162
|
+
result NVARCHAR(MAX) NOT NULL,
|
|
163
|
+
started_at DATETIME2 NOT NULL,
|
|
164
|
+
completed_at DATETIME2 NOT NULL,
|
|
165
|
+
duration_ms INT NOT NULL,
|
|
166
|
+
CONSTRAINT uq_step_results_execution_step UNIQUE(execution_id, step_name),
|
|
167
|
+
CONSTRAINT fk_step_results_execution FOREIGN KEY (execution_id) REFERENCES ${this.schema}.executions(id) ON DELETE CASCADE
|
|
168
|
+
)
|
|
169
|
+
END
|
|
170
|
+
`);
|
|
171
|
+
await this.pool.request().query(`
|
|
172
|
+
IF NOT EXISTS (SELECT * FROM sys.indexes WHERE name = 'idx_step_results_execution_id' AND object_id = OBJECT_ID('${this.schema}.step_results'))
|
|
173
|
+
BEGIN
|
|
174
|
+
CREATE INDEX idx_step_results_execution_id ON ${this.schema}.step_results(execution_id)
|
|
175
|
+
END
|
|
176
|
+
`);
|
|
177
|
+
await this.pool.request().query(`
|
|
178
|
+
IF NOT EXISTS (SELECT * FROM sys.tables WHERE name = 'event_waiters' AND schema_id = SCHEMA_ID('${this.schema}'))
|
|
179
|
+
BEGIN
|
|
180
|
+
CREATE TABLE ${this.schema}.event_waiters (
|
|
181
|
+
id UNIQUEIDENTIFIER PRIMARY KEY DEFAULT NEWID(),
|
|
182
|
+
event_id NVARCHAR(255) NOT NULL,
|
|
183
|
+
execution_id UNIQUEIDENTIFIER NOT NULL,
|
|
184
|
+
timeout_at DATETIME2,
|
|
185
|
+
created_at DATETIME2 NOT NULL DEFAULT GETUTCDATE(),
|
|
186
|
+
CONSTRAINT fk_event_waiters_execution FOREIGN KEY (execution_id) REFERENCES ${this.schema}.executions(id) ON DELETE CASCADE
|
|
187
|
+
)
|
|
188
|
+
END
|
|
189
|
+
`);
|
|
190
|
+
await this.pool.request().query(`
|
|
191
|
+
IF NOT EXISTS (SELECT * FROM sys.indexes WHERE name = 'idx_event_waiters_event_id' AND object_id = OBJECT_ID('${this.schema}.event_waiters'))
|
|
192
|
+
BEGIN
|
|
193
|
+
CREATE INDEX idx_event_waiters_event_id ON ${this.schema}.event_waiters(event_id)
|
|
194
|
+
END
|
|
195
|
+
`);
|
|
196
|
+
await this.pool.request().query(`
|
|
197
|
+
IF NOT EXISTS (SELECT * FROM sys.tables WHERE name = 'leader_locks' AND schema_id = SCHEMA_ID('${this.schema}'))
|
|
198
|
+
BEGIN
|
|
199
|
+
CREATE TABLE ${this.schema}.leader_locks (
|
|
200
|
+
lock_id NVARCHAR(255) PRIMARY KEY,
|
|
201
|
+
owner NVARCHAR(255) NOT NULL,
|
|
202
|
+
expires_at DATETIME2 NOT NULL,
|
|
203
|
+
acquired_at DATETIME2 NOT NULL DEFAULT GETUTCDATE()
|
|
204
|
+
)
|
|
205
|
+
END
|
|
206
|
+
`);
|
|
207
|
+
}
|
|
208
|
+
// ============================================================================
|
|
209
|
+
// Queue Operations
|
|
210
|
+
// ============================================================================
|
|
211
|
+
// Queue operations. Implemented as an arrow IIFE so push/schedule can share
// one INSERT helper and pop/getDelayed one row mapper (arrows keep `this`
// bound to the adapter instance at field-initialization time).
queue = (() => {
  // Map a jobs-table row to the QueuedJob shape used by the core.
  const toJob = (row) => ({
    id: row.id,
    workflowId: row.workflow_id,
    eventName: row.event_name,
    payload: JSON.parse(row.payload),
    metadata: JSON.parse(row.metadata),
    priority: row.priority,
    attempts: row.attempts,
    maxAttempts: row.max_attempts,
    scheduledFor: row.scheduled_for,
    createdAt: row.created_at
  });
  // Shared INSERT behind push() and schedule(); they differ only in which
  // timestamp is written to scheduled_for.
  const insertJob = async (job, scheduledFor) => {
    await this.pool.request()
      .input("id", sql.UniqueIdentifier, job.id)
      .input("workflow_id", sql.NVarChar, job.workflowId)
      .input("event_name", sql.NVarChar, job.eventName)
      .input("payload", sql.NVarChar(sql.MAX), JSON.stringify(job.payload))
      .input("metadata", sql.NVarChar(sql.MAX), JSON.stringify(job.metadata))
      .input("priority", sql.Int, job.priority)
      .input("attempts", sql.Int, job.attempts)
      .input("max_attempts", sql.Int, job.maxAttempts)
      .input("scheduled_for", sql.DateTime2, scheduledFor)
      .input("created_at", sql.DateTime2, job.createdAt)
      .query(`
        INSERT INTO ${this.schema}.jobs (
          id, workflow_id, event_name, payload, metadata,
          priority, attempts, max_attempts, scheduled_for, created_at
        ) VALUES (
          @id, @workflow_id, @event_name, @payload, @metadata,
          @priority, @attempts, @max_attempts, @scheduled_for, @created_at
        )
      `);
    return job.id;
  };
  return {
    // Enqueue a job for immediate (or its own scheduled) processing.
    push: (job) => insertJob(job, job.scheduledFor),
    // Claim the next runnable job for a worker, or null when none is ready.
    pop: async (options) => {
      const workerId = options?.workerId ?? `worker-${process.pid}`;
      const lockDuration = options?.lockDuration ?? 3e4; // 30s default lease
      // UPDLOCK + READPAST lets concurrent workers skip rows another worker
      // has already claimed instead of blocking on them.
      const result = await this.pool.request()
        .input("worker_id", sql.NVarChar, workerId)
        .input("lock_duration", sql.Int, lockDuration)
        .query(`
          WITH NextJob AS (
            SELECT TOP 1 id
            FROM ${this.schema}.jobs WITH (UPDLOCK, READPAST)
            WHERE (scheduled_for IS NULL OR scheduled_for <= GETUTCDATE())
              AND (locked_until IS NULL OR locked_until < GETUTCDATE())
            ORDER BY priority DESC, created_at ASC
          )
          UPDATE j
          SET locked_until = DATEADD(MILLISECOND, @lock_duration, GETUTCDATE()),
              locked_by = @worker_id,
              updated_at = GETUTCDATE()
          OUTPUT
            INSERTED.id,
            INSERTED.workflow_id,
            INSERTED.event_name,
            INSERTED.payload,
            INSERTED.metadata,
            INSERTED.priority,
            INSERTED.attempts,
            INSERTED.max_attempts,
            INSERTED.scheduled_for,
            INSERTED.created_at
          FROM ${this.schema}.jobs j
          INNER JOIN NextJob ON j.id = NextJob.id
        `);
      const row = result.recordset[0];
      return row ? toJob(row) : null;
    },
    // Acknowledge successful processing by deleting the row.
    ack: async (jobId) => {
      await this.pool.request()
        .input("id", sql.UniqueIdentifier, jobId)
        .query(`DELETE FROM ${this.schema}.jobs WHERE id = @id`);
    },
    // Return a job to the queue, bumping its attempt counter; an optional
    // delay (ms) defers the retry via scheduled_for.
    nack: async (jobId, options) => {
      const delay = options?.delay ?? 0;
      const scheduledFor = delay > 0 ? new Date(Date.now() + delay) : null;
      await this.pool.request()
        .input("id", sql.UniqueIdentifier, jobId)
        .input("scheduled_for", sql.DateTime2, scheduledFor)
        .query(`
          UPDATE ${this.schema}.jobs
          SET attempts = attempts + 1,
              scheduled_for = @scheduled_for,
              locked_until = NULL,
              locked_by = NULL,
              updated_at = GETUTCDATE()
          WHERE id = @id
        `);
    },
    // Enqueue a job to run at a specific time.
    schedule: (job, executeAt) => insertJob(job, executeAt),
    // List jobs whose scheduled time is still in the future.
    getDelayed: async () => {
      const result = await this.pool.request().query(`
        SELECT * FROM ${this.schema}.jobs
        WHERE scheduled_for IS NOT NULL AND scheduled_for > GETUTCDATE()
        ORDER BY scheduled_for ASC
      `);
      return result.recordset.map(toJob);
    }
  };
})();
|
|
318
|
+
// ============================================================================
|
|
319
|
+
// Execution Operations
|
|
320
|
+
// ============================================================================
|
|
321
|
+
// Execution operations. Arrow IIFE so get/getByIdempotencyKey/list can share
// the steps subquery and row mapper (arrows keep `this` bound to the adapter).
execution = (() => {
  // Correlated subquery that inlines the execution's completed step results
  // as a JSON array column (NULL when the execution has no steps yet).
  const stepsJson = () => `
        (
          SELECT
            sr.step_name as name,
            'completed' as status,
            JSON_QUERY(sr.result, '$.data') as result,
            sr.started_at as startedAt,
            sr.completed_at as completedAt,
            sr.duration_ms as durationMs
          FROM ${this.schema}.step_results sr
          WHERE sr.execution_id = e.id
          ORDER BY sr.started_at
          FOR JSON PATH
        ) as steps`;
  // Map an executions row (plus the steps JSON column) to the Execution
  // shape used by the core.
  const toExecution = (row) => ({
    id: row.id,
    runId: row.run_id,
    workflowId: row.workflow_id,
    workflowVersion: row.workflow_version,
    eventName: row.event_name,
    payload: JSON.parse(row.payload),
    status: row.status,
    result: row.result ? JSON.parse(row.result) : void 0,
    error: row.error ? JSON.parse(row.error) : void 0,
    steps: row.steps ? JSON.parse(row.steps) : [],
    metadata: JSON.parse(row.metadata),
    attempt: row.attempt,
    startedAt: row.started_at,
    completedAt: row.completed_at,
    timeline: JSON.parse(row.timeline)
  });
  return {
    // Persist a new execution row and broadcast the initial snapshot.
    create: async (execution) => {
      await this.pool.request()
        .input("id", sql.UniqueIdentifier, execution.id)
        .input("run_id", sql.NVarChar, execution.runId)
        .input("workflow_id", sql.NVarChar, execution.workflowId)
        .input("workflow_version", sql.NVarChar, execution.workflowVersion)
        .input("event_name", sql.NVarChar, execution.eventName)
        .input("payload", sql.NVarChar(sql.MAX), JSON.stringify(execution.payload))
        .input("status", sql.NVarChar, execution.status)
        .input("result", sql.NVarChar(sql.MAX), execution.result ? JSON.stringify(execution.result) : null)
        .input("error", sql.NVarChar(sql.MAX), execution.error ? JSON.stringify(execution.error) : null)
        .input("metadata", sql.NVarChar(sql.MAX), JSON.stringify(execution.metadata))
        .input("attempt", sql.Int, execution.attempt)
        .input("started_at", sql.DateTime2, execution.startedAt)
        .input("completed_at", sql.DateTime2, execution.completedAt)
        .input("timeline", sql.NVarChar(sql.MAX), JSON.stringify(execution.timeline))
        .input("idempotency_key", sql.NVarChar, execution.metadata.idempotencyKey)
        .query(`
          INSERT INTO ${this.schema}.executions (
            id, run_id, workflow_id, workflow_version, event_name,
            payload, status, result, error, metadata, attempt,
            started_at, completed_at, timeline, idempotency_key
          ) VALUES (
            @id, @run_id, @workflow_id, @workflow_version, @event_name,
            @payload, @status, @result, @error, @metadata, @attempt,
            @started_at, @completed_at, @timeline, @idempotency_key
          )
        `);
      await this.realtime.publish(`execution:${execution.id}`, execution);
      return execution.id;
    },
    // Fetch one execution (with inlined steps) by id, or null.
    get: async (executionId) => {
      const result = await this.pool.request()
        .input("id", sql.UniqueIdentifier, executionId)
        .query(`
          SELECT e.*, ${stepsJson()}
          FROM ${this.schema}.executions e
          WHERE e.id = @id
        `);
      const row = result.recordset[0];
      return row ? toExecution(row) : null;
    },
    // Look up a prior execution for idempotent replay, or null.
    getByIdempotencyKey: async (workflowId, idempotencyKey) => {
      const result = await this.pool.request()
        .input("workflow_id", sql.NVarChar, workflowId)
        .input("idempotency_key", sql.NVarChar, idempotencyKey)
        .query(`
          SELECT TOP 1 e.*, ${stepsJson()}
          FROM ${this.schema}.executions e
          WHERE e.workflow_id = @workflow_id AND e.idempotency_key = @idempotency_key
        `);
      const row = result.recordset[0];
      return row ? toExecution(row) : null;
    },
    // Patch selected execution fields, then re-read and broadcast the
    // updated snapshot. No-op when `updates` carries nothing to set.
    update: async (executionId, updates) => {
      const setClauses = [];
      const request = this.pool.request();
      request.input("id", sql.UniqueIdentifier, executionId);
      if (updates.status !== void 0) {
        setClauses.push("status = @status");
        request.input("status", sql.NVarChar, updates.status);
      }
      if (updates.result !== void 0) {
        setClauses.push("result = @result");
        request.input("result", sql.NVarChar(sql.MAX), JSON.stringify(updates.result));
      }
      if (updates.error !== void 0) {
        setClauses.push("error = @error");
        request.input("error", sql.NVarChar(sql.MAX), JSON.stringify(updates.error));
      }
      if (updates.metadata !== void 0) {
        // JSON_MODIFY at '$' replaces the whole stored document.
        setClauses.push("metadata = JSON_MODIFY(metadata, '$', JSON_QUERY(@metadata))");
        request.input("metadata", sql.NVarChar(sql.MAX), JSON.stringify(updates.metadata));
      }
      if (updates.completedAt !== void 0) {
        setClauses.push("completed_at = @completed_at");
        request.input("completed_at", sql.DateTime2, updates.completedAt);
      }
      if (updates.timeline !== void 0) {
        setClauses.push("timeline = @timeline");
        request.input("timeline", sql.NVarChar(sql.MAX), JSON.stringify(updates.timeline));
      }
      if (setClauses.length === 0) {
        return;
      }
      await request.query(`
        UPDATE ${this.schema}.executions
        SET ${setClauses.join(", ")}
        WHERE id = @id
      `);
      const execution = await this.execution.get(executionId);
      if (execution) {
        await this.realtime.publish(`execution:${executionId}`, execution);
      }
    },
    // Page through executions with optional workflow/status/run filters.
    list: async (options) => {
      const whereClauses = [];
      const request = this.pool.request();
      if (options.workflowId) {
        whereClauses.push("e.workflow_id = @workflow_id");
        request.input("workflow_id", sql.NVarChar, options.workflowId);
      }
      if (options.status) {
        whereClauses.push("e.status = @status");
        request.input("status", sql.NVarChar, options.status);
      }
      if (options.runId) {
        whereClauses.push("e.run_id = @run_id");
        request.input("run_id", sql.NVarChar, options.runId);
      }
      const whereClause = whereClauses.length > 0 ? `WHERE ${whereClauses.join(" AND ")}` : "";
      // Paging values are bound as parameters rather than interpolated into
      // the SQL text, so non-numeric caller input cannot alter the statement.
      request.input("offset", sql.Int, options.offset ?? 0);
      request.input("limit", sql.Int, options.limit ?? 50);
      const result = await request.query(`
        SELECT e.*, ${stepsJson()}
        FROM ${this.schema}.executions e
        ${whereClause}
        ORDER BY e.started_at DESC
        OFFSET @offset ROWS
        FETCH NEXT @limit ROWS ONLY
      `);
      return result.recordset.map(toExecution);
    },
    // Fetch a memoized step result, or null when the step has not run.
    getStepResult: async (executionId, stepName) => {
      const result = await this.pool.request()
        .input("execution_id", sql.UniqueIdentifier, executionId)
        .input("step_name", sql.NVarChar, stepName)
        .query(`
          SELECT result FROM ${this.schema}.step_results
          WHERE execution_id = @execution_id AND step_name = @step_name
        `);
      const row = result.recordset[0];
      return row ? JSON.parse(row.result) : null;
    },
    // Upsert a step result. HOLDLOCK keeps the MERGE atomic so two
    // concurrent saves of the same (execution, step) pair cannot both take
    // the NOT MATCHED branch and violate uq_step_results_execution_step.
    saveStepResult: async (executionId, stepName, result) => {
      await this.pool.request()
        .input("execution_id", sql.UniqueIdentifier, executionId)
        .input("step_name", sql.NVarChar, stepName)
        .input("result", sql.NVarChar(sql.MAX), JSON.stringify(result))
        .input("started_at", sql.DateTime2, result.startedAt)
        .input("completed_at", sql.DateTime2, result.completedAt)
        .input("duration_ms", sql.Int, result.durationMs)
        .query(`
          MERGE ${this.schema}.step_results WITH (HOLDLOCK) AS target
          USING (SELECT @execution_id AS execution_id, @step_name AS step_name) AS source
          ON target.execution_id = source.execution_id AND target.step_name = source.step_name
          WHEN MATCHED THEN
            UPDATE SET
              result = @result,
              started_at = @started_at,
              completed_at = @completed_at,
              duration_ms = @duration_ms
          WHEN NOT MATCHED THEN
            INSERT (execution_id, step_name, result, started_at, completed_at, duration_ms)
            VALUES (@execution_id, @step_name, @result, @started_at, @completed_at, @duration_ms);
        `);
    }
  };
})();
|
|
589
|
+
// ============================================================================
|
|
590
|
+
// Event Operations
|
|
591
|
+
// ============================================================================
|
|
592
|
+
// Event operations: durable waiters stored in event_waiters, delivery via
// the realtime channel.
events = {
  // Deliver `data` to every execution waiting on `eventId`, claiming and
  // clearing the waiters in a single atomic DELETE ... OUTPUT. Returns the
  // number of waiters woken.
  publish: async (eventId, data) => {
    const claimed = await this.pool.request()
      .input("event_id", sql.NVarChar, eventId)
      .query(`
        DELETE FROM ${this.schema}.event_waiters
        OUTPUT DELETED.execution_id
        WHERE event_id = @event_id
      `);
    const waiters = claimed.recordset;
    for (const waiter of waiters) {
      await this.realtime.publish(`event:${eventId}:${waiter.execution_id}`, data);
    }
    return waiters.length;
  },
  // Register `executionId` as waiting for `eventId`, with an optional
  // timeout deadline.
  subscribe: async (eventId, executionId, timeout) => {
    await this.pool.request()
      .input("event_id", sql.NVarChar, eventId)
      .input("execution_id", sql.UniqueIdentifier, executionId)
      .input("timeout_at", sql.DateTime2, timeout)
      .query(`
        INSERT INTO ${this.schema}.event_waiters (event_id, execution_id, timeout_at)
        VALUES (@event_id, @execution_id, @timeout_at)
      `);
  },
  // List the executions currently waiting for `eventId`.
  getWaiters: async (eventId) => {
    const rows = await this.pool.request()
      .input("event_id", sql.NVarChar, eventId)
      .query(`
        SELECT execution_id, timeout_at FROM ${this.schema}.event_waiters
        WHERE event_id = @event_id
      `);
    return rows.recordset.map((waiter) => ({
      executionId: waiter.execution_id,
      timeoutAt: waiter.timeout_at
    }));
  }
};
|
|
624
|
+
// ============================================================================
|
|
625
|
+
// Leader Operations (using sp_getapplock)
|
|
626
|
+
// ============================================================================
|
|
627
|
+
// Leader election built on SQL Server application locks (sp_getapplock);
// the leader_locks table is observability bookkeeping only.
leader = {
  /**
   * Try to take the applock `lockId` without blocking; true when this
   * process now holds it. On success the leader_locks row is upserted.
   *
   * NOTE(review): the applock uses @LockOwner = 'Session', so it is tied to
   * whichever pooled connection ran this request. With pool max > 1 a later
   * release may execute on a different connection — confirm leader traffic
   * is pinned to one connection, otherwise the lock can leak.
   */
  acquire: async (lockId, ttlSeconds) => {
    try {
      // @LockTimeout = 0: return immediately instead of queueing.
      const result = await this.pool.request()
        .input("Resource", sql.NVarChar, lockId)
        .input("LockMode", sql.NVarChar, "Exclusive")
        .input("LockOwner", sql.NVarChar, "Session")
        .input("LockTimeout", sql.Int, 0)
        .query(`
          DECLARE @result INT
          EXEC @result = sp_getapplock
            @Resource = @Resource,
            @LockMode = @LockMode,
            @LockOwner = @LockOwner,
            @LockTimeout = @LockTimeout
          SELECT @result as lock_result
        `);
      // sp_getapplock returns >= 0 on success, negative on failure.
      const acquired = result.recordset[0].lock_result >= 0;
      if (acquired) {
        const expiresAt = new Date(Date.now() + ttlSeconds * 1e3);
        // HOLDLOCK keeps the MERGE upsert atomic under concurrent acquirers.
        await this.pool.request()
          .input("lock_id", sql.NVarChar, lockId)
          .input("owner", sql.NVarChar, this.ownerId)
          .input("expires_at", sql.DateTime2, expiresAt)
          .query(`
            MERGE ${this.schema}.leader_locks WITH (HOLDLOCK) AS target
            USING (SELECT @lock_id AS lock_id) AS source
            ON target.lock_id = source.lock_id
            WHEN MATCHED THEN
              UPDATE SET owner = @owner, expires_at = @expires_at, acquired_at = GETUTCDATE()
            WHEN NOT MATCHED THEN
              INSERT (lock_id, owner, expires_at)
              VALUES (@lock_id, @owner, @expires_at);
          `);
      }
      return acquired;
    } catch (error) {
      console.error("Failed to acquire leader lock:", error);
      return false;
    }
  },
  // Release the applock and drop the bookkeeping row. Best-effort: errors
  // are logged, never thrown.
  release: async (lockId) => {
    try {
      await this.pool.request()
        .input("Resource", sql.NVarChar, lockId)
        .input("LockOwner", sql.NVarChar, "Session")
        .query(`
          EXEC sp_releaseapplock
            @Resource = @Resource,
            @LockOwner = @LockOwner
        `);
      await this.pool.request()
        .input("lock_id", sql.NVarChar, lockId)
        .query(`DELETE FROM ${this.schema}.leader_locks WHERE lock_id = @lock_id`);
    } catch (error) {
      console.error("Failed to release leader lock:", error);
    }
  },
  // Extend the bookkeeping TTL; true when a row for `lockId` existed.
  renew: async (lockId, ttlSeconds) => {
    const expiresAt = new Date(Date.now() + ttlSeconds * 1e3);
    const result = await this.pool.request()
      .input("lock_id", sql.NVarChar, lockId)
      .input("expires_at", sql.DateTime2, expiresAt)
      .query(`
        UPDATE ${this.schema}.leader_locks
        SET expires_at = @expires_at
        WHERE lock_id = @lock_id
      `);
    return result.rowsAffected[0] > 0;
  }
};
|
|
684
|
+
// ============================================================================
|
|
685
|
+
// Realtime Operations (using EventEmitter - MSSQL has no native pub/sub)
|
|
686
|
+
// ============================================================================
|
|
687
|
+
// In-process pub/sub built on Node's EventEmitter, since SQL Server has
// no native notification channel. Events do NOT cross process boundaries:
// only subscribers inside this same adapter instance are notified.
realtime = {
  // Attach a listener for a channel; the returned function detaches it.
  subscribe: (channel, callback) => {
    const emitter = this.emitter;
    emitter.on(channel, callback);
    return () => {
      emitter.off(channel, callback);
    };
  },
  // Synchronous emit under the hood; async only to satisfy the
  // RealtimeOperations contract.
  publish: async (channel, data) => {
    this.emitter.emit(channel, data);
  }
};
|
|
698
|
+
};
|
|
699
|
+
// Expose the adapter as both a named export and the default export so
// consumers may use either import style.
var index_default = MssqlStorageAdapter;
export {
  MssqlStorageAdapter,
  index_default as default
};
//# sourceMappingURL=index.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"sources":["../src/index.ts"],"sourcesContent":["// ============================================================================\n// Microsoft SQL Server Storage Adapter\n// Enterprise-grade storage with ACID guarantees\n// ============================================================================\n\nimport sql from \"mssql\";\nimport { EventEmitter } from \"events\";\nimport type {\n StorageAdapter,\n QueueOperations,\n ExecutionOperations,\n EventOperations,\n LeaderOperations,\n RealtimeOperations,\n} from \"@stepflowjs/core/storage\";\nimport type {\n QueueJob,\n Execution,\n StepResult,\n EventWaiter,\n ListOptions,\n PopOptions,\n NackOptions,\n Unsubscribe,\n} from \"@stepflowjs/core\";\n\n// ============================================================================\n// Configuration Types\n// ============================================================================\n\nexport interface MssqlStorageOptions {\n server: string;\n database: string;\n user: string;\n password: string;\n port?: number;\n schema?: string;\n poolSize?: number;\n options?: {\n encrypt?: boolean;\n trustServerCertificate?: boolean;\n };\n}\n\n// ============================================================================\n// MSSQL Storage Adapter\n// ============================================================================\n\nexport class MssqlStorageAdapter implements StorageAdapter {\n private pool: sql.ConnectionPool;\n private schema: string;\n private emitter: EventEmitter;\n private connected = false;\n private ownerId: string;\n\n constructor(options: MssqlStorageOptions) {\n this.schema = options.schema ?? \"stepflow\";\n this.ownerId = `${process.pid}-${Date.now()}`;\n this.emitter = new EventEmitter();\n this.emitter.setMaxListeners(0); // Unlimited listeners\n\n const config: sql.config = {\n server: options.server,\n database: options.database,\n user: options.user,\n password: options.password,\n port: options.port ?? 
1433,\n pool: {\n max: options.poolSize ?? 20,\n min: 0,\n idleTimeoutMillis: 30000,\n },\n options: {\n encrypt: options.options?.encrypt ?? true,\n trustServerCertificate:\n options.options?.trustServerCertificate ?? false,\n enableArithAbort: true,\n },\n };\n\n this.pool = new sql.ConnectionPool(config);\n }\n\n // ============================================================================\n // Connection Lifecycle\n // ============================================================================\n\n async connect(): Promise<void> {\n await this.pool.connect();\n\n // Test connection\n await this.pool.request().query(\"SELECT 1\");\n\n // Run migrations\n await this.runMigrations();\n\n this.connected = true;\n }\n\n async disconnect(): Promise<void> {\n // Release all locks\n try {\n await this.pool\n .request()\n .query(`EXEC sp_releaseapplock @Resource = 'stepflow_leader'`);\n } catch {\n // Ignore errors on disconnect\n }\n\n await this.pool.close();\n this.emitter.removeAllListeners();\n this.connected = false;\n }\n\n async healthCheck(): Promise<boolean> {\n if (!this.connected) return false;\n try {\n await this.pool.request().query(\"SELECT 1\");\n return true;\n } catch {\n return false;\n }\n }\n\n // ============================================================================\n // Schema Migrations\n // ============================================================================\n\n private async runMigrations(): Promise<void> {\n // Create schema\n await this.pool.request().query(`\n IF NOT EXISTS (SELECT * FROM sys.schemas WHERE name = '${this.schema}')\n BEGIN\n EXEC('CREATE SCHEMA ${this.schema}')\n END\n `);\n\n // Jobs table\n await this.pool.request().query(`\n IF NOT EXISTS (SELECT * FROM sys.tables WHERE name = 'jobs' AND schema_id = SCHEMA_ID('${this.schema}'))\n BEGIN\n CREATE TABLE ${this.schema}.jobs (\n id UNIQUEIDENTIFIER PRIMARY KEY,\n workflow_id NVARCHAR(255) NOT NULL,\n event_name NVARCHAR(255) NOT NULL,\n payload NVARCHAR(MAX) 
NOT NULL,\n metadata NVARCHAR(MAX) NOT NULL DEFAULT '{}',\n priority INT NOT NULL DEFAULT 0,\n attempts INT NOT NULL DEFAULT 0,\n max_attempts INT NOT NULL DEFAULT 3,\n scheduled_for DATETIME2,\n locked_until DATETIME2,\n locked_by NVARCHAR(255),\n created_at DATETIME2 NOT NULL DEFAULT GETUTCDATE(),\n updated_at DATETIME2 NOT NULL DEFAULT GETUTCDATE()\n )\n END\n `);\n\n // Jobs indexes\n await this.pool.request().query(`\n IF NOT EXISTS (SELECT * FROM sys.indexes WHERE name = 'idx_jobs_workflow_id' AND object_id = OBJECT_ID('${this.schema}.jobs'))\n BEGIN\n CREATE INDEX idx_jobs_workflow_id ON ${this.schema}.jobs(workflow_id)\n END\n `);\n\n await this.pool.request().query(`\n IF NOT EXISTS (SELECT * FROM sys.indexes WHERE name = 'idx_jobs_scheduled_for' AND object_id = OBJECT_ID('${this.schema}.jobs'))\n BEGIN\n CREATE INDEX idx_jobs_scheduled_for ON ${this.schema}.jobs(scheduled_for) WHERE scheduled_for IS NOT NULL\n END\n `);\n\n await this.pool.request().query(`\n IF NOT EXISTS (SELECT * FROM sys.indexes WHERE name = 'idx_jobs_priority_created' AND object_id = OBJECT_ID('${this.schema}.jobs'))\n BEGIN\n CREATE INDEX idx_jobs_priority_created ON ${this.schema}.jobs(priority DESC, created_at ASC)\n END\n `);\n\n // Executions table\n await this.pool.request().query(`\n IF NOT EXISTS (SELECT * FROM sys.tables WHERE name = 'executions' AND schema_id = SCHEMA_ID('${this.schema}'))\n BEGIN\n CREATE TABLE ${this.schema}.executions (\n id UNIQUEIDENTIFIER PRIMARY KEY,\n run_id NVARCHAR(255) NOT NULL,\n workflow_id NVARCHAR(255) NOT NULL,\n workflow_version NVARCHAR(255),\n event_name NVARCHAR(255) NOT NULL,\n payload NVARCHAR(MAX) NOT NULL,\n status NVARCHAR(50) NOT NULL,\n result NVARCHAR(MAX),\n error NVARCHAR(MAX),\n metadata NVARCHAR(MAX) NOT NULL DEFAULT '{}',\n attempt INT NOT NULL DEFAULT 1,\n started_at DATETIME2 NOT NULL,\n completed_at DATETIME2,\n timeline NVARCHAR(MAX) NOT NULL DEFAULT '[]',\n idempotency_key NVARCHAR(255)\n )\n END\n `);\n\n // Executions 
indexes\n await this.pool.request().query(`\n IF NOT EXISTS (SELECT * FROM sys.indexes WHERE name = 'idx_executions_run_id' AND object_id = OBJECT_ID('${this.schema}.executions'))\n BEGIN\n CREATE INDEX idx_executions_run_id ON ${this.schema}.executions(run_id)\n END\n `);\n\n await this.pool.request().query(`\n IF NOT EXISTS (SELECT * FROM sys.indexes WHERE name = 'idx_executions_workflow_id' AND object_id = OBJECT_ID('${this.schema}.executions'))\n BEGIN\n CREATE INDEX idx_executions_workflow_id ON ${this.schema}.executions(workflow_id)\n END\n `);\n\n await this.pool.request().query(`\n IF NOT EXISTS (SELECT * FROM sys.indexes WHERE name = 'idx_executions_status' AND object_id = OBJECT_ID('${this.schema}.executions'))\n BEGIN\n CREATE INDEX idx_executions_status ON ${this.schema}.executions(status)\n END\n `);\n\n await this.pool.request().query(`\n IF NOT EXISTS (SELECT * FROM sys.indexes WHERE name = 'idx_executions_idempotency' AND object_id = OBJECT_ID('${this.schema}.executions'))\n BEGIN\n CREATE INDEX idx_executions_idempotency ON ${this.schema}.executions(workflow_id, idempotency_key) WHERE idempotency_key IS NOT NULL\n END\n `);\n\n // Step results table\n await this.pool.request().query(`\n IF NOT EXISTS (SELECT * FROM sys.tables WHERE name = 'step_results' AND schema_id = SCHEMA_ID('${this.schema}'))\n BEGIN\n CREATE TABLE ${this.schema}.step_results (\n id UNIQUEIDENTIFIER PRIMARY KEY DEFAULT NEWID(),\n execution_id UNIQUEIDENTIFIER NOT NULL,\n step_name NVARCHAR(255) NOT NULL,\n result NVARCHAR(MAX) NOT NULL,\n started_at DATETIME2 NOT NULL,\n completed_at DATETIME2 NOT NULL,\n duration_ms INT NOT NULL,\n CONSTRAINT uq_step_results_execution_step UNIQUE(execution_id, step_name),\n CONSTRAINT fk_step_results_execution FOREIGN KEY (execution_id) REFERENCES ${this.schema}.executions(id) ON DELETE CASCADE\n )\n END\n `);\n\n await this.pool.request().query(`\n IF NOT EXISTS (SELECT * FROM sys.indexes WHERE name = 'idx_step_results_execution_id' AND 
object_id = OBJECT_ID('${this.schema}.step_results'))\n BEGIN\n CREATE INDEX idx_step_results_execution_id ON ${this.schema}.step_results(execution_id)\n END\n `);\n\n // Event waiters table\n await this.pool.request().query(`\n IF NOT EXISTS (SELECT * FROM sys.tables WHERE name = 'event_waiters' AND schema_id = SCHEMA_ID('${this.schema}'))\n BEGIN\n CREATE TABLE ${this.schema}.event_waiters (\n id UNIQUEIDENTIFIER PRIMARY KEY DEFAULT NEWID(),\n event_id NVARCHAR(255) NOT NULL,\n execution_id UNIQUEIDENTIFIER NOT NULL,\n timeout_at DATETIME2,\n created_at DATETIME2 NOT NULL DEFAULT GETUTCDATE(),\n CONSTRAINT fk_event_waiters_execution FOREIGN KEY (execution_id) REFERENCES ${this.schema}.executions(id) ON DELETE CASCADE\n )\n END\n `);\n\n await this.pool.request().query(`\n IF NOT EXISTS (SELECT * FROM sys.indexes WHERE name = 'idx_event_waiters_event_id' AND object_id = OBJECT_ID('${this.schema}.event_waiters'))\n BEGIN\n CREATE INDEX idx_event_waiters_event_id ON ${this.schema}.event_waiters(event_id)\n END\n `);\n\n // Leader locks table\n await this.pool.request().query(`\n IF NOT EXISTS (SELECT * FROM sys.tables WHERE name = 'leader_locks' AND schema_id = SCHEMA_ID('${this.schema}'))\n BEGIN\n CREATE TABLE ${this.schema}.leader_locks (\n lock_id NVARCHAR(255) PRIMARY KEY,\n owner NVARCHAR(255) NOT NULL,\n expires_at DATETIME2 NOT NULL,\n acquired_at DATETIME2 NOT NULL DEFAULT GETUTCDATE()\n )\n END\n `);\n }\n\n // ============================================================================\n // Queue Operations\n // ============================================================================\n\n queue: QueueOperations = {\n push: async (job: QueueJob): Promise<string> => {\n await this.pool\n .request()\n .input(\"id\", sql.UniqueIdentifier, job.id)\n .input(\"workflow_id\", sql.NVarChar, job.workflowId)\n .input(\"event_name\", sql.NVarChar, job.eventName)\n .input(\"payload\", sql.NVarChar(sql.MAX), JSON.stringify(job.payload))\n .input(\"metadata\", 
sql.NVarChar(sql.MAX), JSON.stringify(job.metadata))\n .input(\"priority\", sql.Int, job.priority)\n .input(\"attempts\", sql.Int, job.attempts)\n .input(\"max_attempts\", sql.Int, job.maxAttempts)\n .input(\"scheduled_for\", sql.DateTime2, job.scheduledFor)\n .input(\"created_at\", sql.DateTime2, job.createdAt).query(`\n INSERT INTO ${this.schema}.jobs (\n id, workflow_id, event_name, payload, metadata, \n priority, attempts, max_attempts, scheduled_for, created_at\n ) VALUES (\n @id, @workflow_id, @event_name, @payload, @metadata,\n @priority, @attempts, @max_attempts, @scheduled_for, @created_at\n )\n `);\n return job.id;\n },\n\n pop: async (options?: PopOptions): Promise<QueueJob | null> => {\n const workerId = options?.workerId ?? `worker-${process.pid}`;\n const lockDuration = options?.lockDuration ?? 30000; // 30 seconds\n\n const result = await this.pool\n .request()\n .input(\"worker_id\", sql.NVarChar, workerId)\n .input(\"lock_duration\", sql.Int, lockDuration).query(`\n WITH NextJob AS (\n SELECT TOP 1 id\n FROM ${this.schema}.jobs WITH (UPDLOCK, READPAST)\n WHERE (scheduled_for IS NULL OR scheduled_for <= GETUTCDATE())\n AND (locked_until IS NULL OR locked_until < GETUTCDATE())\n ORDER BY priority DESC, created_at ASC\n )\n UPDATE j\n SET locked_until = DATEADD(MILLISECOND, @lock_duration, GETUTCDATE()),\n locked_by = @worker_id,\n updated_at = GETUTCDATE()\n OUTPUT \n INSERTED.id,\n INSERTED.workflow_id,\n INSERTED.event_name,\n INSERTED.payload,\n INSERTED.metadata,\n INSERTED.priority,\n INSERTED.attempts,\n INSERTED.max_attempts,\n INSERTED.scheduled_for,\n INSERTED.created_at\n FROM ${this.schema}.jobs j\n INNER JOIN NextJob ON j.id = NextJob.id\n `);\n\n if (result.recordset.length === 0) {\n return null;\n }\n\n const row = result.recordset[0];\n return {\n id: row.id,\n workflowId: row.workflow_id,\n eventName: row.event_name,\n payload: JSON.parse(row.payload),\n metadata: JSON.parse(row.metadata),\n priority: row.priority,\n attempts: 
row.attempts,\n maxAttempts: row.max_attempts,\n scheduledFor: row.scheduled_for,\n createdAt: row.created_at,\n };\n },\n\n ack: async (jobId: string): Promise<void> => {\n await this.pool\n .request()\n .input(\"id\", sql.UniqueIdentifier, jobId)\n .query(`DELETE FROM ${this.schema}.jobs WHERE id = @id`);\n },\n\n nack: async (jobId: string, options?: NackOptions): Promise<void> => {\n const delay = options?.delay ?? 0;\n const scheduledFor = delay > 0 ? new Date(Date.now() + delay) : null;\n\n await this.pool\n .request()\n .input(\"id\", sql.UniqueIdentifier, jobId)\n .input(\"scheduled_for\", sql.DateTime2, scheduledFor).query(`\n UPDATE ${this.schema}.jobs\n SET attempts = attempts + 1,\n scheduled_for = @scheduled_for,\n locked_until = NULL,\n locked_by = NULL,\n updated_at = GETUTCDATE()\n WHERE id = @id\n `);\n },\n\n schedule: async (job: QueueJob, executeAt: Date): Promise<string> => {\n await this.pool\n .request()\n .input(\"id\", sql.UniqueIdentifier, job.id)\n .input(\"workflow_id\", sql.NVarChar, job.workflowId)\n .input(\"event_name\", sql.NVarChar, job.eventName)\n .input(\"payload\", sql.NVarChar(sql.MAX), JSON.stringify(job.payload))\n .input(\"metadata\", sql.NVarChar(sql.MAX), JSON.stringify(job.metadata))\n .input(\"priority\", sql.Int, job.priority)\n .input(\"attempts\", sql.Int, job.attempts)\n .input(\"max_attempts\", sql.Int, job.maxAttempts)\n .input(\"scheduled_for\", sql.DateTime2, executeAt)\n .input(\"created_at\", sql.DateTime2, job.createdAt).query(`\n INSERT INTO ${this.schema}.jobs (\n id, workflow_id, event_name, payload, metadata, \n priority, attempts, max_attempts, scheduled_for, created_at\n ) VALUES (\n @id, @workflow_id, @event_name, @payload, @metadata,\n @priority, @attempts, @max_attempts, @scheduled_for, @created_at\n )\n `);\n return job.id;\n },\n\n getDelayed: async (): Promise<QueueJob[]> => {\n const result = await this.pool.request().query(`\n SELECT * FROM ${this.schema}.jobs\n WHERE scheduled_for IS NOT NULL 
AND scheduled_for > GETUTCDATE()\n ORDER BY scheduled_for ASC\n `);\n\n return result.recordset.map((row: sql.IRecordSet<any>[number]) => ({\n id: row.id,\n workflowId: row.workflow_id,\n eventName: row.event_name,\n payload: JSON.parse(row.payload),\n metadata: JSON.parse(row.metadata),\n priority: row.priority,\n attempts: row.attempts,\n maxAttempts: row.max_attempts,\n scheduledFor: row.scheduled_for,\n createdAt: row.created_at,\n }));\n },\n };\n\n // ============================================================================\n // Execution Operations\n // ============================================================================\n\n execution: ExecutionOperations = {\n create: async (execution: Execution): Promise<string> => {\n await this.pool\n .request()\n .input(\"id\", sql.UniqueIdentifier, execution.id)\n .input(\"run_id\", sql.NVarChar, execution.runId)\n .input(\"workflow_id\", sql.NVarChar, execution.workflowId)\n .input(\"workflow_version\", sql.NVarChar, execution.workflowVersion)\n .input(\"event_name\", sql.NVarChar, execution.eventName)\n .input(\n \"payload\",\n sql.NVarChar(sql.MAX),\n JSON.stringify(execution.payload),\n )\n .input(\"status\", sql.NVarChar, execution.status)\n .input(\n \"result\",\n sql.NVarChar(sql.MAX),\n execution.result ? JSON.stringify(execution.result) : null,\n )\n .input(\n \"error\",\n sql.NVarChar(sql.MAX),\n execution.error ? 
JSON.stringify(execution.error) : null,\n )\n .input(\n \"metadata\",\n sql.NVarChar(sql.MAX),\n JSON.stringify(execution.metadata),\n )\n .input(\"attempt\", sql.Int, execution.attempt)\n .input(\"started_at\", sql.DateTime2, execution.startedAt)\n .input(\"completed_at\", sql.DateTime2, execution.completedAt)\n .input(\n \"timeline\",\n sql.NVarChar(sql.MAX),\n JSON.stringify(execution.timeline),\n )\n .input(\n \"idempotency_key\",\n sql.NVarChar,\n execution.metadata.idempotencyKey as string | undefined,\n ).query(`\n INSERT INTO ${this.schema}.executions (\n id, run_id, workflow_id, workflow_version, event_name, \n payload, status, result, error, metadata, attempt, \n started_at, completed_at, timeline, idempotency_key\n ) VALUES (\n @id, @run_id, @workflow_id, @workflow_version, @event_name,\n @payload, @status, @result, @error, @metadata, @attempt,\n @started_at, @completed_at, @timeline, @idempotency_key\n )\n `);\n\n // Emit realtime event\n await this.realtime.publish(`execution:${execution.id}`, execution);\n\n return execution.id;\n },\n\n get: async (executionId: string): Promise<Execution | null> => {\n const result = await this.pool\n .request()\n .input(\"id\", sql.UniqueIdentifier, executionId).query(`\n SELECT \n e.*,\n (\n SELECT \n sr.step_name as name,\n 'completed' as status,\n JSON_QUERY(sr.result, '$.data') as result,\n sr.started_at as startedAt,\n sr.completed_at as completedAt,\n sr.duration_ms as durationMs\n FROM ${this.schema}.step_results sr\n WHERE sr.execution_id = e.id\n ORDER BY sr.started_at\n FOR JSON PATH\n ) as steps\n FROM ${this.schema}.executions e\n WHERE e.id = @id\n `);\n\n if (result.recordset.length === 0) {\n return null;\n }\n\n const row = result.recordset[0];\n return {\n id: row.id,\n runId: row.run_id,\n workflowId: row.workflow_id,\n workflowVersion: row.workflow_version,\n eventName: row.event_name,\n payload: JSON.parse(row.payload),\n status: row.status,\n result: row.result ? 
JSON.parse(row.result) : undefined,\n error: row.error ? JSON.parse(row.error) : undefined,\n steps: row.steps ? JSON.parse(row.steps) : [],\n metadata: JSON.parse(row.metadata),\n attempt: row.attempt,\n startedAt: row.started_at,\n completedAt: row.completed_at,\n timeline: JSON.parse(row.timeline),\n };\n },\n\n getByIdempotencyKey: async (\n workflowId: string,\n idempotencyKey: string,\n ): Promise<Execution | null> => {\n const result = await this.pool\n .request()\n .input(\"workflow_id\", sql.NVarChar, workflowId)\n .input(\"idempotency_key\", sql.NVarChar, idempotencyKey).query(`\n SELECT TOP 1\n e.*,\n (\n SELECT \n sr.step_name as name,\n 'completed' as status,\n JSON_QUERY(sr.result, '$.data') as result,\n sr.started_at as startedAt,\n sr.completed_at as completedAt,\n sr.duration_ms as durationMs\n FROM ${this.schema}.step_results sr\n WHERE sr.execution_id = e.id\n ORDER BY sr.started_at\n FOR JSON PATH\n ) as steps\n FROM ${this.schema}.executions e\n WHERE e.workflow_id = @workflow_id AND e.idempotency_key = @idempotency_key\n `);\n\n if (result.recordset.length === 0) {\n return null;\n }\n\n const row = result.recordset[0];\n return {\n id: row.id,\n runId: row.run_id,\n workflowId: row.workflow_id,\n workflowVersion: row.workflow_version,\n eventName: row.event_name,\n payload: JSON.parse(row.payload),\n status: row.status,\n result: row.result ? JSON.parse(row.result) : undefined,\n error: row.error ? JSON.parse(row.error) : undefined,\n steps: row.steps ? 
JSON.parse(row.steps) : [],\n metadata: JSON.parse(row.metadata),\n attempt: row.attempt,\n startedAt: row.started_at,\n completedAt: row.completed_at,\n timeline: JSON.parse(row.timeline),\n };\n },\n\n update: async (\n executionId: string,\n updates: Partial<Execution>,\n ): Promise<void> => {\n const setClauses: string[] = [];\n const request = this.pool.request();\n request.input(\"id\", sql.UniqueIdentifier, executionId);\n\n if (updates.status !== undefined) {\n setClauses.push(\"status = @status\");\n request.input(\"status\", sql.NVarChar, updates.status);\n }\n\n if (updates.result !== undefined) {\n setClauses.push(\"result = @result\");\n request.input(\n \"result\",\n sql.NVarChar(sql.MAX),\n JSON.stringify(updates.result),\n );\n }\n\n if (updates.error !== undefined) {\n setClauses.push(\"error = @error\");\n request.input(\n \"error\",\n sql.NVarChar(sql.MAX),\n JSON.stringify(updates.error),\n );\n }\n\n if (updates.metadata !== undefined) {\n setClauses.push(\n \"metadata = JSON_MODIFY(metadata, '$', JSON_QUERY(@metadata))\",\n );\n request.input(\n \"metadata\",\n sql.NVarChar(sql.MAX),\n JSON.stringify(updates.metadata),\n );\n }\n\n if (updates.completedAt !== undefined) {\n setClauses.push(\"completed_at = @completed_at\");\n request.input(\"completed_at\", sql.DateTime2, updates.completedAt);\n }\n\n if (updates.timeline !== undefined) {\n setClauses.push(\"timeline = @timeline\");\n request.input(\n \"timeline\",\n sql.NVarChar(sql.MAX),\n JSON.stringify(updates.timeline),\n );\n }\n\n if (setClauses.length === 0) {\n return;\n }\n\n await request.query(`\n UPDATE ${this.schema}.executions\n SET ${setClauses.join(\", \")}\n WHERE id = @id\n `);\n\n // Emit realtime event\n const execution = await this.execution.get(executionId);\n if (execution) {\n await this.realtime.publish(`execution:${executionId}`, execution);\n }\n },\n\n list: async (options: ListOptions): Promise<Execution[]> => {\n const whereClauses: string[] = [];\n const request 
= this.pool.request();\n\n if (options.workflowId) {\n whereClauses.push(\"e.workflow_id = @workflow_id\");\n request.input(\"workflow_id\", sql.NVarChar, options.workflowId);\n }\n\n if (options.status) {\n whereClauses.push(\"e.status = @status\");\n request.input(\"status\", sql.NVarChar, options.status);\n }\n\n if (options.runId) {\n whereClauses.push(\"e.run_id = @run_id\");\n request.input(\"run_id\", sql.NVarChar, options.runId);\n }\n\n const whereClause =\n whereClauses.length > 0 ? `WHERE ${whereClauses.join(\" AND \")}` : \"\";\n\n const limit = options.limit ?? 50;\n const offset = options.offset ?? 0;\n\n const result = await request.query(`\n SELECT \n e.*,\n (\n SELECT \n sr.step_name as name,\n 'completed' as status,\n JSON_QUERY(sr.result, '$.data') as result,\n sr.started_at as startedAt,\n sr.completed_at as completedAt,\n sr.duration_ms as durationMs\n FROM ${this.schema}.step_results sr\n WHERE sr.execution_id = e.id\n ORDER BY sr.started_at\n FOR JSON PATH\n ) as steps\n FROM ${this.schema}.executions e\n ${whereClause}\n ORDER BY e.started_at DESC\n OFFSET ${offset} ROWS\n FETCH NEXT ${limit} ROWS ONLY\n `);\n\n return result.recordset.map((row: sql.IRecordSet<any>[number]) => ({\n id: row.id,\n runId: row.run_id,\n workflowId: row.workflow_id,\n workflowVersion: row.workflow_version,\n eventName: row.event_name,\n payload: JSON.parse(row.payload),\n status: row.status,\n result: row.result ? JSON.parse(row.result) : undefined,\n error: row.error ? JSON.parse(row.error) : undefined,\n steps: row.steps ? 
JSON.parse(row.steps) : [],\n metadata: JSON.parse(row.metadata),\n attempt: row.attempt,\n startedAt: row.started_at,\n completedAt: row.completed_at,\n timeline: JSON.parse(row.timeline),\n }));\n },\n\n getStepResult: async <T = unknown>(\n executionId: string,\n stepName: string,\n ): Promise<StepResult<T> | null> => {\n const result = await this.pool\n .request()\n .input(\"execution_id\", sql.UniqueIdentifier, executionId)\n .input(\"step_name\", sql.NVarChar, stepName).query(`\n SELECT result FROM ${this.schema}.step_results\n WHERE execution_id = @execution_id AND step_name = @step_name\n `);\n\n if (result.recordset.length === 0) {\n return null;\n }\n\n return JSON.parse(result.recordset[0].result) as StepResult<T>;\n },\n\n saveStepResult: async (\n executionId: string,\n stepName: string,\n result: StepResult,\n ): Promise<void> => {\n await this.pool\n .request()\n .input(\"execution_id\", sql.UniqueIdentifier, executionId)\n .input(\"step_name\", sql.NVarChar, stepName)\n .input(\"result\", sql.NVarChar(sql.MAX), JSON.stringify(result))\n .input(\"started_at\", sql.DateTime2, result.startedAt)\n .input(\"completed_at\", sql.DateTime2, result.completedAt)\n .input(\"duration_ms\", sql.Int, result.durationMs).query(`\n MERGE ${this.schema}.step_results AS target\n USING (SELECT @execution_id AS execution_id, @step_name AS step_name) AS source\n ON target.execution_id = source.execution_id AND target.step_name = source.step_name\n WHEN MATCHED THEN\n UPDATE SET \n result = @result,\n started_at = @started_at,\n completed_at = @completed_at,\n duration_ms = @duration_ms\n WHEN NOT MATCHED THEN\n INSERT (execution_id, step_name, result, started_at, completed_at, duration_ms)\n VALUES (@execution_id, @step_name, @result, @started_at, @completed_at, @duration_ms);\n `);\n },\n };\n\n // ============================================================================\n // Event Operations\n // 
============================================================================\n\n events: EventOperations = {\n publish: async (eventId: string, data: unknown): Promise<number> => {\n // Get and delete waiters\n const result = await this.pool\n .request()\n .input(\"event_id\", sql.NVarChar, eventId).query(`\n DELETE FROM ${this.schema}.event_waiters\n OUTPUT DELETED.execution_id\n WHERE event_id = @event_id\n `);\n\n // Notify each waiter via realtime\n for (const row of result.recordset) {\n await this.realtime.publish(\n `event:${eventId}:${row.execution_id}`,\n data,\n );\n }\n\n return result.recordset.length;\n },\n\n subscribe: async (\n eventId: string,\n executionId: string,\n timeout: Date,\n ): Promise<void> => {\n await this.pool\n .request()\n .input(\"event_id\", sql.NVarChar, eventId)\n .input(\"execution_id\", sql.UniqueIdentifier, executionId)\n .input(\"timeout_at\", sql.DateTime2, timeout).query(`\n INSERT INTO ${this.schema}.event_waiters (event_id, execution_id, timeout_at)\n VALUES (@event_id, @execution_id, @timeout_at)\n `);\n },\n\n getWaiters: async (eventId: string): Promise<EventWaiter[]> => {\n const result = await this.pool\n .request()\n .input(\"event_id\", sql.NVarChar, eventId).query(`\n SELECT execution_id, timeout_at FROM ${this.schema}.event_waiters\n WHERE event_id = @event_id\n `);\n\n return result.recordset.map((row: sql.IRecordSet<any>[number]) => ({\n executionId: row.execution_id,\n timeoutAt: row.timeout_at,\n }));\n },\n };\n\n // ============================================================================\n // Leader Operations (using sp_getapplock)\n // ============================================================================\n\n leader: LeaderOperations = {\n acquire: async (lockId: string, ttlSeconds: number): Promise<boolean> => {\n try {\n // Try to acquire application lock with immediate timeout\n const result = await this.pool\n .request()\n .input(\"Resource\", sql.NVarChar, lockId)\n .input(\"LockMode\", 
sql.NVarChar, \"Exclusive\")\n .input(\"LockOwner\", sql.NVarChar, \"Session\")\n .input(\"LockTimeout\", sql.Int, 0).query(`\n DECLARE @result INT\n EXEC @result = sp_getapplock \n @Resource = @Resource,\n @LockMode = @LockMode,\n @LockOwner = @LockOwner,\n @LockTimeout = @LockTimeout\n SELECT @result as lock_result\n `);\n\n const lockResult = result.recordset[0].lock_result;\n\n // Lock acquired if result >= 0\n // 0 = success, 1 = granted after waiting (won't happen with timeout=0)\n // -1 = timeout, -2 = cancelled, -3 = deadlock, -999 = parameter/other error\n const acquired = lockResult >= 0;\n\n if (acquired) {\n // Store lock metadata\n const expiresAt = new Date(Date.now() + ttlSeconds * 1000);\n\n await this.pool\n .request()\n .input(\"lock_id\", sql.NVarChar, lockId)\n .input(\"owner\", sql.NVarChar, this.ownerId)\n .input(\"expires_at\", sql.DateTime2, expiresAt).query(`\n MERGE ${this.schema}.leader_locks AS target\n USING (SELECT @lock_id AS lock_id) AS source\n ON target.lock_id = source.lock_id\n WHEN MATCHED THEN\n UPDATE SET owner = @owner, expires_at = @expires_at, acquired_at = GETUTCDATE()\n WHEN NOT MATCHED THEN\n INSERT (lock_id, owner, expires_at)\n VALUES (@lock_id, @owner, @expires_at);\n `);\n }\n\n return acquired;\n } catch (error) {\n console.error(\"Failed to acquire leader lock:\", error);\n return false;\n }\n },\n\n release: async (lockId: string): Promise<void> => {\n try {\n // Release application lock\n await this.pool\n .request()\n .input(\"Resource\", sql.NVarChar, lockId)\n .input(\"LockOwner\", sql.NVarChar, \"Session\").query(`\n EXEC sp_releaseapplock \n @Resource = @Resource,\n @LockOwner = @LockOwner\n `);\n\n // Remove lock metadata\n await this.pool\n .request()\n .input(\"lock_id\", sql.NVarChar, lockId)\n .query(\n `DELETE FROM ${this.schema}.leader_locks WHERE lock_id = @lock_id`,\n );\n } catch (error) {\n console.error(\"Failed to release leader lock:\", error);\n }\n },\n\n renew: async (lockId: string, 
ttlSeconds: number): Promise<boolean> => {\n const expiresAt = new Date(Date.now() + ttlSeconds * 1000);\n\n const result = await this.pool\n .request()\n .input(\"lock_id\", sql.NVarChar, lockId)\n .input(\"expires_at\", sql.DateTime2, expiresAt).query(`\n UPDATE ${this.schema}.leader_locks\n SET expires_at = @expires_at\n WHERE lock_id = @lock_id\n `);\n\n return result.rowsAffected[0] > 0;\n },\n };\n\n // ============================================================================\n // Realtime Operations (using EventEmitter - MSSQL has no native pub/sub)\n // ============================================================================\n\n realtime: RealtimeOperations = {\n subscribe: (\n channel: string,\n callback: (data: unknown) => void,\n ): Unsubscribe => {\n this.emitter.on(channel, callback);\n\n return () => {\n this.emitter.off(channel, callback);\n };\n },\n\n publish: async (channel: string, data: unknown): Promise<void> => {\n this.emitter.emit(channel, data);\n },\n };\n}\n\n// Default export\nexport default 
MssqlStorageAdapter;\n"],"mappings":";AAKA,OAAO,SAAS;AAChB,SAAS,oBAAoB;AA0CtB,IAAM,sBAAN,MAAoD;AAAA,EACjD;AAAA,EACA;AAAA,EACA;AAAA,EACA,YAAY;AAAA,EACZ;AAAA,EAER,YAAY,SAA8B;AACxC,SAAK,SAAS,QAAQ,UAAU;AAChC,SAAK,UAAU,GAAG,QAAQ,GAAG,IAAI,KAAK,IAAI,CAAC;AAC3C,SAAK,UAAU,IAAI,aAAa;AAChC,SAAK,QAAQ,gBAAgB,CAAC;AAE9B,UAAM,SAAqB;AAAA,MACzB,QAAQ,QAAQ;AAAA,MAChB,UAAU,QAAQ;AAAA,MAClB,MAAM,QAAQ;AAAA,MACd,UAAU,QAAQ;AAAA,MAClB,MAAM,QAAQ,QAAQ;AAAA,MACtB,MAAM;AAAA,QACJ,KAAK,QAAQ,YAAY;AAAA,QACzB,KAAK;AAAA,QACL,mBAAmB;AAAA,MACrB;AAAA,MACA,SAAS;AAAA,QACP,SAAS,QAAQ,SAAS,WAAW;AAAA,QACrC,wBACE,QAAQ,SAAS,0BAA0B;AAAA,QAC7C,kBAAkB;AAAA,MACpB;AAAA,IACF;AAEA,SAAK,OAAO,IAAI,IAAI,eAAe,MAAM;AAAA,EAC3C;AAAA;AAAA;AAAA;AAAA,EAMA,MAAM,UAAyB;AAC7B,UAAM,KAAK,KAAK,QAAQ;AAGxB,UAAM,KAAK,KAAK,QAAQ,EAAE,MAAM,UAAU;AAG1C,UAAM,KAAK,cAAc;AAEzB,SAAK,YAAY;AAAA,EACnB;AAAA,EAEA,MAAM,aAA4B;AAEhC,QAAI;AACF,YAAM,KAAK,KACR,QAAQ,EACR,MAAM,sDAAsD;AAAA,IACjE,QAAQ;AAAA,IAER;AAEA,UAAM,KAAK,KAAK,MAAM;AACtB,SAAK,QAAQ,mBAAmB;AAChC,SAAK,YAAY;AAAA,EACnB;AAAA,EAEA,MAAM,cAAgC;AACpC,QAAI,CAAC,KAAK,UAAW,QAAO;AAC5B,QAAI;AACF,YAAM,KAAK,KAAK,QAAQ,EAAE,MAAM,UAAU;AAC1C,aAAO;AAAA,IACT,QAAQ;AACN,aAAO;AAAA,IACT;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAMA,MAAc,gBAA+B;AAE3C,UAAM,KAAK,KAAK,QAAQ,EAAE,MAAM;AAAA,+DAC2B,KAAK,MAAM;AAAA;AAAA,8BAE5C,KAAK,MAAM;AAAA;AAAA,KAEpC;AAGD,UAAM,KAAK,KAAK,QAAQ,EAAE,MAAM;AAAA,+FAC2D,KAAK,MAAM;AAAA;AAAA,uBAEnF,KAAK,MAAM;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,KAgB7B;AAGD,UAAM,KAAK,KAAK,QAAQ,EAAE,MAAM;AAAA,gHAC4E,KAAK,MAAM;AAAA;AAAA,+CAE5E,KAAK,MAAM;AAAA;AAAA,KAErD;AAED,UAAM,KAAK,KAAK,QAAQ,EAAE,MAAM;AAAA,kHAC8E,KAAK,MAAM;AAAA;AAAA,iDAE5E,KAAK,MAAM;AAAA;AAAA,KAEvD;AAED,UAAM,KAAK,KAAK,QAAQ,EAAE,MAAM;AAAA,qHACiF,KAAK,MAAM;AAAA;AAAA,oDAE5E,KAAK,MAAM;AAAA;AAAA,KAE1D;AAGD,UAAM,KAAK,KAAK,QAAQ,EAAE,MAAM;AAAA,qGACiE,KAAK,MAAM;AAAA;AAAA,uBAEzF,KAAK,MAAM;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,KAkB7B;AAGD,UAAM,KAAK,KAAK,QAAQ,EAAE,MAAM;AAAA,iHAC6E,
KAAK,MAAM;AAAA;AAAA,gDAE5E,KAAK,MAAM;AAAA;AAAA,KAEtD;AAED,UAAM,KAAK,KAAK,QAAQ,EAAE,MAAM;AAAA,sHACkF,KAAK,MAAM;AAAA;AAAA,qDAE5E,KAAK,MAAM;AAAA;AAAA,KAE3D;AAED,UAAM,KAAK,KAAK,QAAQ,EAAE,MAAM;AAAA,iHAC6E,KAAK,MAAM;AAAA;AAAA,gDAE5E,KAAK,MAAM;AAAA;AAAA,KAEtD;AAED,UAAM,KAAK,KAAK,QAAQ,EAAE,MAAM;AAAA,sHACkF,KAAK,MAAM;AAAA;AAAA,qDAE5E,KAAK,MAAM;AAAA;AAAA,KAE3D;AAGD,UAAM,KAAK,KAAK,QAAQ,EAAE,MAAM;AAAA,uGACmE,KAAK,MAAM;AAAA;AAAA,uBAE3F,KAAK,MAAM;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,uFASqD,KAAK,MAAM;AAAA;AAAA;AAAA,KAG7F;AAED,UAAM,KAAK,KAAK,QAAQ,EAAE,MAAM;AAAA,yHACqF,KAAK,MAAM;AAAA;AAAA,wDAE5E,KAAK,MAAM;AAAA;AAAA,KAE9D;AAGD,UAAM,KAAK,KAAK,QAAQ,EAAE,MAAM;AAAA,wGACoE,KAAK,MAAM;AAAA;AAAA,uBAE5F,KAAK,MAAM;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,wFAMsD,KAAK,MAAM;AAAA;AAAA;AAAA,KAG9F;AAED,UAAM,KAAK,KAAK,QAAQ,EAAE,MAAM;AAAA,sHACkF,KAAK,MAAM;AAAA;AAAA,qDAE5E,KAAK,MAAM;AAAA;AAAA,KAE3D;AAGD,UAAM,KAAK,KAAK,QAAQ,EAAE,MAAM;AAAA,uGACmE,KAAK,MAAM;AAAA;AAAA,uBAE3F,KAAK,MAAM;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,KAO7B;AAAA,EACH;AAAA;AAAA;AAAA;AAAA,EAMA,QAAyB;AAAA,IACvB,MAAM,OAAO,QAAmC;AAC9C,YAAM,KAAK,KACR,QAAQ,EACR,MAAM,MAAM,IAAI,kBAAkB,IAAI,EAAE,EACxC,MAAM,eAAe,IAAI,UAAU,IAAI,UAAU,EACjD,MAAM,cAAc,IAAI,UAAU,IAAI,SAAS,EAC/C,MAAM,WAAW,IAAI,SAAS,IAAI,GAAG,GAAG,KAAK,UAAU,IAAI,OAAO,CAAC,EACnE,MAAM,YAAY,IAAI,SAAS,IAAI,GAAG,GAAG,KAAK,UAAU,IAAI,QAAQ,CAAC,EACrE,MAAM,YAAY,IAAI,KAAK,IAAI,QAAQ,EACvC,MAAM,YAAY,IAAI,KAAK,IAAI,QAAQ,EACvC,MAAM,gBAAgB,IAAI,KAAK,IAAI,WAAW,EAC9C,MAAM,iBAAiB,IAAI,WAAW,IAAI,YAAY,EACtD,MAAM,cAAc,IAAI,WAAW,IAAI,SAAS,EAAE,MAAM;AAAA,wBACzC,KAAK,MAAM;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,SAO1B;AACH,aAAO,IAAI;AAAA,IACb;AAAA,IAEA,KAAK,OAAO,YAAmD;AAC7D,YAAM,WAAW,SAAS,YAAY,UAAU,QAAQ,GAAG;AAC3D,YAAM,eAAe,SAAS,gBAAgB;AAE9C,YAAM,SAAS,MAAM,KAAK,KACvB,QAAQ,EACR,MAAM,aAAa,IAAI,UAAU,QAAQ,EACzC,MAAM,iBAAiB,IAAI,KAAK,YAAY,EAAE,MAAM;AAAA;AAAA;AAAA,mBAG1C,KAAK,MAAM;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,iBAoBb,KAAK,MAAM;AAAA;AAAA,SAEnB;AAEH,UAAI,OAAO,U
AAU,WAAW,GAAG;AACjC,eAAO;AAAA,MACT;AAEA,YAAM,MAAM,OAAO,UAAU,CAAC;AAC9B,aAAO;AAAA,QACL,IAAI,IAAI;AAAA,QACR,YAAY,IAAI;AAAA,QAChB,WAAW,IAAI;AAAA,QACf,SAAS,KAAK,MAAM,IAAI,OAAO;AAAA,QAC/B,UAAU,KAAK,MAAM,IAAI,QAAQ;AAAA,QACjC,UAAU,IAAI;AAAA,QACd,UAAU,IAAI;AAAA,QACd,aAAa,IAAI;AAAA,QACjB,cAAc,IAAI;AAAA,QAClB,WAAW,IAAI;AAAA,MACjB;AAAA,IACF;AAAA,IAEA,KAAK,OAAO,UAAiC;AAC3C,YAAM,KAAK,KACR,QAAQ,EACR,MAAM,MAAM,IAAI,kBAAkB,KAAK,EACvC,MAAM,eAAe,KAAK,MAAM,sBAAsB;AAAA,IAC3D;AAAA,IAEA,MAAM,OAAO,OAAe,YAAyC;AACnE,YAAM,QAAQ,SAAS,SAAS;AAChC,YAAM,eAAe,QAAQ,IAAI,IAAI,KAAK,KAAK,IAAI,IAAI,KAAK,IAAI;AAEhE,YAAM,KAAK,KACR,QAAQ,EACR,MAAM,MAAM,IAAI,kBAAkB,KAAK,EACvC,MAAM,iBAAiB,IAAI,WAAW,YAAY,EAAE,MAAM;AAAA,mBAChD,KAAK,MAAM;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,SAOrB;AAAA,IACL;AAAA,IAEA,UAAU,OAAO,KAAe,cAAqC;AACnE,YAAM,KAAK,KACR,QAAQ,EACR,MAAM,MAAM,IAAI,kBAAkB,IAAI,EAAE,EACxC,MAAM,eAAe,IAAI,UAAU,IAAI,UAAU,EACjD,MAAM,cAAc,IAAI,UAAU,IAAI,SAAS,EAC/C,MAAM,WAAW,IAAI,SAAS,IAAI,GAAG,GAAG,KAAK,UAAU,IAAI,OAAO,CAAC,EACnE,MAAM,YAAY,IAAI,SAAS,IAAI,GAAG,GAAG,KAAK,UAAU,IAAI,QAAQ,CAAC,EACrE,MAAM,YAAY,IAAI,KAAK,IAAI,QAAQ,EACvC,MAAM,YAAY,IAAI,KAAK,IAAI,QAAQ,EACvC,MAAM,gBAAgB,IAAI,KAAK,IAAI,WAAW,EAC9C,MAAM,iBAAiB,IAAI,WAAW,SAAS,EAC/C,MAAM,cAAc,IAAI,WAAW,IAAI,SAAS,EAAE,MAAM;AAAA,wBACzC,KAAK,MAAM;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,SAO1B;AACH,aAAO,IAAI;AAAA,IACb;AAAA,IAEA,YAAY,YAAiC;AAC3C,YAAM,SAAS,MAAM,KAAK,KAAK,QAAQ,EAAE,MAAM;AAAA,wBAC7B,KAAK,MAAM;AAAA;AAAA;AAAA,OAG5B;AAED,aAAO,OAAO,UAAU,IAAI,CAAC,SAAsC;AAAA,QACjE,IAAI,IAAI;AAAA,QACR,YAAY,IAAI;AAAA,QAChB,WAAW,IAAI;AAAA,QACf,SAAS,KAAK,MAAM,IAAI,OAAO;AAAA,QAC/B,UAAU,KAAK,MAAM,IAAI,QAAQ;AAAA,QACjC,UAAU,IAAI;AAAA,QACd,UAAU,IAAI;AAAA,QACd,aAAa,IAAI;AAAA,QACjB,cAAc,IAAI;AAAA,QAClB,WAAW,IAAI;AAAA,MACjB,EAAE;AAAA,IACJ;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAMA,YAAiC;AAAA,IAC/B,QAAQ,OAAO,cAA0C;AACvD,YAAM,KAAK,KACR,QAAQ,EACR,MAAM,MAAM,IAAI,kBAAkB,UAAU,EAAE,EAC9C,MAAM,UAAU,IAAI,UAAU,UAAU,KAAK,EAC7C,MAAM,eAAe,IAAI,UAAU,UAAU,UAAU,EACvD,MAAM,oBAAoB,IAAI,UAAU,UAAU,eAAe,EACjE,MAAM,cAAc,IAAI,UAA
U,UAAU,SAAS,EACrD;AAAA,QACC;AAAA,QACA,IAAI,SAAS,IAAI,GAAG;AAAA,QACpB,KAAK,UAAU,UAAU,OAAO;AAAA,MAClC,EACC,MAAM,UAAU,IAAI,UAAU,UAAU,MAAM,EAC9C;AAAA,QACC;AAAA,QACA,IAAI,SAAS,IAAI,GAAG;AAAA,QACpB,UAAU,SAAS,KAAK,UAAU,UAAU,MAAM,IAAI;AAAA,MACxD,EACC;AAAA,QACC;AAAA,QACA,IAAI,SAAS,IAAI,GAAG;AAAA,QACpB,UAAU,QAAQ,KAAK,UAAU,UAAU,KAAK,IAAI;AAAA,MACtD,EACC;AAAA,QACC;AAAA,QACA,IAAI,SAAS,IAAI,GAAG;AAAA,QACpB,KAAK,UAAU,UAAU,QAAQ;AAAA,MACnC,EACC,MAAM,WAAW,IAAI,KAAK,UAAU,OAAO,EAC3C,MAAM,cAAc,IAAI,WAAW,UAAU,SAAS,EACtD,MAAM,gBAAgB,IAAI,WAAW,UAAU,WAAW,EAC1D;AAAA,QACC;AAAA,QACA,IAAI,SAAS,IAAI,GAAG;AAAA,QACpB,KAAK,UAAU,UAAU,QAAQ;AAAA,MACnC,EACC;AAAA,QACC;AAAA,QACA,IAAI;AAAA,QACJ,UAAU,SAAS;AAAA,MACrB,EAAE,MAAM;AAAA,wBACQ,KAAK,MAAM;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,SAS1B;AAGH,YAAM,KAAK,SAAS,QAAQ,aAAa,UAAU,EAAE,IAAI,SAAS;AAElE,aAAO,UAAU;AAAA,IACnB;AAAA,IAEA,KAAK,OAAO,gBAAmD;AAC7D,YAAM,SAAS,MAAM,KAAK,KACvB,QAAQ,EACR,MAAM,MAAM,IAAI,kBAAkB,WAAW,EAAE,MAAM;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,qBAWzC,KAAK,MAAM;AAAA;AAAA;AAAA;AAAA;AAAA,iBAKf,KAAK,MAAM;AAAA;AAAA,SAEnB;AAEH,UAAI,OAAO,UAAU,WAAW,GAAG;AACjC,eAAO;AAAA,MACT;AAEA,YAAM,MAAM,OAAO,UAAU,CAAC;AAC9B,aAAO;AAAA,QACL,IAAI,IAAI;AAAA,QACR,OAAO,IAAI;AAAA,QACX,YAAY,IAAI;AAAA,QAChB,iBAAiB,IAAI;AAAA,QACrB,WAAW,IAAI;AAAA,QACf,SAAS,KAAK,MAAM,IAAI,OAAO;AAAA,QAC/B,QAAQ,IAAI;AAAA,QACZ,QAAQ,IAAI,SAAS,KAAK,MAAM,IAAI,MAAM,IAAI;AAAA,QAC9C,OAAO,IAAI,QAAQ,KAAK,MAAM,IAAI,KAAK,IAAI;AAAA,QAC3C,OAAO,IAAI,QAAQ,KAAK,MAAM,IAAI,KAAK,IAAI,CAAC;AAAA,QAC5C,UAAU,KAAK,MAAM,IAAI,QAAQ;AAAA,QACjC,SAAS,IAAI;AAAA,QACb,WAAW,IAAI;AAAA,QACf,aAAa,IAAI;AAAA,QACjB,UAAU,KAAK,MAAM,IAAI,QAAQ;AAAA,MACnC;AAAA,IACF;AAAA,IAEA,qBAAqB,OACnB,YACA,mBAC8B;AAC9B,YAAM,SAAS,MAAM,KAAK,KACvB,QAAQ,EACR,MAAM,eAAe,IAAI,UAAU,UAAU,EAC7C,MAAM,mBAAmB,IAAI,UAAU,cAAc,EAAE,MAAM;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,qBAWjD,KAAK,MAAM;AAAA;AAAA;AAAA;AAAA;AAAA,iBAKf,KAAK,MAAM;AAAA;AAAA,SAEnB;AAEH,UAAI,OAAO,UAAU,WAAW,GAAG;AACjC,eAAO;AAAA,MACT;AAEA,YAAM,MAAM,OAAO,UAAU,CAAC;AAC
9B,aAAO;AAAA,QACL,IAAI,IAAI;AAAA,QACR,OAAO,IAAI;AAAA,QACX,YAAY,IAAI;AAAA,QAChB,iBAAiB,IAAI;AAAA,QACrB,WAAW,IAAI;AAAA,QACf,SAAS,KAAK,MAAM,IAAI,OAAO;AAAA,QAC/B,QAAQ,IAAI;AAAA,QACZ,QAAQ,IAAI,SAAS,KAAK,MAAM,IAAI,MAAM,IAAI;AAAA,QAC9C,OAAO,IAAI,QAAQ,KAAK,MAAM,IAAI,KAAK,IAAI;AAAA,QAC3C,OAAO,IAAI,QAAQ,KAAK,MAAM,IAAI,KAAK,IAAI,CAAC;AAAA,QAC5C,UAAU,KAAK,MAAM,IAAI,QAAQ;AAAA,QACjC,SAAS,IAAI;AAAA,QACb,WAAW,IAAI;AAAA,QACf,aAAa,IAAI;AAAA,QACjB,UAAU,KAAK,MAAM,IAAI,QAAQ;AAAA,MACnC;AAAA,IACF;AAAA,IAEA,QAAQ,OACN,aACA,YACkB;AAClB,YAAM,aAAuB,CAAC;AAC9B,YAAM,UAAU,KAAK,KAAK,QAAQ;AAClC,cAAQ,MAAM,MAAM,IAAI,kBAAkB,WAAW;AAErD,UAAI,QAAQ,WAAW,QAAW;AAChC,mBAAW,KAAK,kBAAkB;AAClC,gBAAQ,MAAM,UAAU,IAAI,UAAU,QAAQ,MAAM;AAAA,MACtD;AAEA,UAAI,QAAQ,WAAW,QAAW;AAChC,mBAAW,KAAK,kBAAkB;AAClC,gBAAQ;AAAA,UACN;AAAA,UACA,IAAI,SAAS,IAAI,GAAG;AAAA,UACpB,KAAK,UAAU,QAAQ,MAAM;AAAA,QAC/B;AAAA,MACF;AAEA,UAAI,QAAQ,UAAU,QAAW;AAC/B,mBAAW,KAAK,gBAAgB;AAChC,gBAAQ;AAAA,UACN;AAAA,UACA,IAAI,SAAS,IAAI,GAAG;AAAA,UACpB,KAAK,UAAU,QAAQ,KAAK;AAAA,QAC9B;AAAA,MACF;AAEA,UAAI,QAAQ,aAAa,QAAW;AAClC,mBAAW;AAAA,UACT;AAAA,QACF;AACA,gBAAQ;AAAA,UACN;AAAA,UACA,IAAI,SAAS,IAAI,GAAG;AAAA,UACpB,KAAK,UAAU,QAAQ,QAAQ;AAAA,QACjC;AAAA,MACF;AAEA,UAAI,QAAQ,gBAAgB,QAAW;AACrC,mBAAW,KAAK,8BAA8B;AAC9C,gBAAQ,MAAM,gBAAgB,IAAI,WAAW,QAAQ,WAAW;AAAA,MAClE;AAEA,UAAI,QAAQ,aAAa,QAAW;AAClC,mBAAW,KAAK,sBAAsB;AACtC,gBAAQ;AAAA,UACN;AAAA,UACA,IAAI,SAAS,IAAI,GAAG;AAAA,UACpB,KAAK,UAAU,QAAQ,QAAQ;AAAA,QACjC;AAAA,MACF;AAEA,UAAI,WAAW,WAAW,GAAG;AAC3B;AAAA,MACF;AAEA,YAAM,QAAQ,MAAM;AAAA,iBACT,KAAK,MAAM;AAAA,cACd,WAAW,KAAK,IAAI,CAAC;AAAA;AAAA,OAE5B;AAGD,YAAM,YAAY,MAAM,KAAK,UAAU,IAAI,WAAW;AACtD,UAAI,WAAW;AACb,cAAM,KAAK,SAAS,QAAQ,aAAa,WAAW,IAAI,SAAS;AAAA,MACnE;AAAA,IACF;AAAA,IAEA,MAAM,OAAO,YAA+C;AAC1D,YAAM,eAAyB,CAAC;AAChC,YAAM,UAAU,KAAK,KAAK,QAAQ;AAElC,UAAI,QAAQ,YAAY;AACtB,qBAAa,KAAK,8BAA8B;AAChD,gBAAQ,MAAM,eAAe,IAAI,UAAU,QAAQ,UAAU;AAAA,MAC/D;AAEA,UAAI,QAAQ,QAAQ;AAClB,qBAAa,KAAK,oBAAoB;AACtC,gBAAQ,MAAM,UAAU,IAAI,UAAU,QAAQ,MAAM;AAAA,MACtD;AAEA,UAAI,QAAQ,OAAO;AACjB,qBAAa,KAAK,oBAAoB;AAC
tC,gBAAQ,MAAM,UAAU,IAAI,UAAU,QAAQ,KAAK;AAAA,MACrD;AAEA,YAAM,cACJ,aAAa,SAAS,IAAI,SAAS,aAAa,KAAK,OAAO,CAAC,KAAK;AAEpE,YAAM,QAAQ,QAAQ,SAAS;AAC/B,YAAM,SAAS,QAAQ,UAAU;AAEjC,YAAM,SAAS,MAAM,QAAQ,MAAM;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,mBAWtB,KAAK,MAAM;AAAA;AAAA;AAAA;AAAA;AAAA,eAKf,KAAK,MAAM;AAAA,UAChB,WAAW;AAAA;AAAA,iBAEJ,MAAM;AAAA,qBACF,KAAK;AAAA,OACnB;AAED,aAAO,OAAO,UAAU,IAAI,CAAC,SAAsC;AAAA,QACjE,IAAI,IAAI;AAAA,QACR,OAAO,IAAI;AAAA,QACX,YAAY,IAAI;AAAA,QAChB,iBAAiB,IAAI;AAAA,QACrB,WAAW,IAAI;AAAA,QACf,SAAS,KAAK,MAAM,IAAI,OAAO;AAAA,QAC/B,QAAQ,IAAI;AAAA,QACZ,QAAQ,IAAI,SAAS,KAAK,MAAM,IAAI,MAAM,IAAI;AAAA,QAC9C,OAAO,IAAI,QAAQ,KAAK,MAAM,IAAI,KAAK,IAAI;AAAA,QAC3C,OAAO,IAAI,QAAQ,KAAK,MAAM,IAAI,KAAK,IAAI,CAAC;AAAA,QAC5C,UAAU,KAAK,MAAM,IAAI,QAAQ;AAAA,QACjC,SAAS,IAAI;AAAA,QACb,WAAW,IAAI;AAAA,QACf,aAAa,IAAI;AAAA,QACjB,UAAU,KAAK,MAAM,IAAI,QAAQ;AAAA,MACnC,EAAE;AAAA,IACJ;AAAA,IAEA,eAAe,OACb,aACA,aACkC;AAClC,YAAM,SAAS,MAAM,KAAK,KACvB,QAAQ,EACR,MAAM,gBAAgB,IAAI,kBAAkB,WAAW,EACvD,MAAM,aAAa,IAAI,UAAU,QAAQ,EAAE,MAAM;AAAA,+BAC3B,KAAK,MAAM;AAAA;AAAA,SAEjC;AAEH,UAAI,OAAO,UAAU,WAAW,GAAG;AACjC,eAAO;AAAA,MACT;AAEA,aAAO,KAAK,MAAM,OAAO,UAAU,CAAC,EAAE,MAAM;AAAA,IAC9C;AAAA,IAEA,gBAAgB,OACd,aACA,UACA,WACkB;AAClB,YAAM,KAAK,KACR,QAAQ,EACR,MAAM,gBAAgB,IAAI,kBAAkB,WAAW,EACvD,MAAM,aAAa,IAAI,UAAU,QAAQ,EACzC,MAAM,UAAU,IAAI,SAAS,IAAI,GAAG,GAAG,KAAK,UAAU,MAAM,CAAC,EAC7D,MAAM,cAAc,IAAI,WAAW,OAAO,SAAS,EACnD,MAAM,gBAAgB,IAAI,WAAW,OAAO,WAAW,EACvD,MAAM,eAAe,IAAI,KAAK,OAAO,UAAU,EAAE,MAAM;AAAA,kBAC9C,KAAK,MAAM;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,SAYpB;AAAA,IACL;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAMA,SAA0B;AAAA,IACxB,SAAS,OAAO,SAAiB,SAAmC;AAElE,YAAM,SAAS,MAAM,KAAK,KACvB,QAAQ,EACR,MAAM,YAAY,IAAI,UAAU,OAAO,EAAE,MAAM;AAAA,wBAChC,KAAK,MAAM;AAAA;AAAA;AAAA,SAG1B;AAGH,iBAAW,OAAO,OAAO,WAAW;AAClC,cAAM,KAAK,SAAS;AAAA,UAClB,SAAS,OAAO,IAAI,IAAI,YAAY;AAAA,UACpC;AAAA,QACF;AAAA,MACF;AAEA,aAAO,OAAO,UAAU;AAAA,IAC1B;AAAA,IAEA,WAAW,OACT,SACA,aACA,YACkB;AAClB,YAAM,KAAK,KACR,QAAQ,EACR,MAAM,YAAY,IAAI,U
AAU,OAAO,EACvC,MAAM,gBAAgB,IAAI,kBAAkB,WAAW,EACvD,MAAM,cAAc,IAAI,WAAW,OAAO,EAAE,MAAM;AAAA,wBACnC,KAAK,MAAM;AAAA;AAAA,SAE1B;AAAA,IACL;AAAA,IAEA,YAAY,OAAO,YAA4C;AAC7D,YAAM,SAAS,MAAM,KAAK,KACvB,QAAQ,EACR,MAAM,YAAY,IAAI,UAAU,OAAO,EAAE,MAAM;AAAA,iDACP,KAAK,MAAM;AAAA;AAAA,SAEnD;AAEH,aAAO,OAAO,UAAU,IAAI,CAAC,SAAsC;AAAA,QACjE,aAAa,IAAI;AAAA,QACjB,WAAW,IAAI;AAAA,MACjB,EAAE;AAAA,IACJ;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAMA,SAA2B;AAAA,IACzB,SAAS,OAAO,QAAgB,eAAyC;AACvE,UAAI;AAEF,cAAM,SAAS,MAAM,KAAK,KACvB,QAAQ,EACR,MAAM,YAAY,IAAI,UAAU,MAAM,EACtC,MAAM,YAAY,IAAI,UAAU,WAAW,EAC3C,MAAM,aAAa,IAAI,UAAU,SAAS,EAC1C,MAAM,eAAe,IAAI,KAAK,CAAC,EAAE,MAAM;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,WAQvC;AAEH,cAAM,aAAa,OAAO,UAAU,CAAC,EAAE;AAKvC,cAAM,WAAW,cAAc;AAE/B,YAAI,UAAU;AAEZ,gBAAM,YAAY,IAAI,KAAK,KAAK,IAAI,IAAI,aAAa,GAAI;AAEzD,gBAAM,KAAK,KACR,QAAQ,EACR,MAAM,WAAW,IAAI,UAAU,MAAM,EACrC,MAAM,SAAS,IAAI,UAAU,KAAK,OAAO,EACzC,MAAM,cAAc,IAAI,WAAW,SAAS,EAAE,MAAM;AAAA,sBAC3C,KAAK,MAAM;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,aAQpB;AAAA,QACL;AAEA,eAAO;AAAA,MACT,SAAS,OAAO;AACd,gBAAQ,MAAM,kCAAkC,KAAK;AACrD,eAAO;AAAA,MACT;AAAA,IACF;AAAA,IAEA,SAAS,OAAO,WAAkC;AAChD,UAAI;AAEF,cAAM,KAAK,KACR,QAAQ,EACR,MAAM,YAAY,IAAI,UAAU,MAAM,EACtC,MAAM,aAAa,IAAI,UAAU,SAAS,EAAE,MAAM;AAAA;AAAA;AAAA;AAAA,WAIlD;AAGH,cAAM,KAAK,KACR,QAAQ,EACR,MAAM,WAAW,IAAI,UAAU,MAAM,EACrC;AAAA,UACC,eAAe,KAAK,MAAM;AAAA,QAC5B;AAAA,MACJ,SAAS,OAAO;AACd,gBAAQ,MAAM,kCAAkC,KAAK;AAAA,MACvD;AAAA,IACF;AAAA,IAEA,OAAO,OAAO,QAAgB,eAAyC;AACrE,YAAM,YAAY,IAAI,KAAK,KAAK,IAAI,IAAI,aAAa,GAAI;AAEzD,YAAM,SAAS,MAAM,KAAK,KACvB,QAAQ,EACR,MAAM,WAAW,IAAI,UAAU,MAAM,EACrC,MAAM,cAAc,IAAI,WAAW,SAAS,EAAE,MAAM;AAAA,mBAC1C,KAAK,MAAM;AAAA;AAAA;AAAA,SAGrB;AAEH,aAAO,OAAO,aAAa,CAAC,IAAI;AAAA,IAClC;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAMA,WAA+B;AAAA,IAC7B,WAAW,CACT,SACA,aACgB;AAChB,WAAK,QAAQ,GAAG,SAAS,QAAQ;AAEjC,aAAO,MAAM;AACX,aAAK,QAAQ,IAAI,SAAS,QAAQ;AAAA,MACpC;AAAA,IACF;AAAA,IAEA,SAAS,OAAO,SAAiB,SAAiC;AAChE,WAAK,QAAQ,KAAK,SAAS,IAAI;AAAA,IACjC;AAAA,EACF;AACF;AAGA,IAAO,gBAAQ;","names":[]}
|
package/package.json
ADDED
|
@@ -0,0 +1,61 @@
|
|
|
1
|
+
{
|
|
2
|
+
"name": "@stepflowjs/storage-mssql",
|
|
3
|
+
"version": "0.0.1",
|
|
4
|
+
"description": "Microsoft SQL Server storage adapter for Stepflow",
|
|
5
|
+
"type": "module",
|
|
6
|
+
"main": "./dist/index.js",
|
|
7
|
+
"module": "./dist/index.js",
|
|
8
|
+
"types": "./dist/index.d.ts",
|
|
9
|
+
"exports": {
|
|
10
|
+
".": {
|
|
11
|
+
"import": "./dist/index.js",
|
|
12
|
+
"types": "./dist/index.d.ts"
|
|
13
|
+
}
|
|
14
|
+
},
|
|
15
|
+
"files": [
|
|
16
|
+
"dist"
|
|
17
|
+
],
|
|
18
|
+
"dependencies": {
|
|
19
|
+
"mssql": "^11.0.1",
|
|
20
|
+
"@stepflowjs/core": "0.0.1"
|
|
21
|
+
},
|
|
22
|
+
"devDependencies": {
|
|
23
|
+
"@types/mssql": "^9.1.5",
|
|
24
|
+
"tsup": "^8.5.1",
|
|
25
|
+
"vitest": "^4.0.17",
|
|
26
|
+
"@stepflowjs/storage-tests": "0.1.0"
|
|
27
|
+
},
|
|
28
|
+
"peerDependencies": {
|
|
29
|
+
"typescript": "^5.0.0"
|
|
30
|
+
},
|
|
31
|
+
"license": "MIT",
|
|
32
|
+
"author": "Stepflow Contributors",
|
|
33
|
+
"repository": {
|
|
34
|
+
"type": "git",
|
|
35
|
+
"url": "https://stepflow-production.up.railway.app",
|
|
36
|
+
"directory": "packages/storage/mssql"
|
|
37
|
+
},
|
|
38
|
+
"homepage": "https://stepflow-production.up.railway.app",
|
|
39
|
+
"bugs": {
|
|
40
|
+
"url": "https://stepflow-production.up.railway.app"
|
|
41
|
+
},
|
|
42
|
+
"keywords": [
|
|
43
|
+
"stepflow",
|
|
44
|
+
"storage",
|
|
45
|
+
"mssql",
|
|
46
|
+
"sql-server",
|
|
47
|
+
"adapter",
|
|
48
|
+
"workflow",
|
|
49
|
+
"orchestration"
|
|
50
|
+
],
|
|
51
|
+
"publishConfig": {
|
|
52
|
+
"access": "public"
|
|
53
|
+
},
|
|
54
|
+
"scripts": {
|
|
55
|
+
"build": "tsup",
|
|
56
|
+
"dev": "tsup --watch",
|
|
57
|
+
"typecheck": "tsc --noEmit",
|
|
58
|
+
"test": "vitest",
|
|
59
|
+
"clean": "rm -rf dist"
|
|
60
|
+
}
|
|
61
|
+
}
|