@pikku/kysely 0.11.0 → 0.12.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (34)
  1. package/CHANGELOG.md +4 -1
  2. package/dist/src/index.d.ts +10 -0
  3. package/dist/src/index.js +7 -0
  4. package/dist/src/kysely-agent-run-service.d.ts +19 -0
  5. package/dist/src/kysely-agent-run-service.js +171 -0
  6. package/dist/src/kysely-ai-storage-service.d.ts +37 -0
  7. package/dist/src/kysely-ai-storage-service.js +586 -0
  8. package/dist/src/kysely-channel-store.d.ts +17 -0
  9. package/dist/src/kysely-channel-store.js +80 -0
  10. package/dist/src/kysely-deployment-service.d.ts +17 -0
  11. package/dist/src/kysely-deployment-service.js +128 -0
  12. package/dist/src/kysely-eventhub-store.d.ts +13 -0
  13. package/dist/src/kysely-eventhub-store.js +45 -0
  14. package/dist/src/kysely-json.d.ts +1 -0
  15. package/dist/src/kysely-json.js +7 -0
  16. package/dist/src/kysely-tables.d.ts +136 -0
  17. package/dist/src/kysely-tables.js +1 -0
  18. package/dist/src/kysely-workflow-run-service.d.ts +29 -0
  19. package/dist/src/kysely-workflow-run-service.js +194 -0
  20. package/dist/src/kysely-workflow-service.d.ts +47 -0
  21. package/dist/src/kysely-workflow-service.js +485 -0
  22. package/dist/tsconfig.tsbuildinfo +1 -1
  23. package/package.json +12 -6
  24. package/src/index.ts +12 -0
  25. package/src/kysely-agent-run-service.ts +205 -0
  26. package/src/kysely-ai-storage-service.ts +713 -0
  27. package/src/kysely-channel-store.ts +109 -0
  28. package/src/kysely-deployment-service.ts +171 -0
  29. package/src/kysely-eventhub-store.ts +53 -0
  30. package/src/kysely-json.ts +5 -0
  31. package/src/kysely-services.test.ts +800 -0
  32. package/src/kysely-tables.ts +150 -0
  33. package/src/kysely-workflow-run-service.ts +242 -0
  34. package/src/kysely-workflow-service.ts +642 -0
@@ -0,0 +1,485 @@
1
+ import { PikkuWorkflowService, } from '@pikku/core/workflow';
2
+ import { sql } from 'kysely';
3
+ import { KyselyWorkflowRunService } from './kysely-workflow-run-service.js';
4
+ import { parseJson } from './kysely-json.js';
5
+ export class KyselyWorkflowService extends PikkuWorkflowService {
6
+ db;
7
+ initialized = false;
8
+ runService;
9
+ constructor(db) {
10
+ super();
11
+ this.db = db;
12
+ this.runService = new KyselyWorkflowRunService(db);
13
+ }
14
+ async init() {
15
+ if (this.initialized) {
16
+ return;
17
+ }
18
+ await this.db.schema
19
+ .createTable('workflow_runs')
20
+ .ifNotExists()
21
+ .addColumn('workflow_run_id', 'text', (col) => col
22
+ .primaryKey()
23
+ .defaultTo(sql `${sql.raw("'" + crypto.randomUUID() + "'")}`))
24
+ .addColumn('workflow', 'text', (col) => col.notNull())
25
+ .addColumn('status', 'text', (col) => col.notNull())
26
+ .addColumn('input', 'text', (col) => col.notNull())
27
+ .addColumn('output', 'text')
28
+ .addColumn('error', 'text')
29
+ .addColumn('state', 'text', (col) => col.defaultTo('{}'))
30
+ .addColumn('inline', 'boolean', (col) => col.defaultTo(false))
31
+ .addColumn('graph_hash', 'text')
32
+ .addColumn('wire', 'text')
33
+ .addColumn('created_at', 'timestamp', (col) => col.defaultTo(sql `CURRENT_TIMESTAMP`).notNull())
34
+ .addColumn('updated_at', 'timestamp', (col) => col.defaultTo(sql `CURRENT_TIMESTAMP`).notNull())
35
+ .execute();
36
+ await this.db.schema
37
+ .createTable('workflow_step')
38
+ .ifNotExists()
39
+ .addColumn('workflow_step_id', 'text', (col) => col
40
+ .primaryKey()
41
+ .defaultTo(sql `${sql.raw("'" + crypto.randomUUID() + "'")}`))
42
+ .addColumn('workflow_run_id', 'text', (col) => col
43
+ .notNull()
44
+ .references('workflow_runs.workflow_run_id')
45
+ .onDelete('cascade'))
46
+ .addColumn('step_name', 'text', (col) => col.notNull())
47
+ .addColumn('rpc_name', 'text')
48
+ .addColumn('data', 'text')
49
+ .addColumn('status', 'text', (col) => col.notNull().defaultTo('pending'))
50
+ .addColumn('result', 'text')
51
+ .addColumn('error', 'text')
52
+ .addColumn('branch_taken', 'text')
53
+ .addColumn('retries', 'integer')
54
+ .addColumn('retry_delay', 'text')
55
+ .addColumn('created_at', 'timestamp', (col) => col.defaultTo(sql `CURRENT_TIMESTAMP`).notNull())
56
+ .addColumn('updated_at', 'timestamp', (col) => col.defaultTo(sql `CURRENT_TIMESTAMP`).notNull())
57
+ .addUniqueConstraint('workflow_step_run_name_unique', [
58
+ 'workflow_run_id',
59
+ 'step_name',
60
+ ])
61
+ .execute();
62
+ await this.db.schema
63
+ .createTable('workflow_step_history')
64
+ .ifNotExists()
65
+ .addColumn('history_id', 'text', (col) => col
66
+ .primaryKey()
67
+ .defaultTo(sql `${sql.raw("'" + crypto.randomUUID() + "'")}`))
68
+ .addColumn('workflow_step_id', 'text', (col) => col
69
+ .notNull()
70
+ .references('workflow_step.workflow_step_id')
71
+ .onDelete('cascade'))
72
+ .addColumn('status', 'text', (col) => col.notNull())
73
+ .addColumn('result', 'text')
74
+ .addColumn('error', 'text')
75
+ .addColumn('created_at', 'timestamp', (col) => col.defaultTo(sql `CURRENT_TIMESTAMP`).notNull())
76
+ .addColumn('running_at', 'timestamp')
77
+ .addColumn('scheduled_at', 'timestamp')
78
+ .addColumn('succeeded_at', 'timestamp')
79
+ .addColumn('failed_at', 'timestamp')
80
+ .execute();
81
+ await this.db.schema
82
+ .createTable('workflow_versions')
83
+ .ifNotExists()
84
+ .addColumn('workflow_name', 'text', (col) => col.notNull())
85
+ .addColumn('graph_hash', 'text', (col) => col.notNull())
86
+ .addColumn('graph', 'text', (col) => col.notNull())
87
+ .addColumn('source', 'text', (col) => col.notNull())
88
+ .addColumn('created_at', 'timestamp', (col) => col.defaultTo(sql `CURRENT_TIMESTAMP`).notNull())
89
+ .addPrimaryKeyConstraint('workflow_versions_pk', [
90
+ 'workflow_name',
91
+ 'graph_hash',
92
+ ])
93
+ .execute();
94
+ this.initialized = true;
95
+ }
96
+ async createRun(workflowName, input, inline, graphHash, wire) {
97
+ const id = crypto.randomUUID();
98
+ await this.db
99
+ .insertInto('workflow_runs')
100
+ .values({
101
+ workflow_run_id: id,
102
+ workflow: workflowName,
103
+ status: 'running',
104
+ input: JSON.stringify(input),
105
+ inline,
106
+ graph_hash: graphHash,
107
+ wire: JSON.stringify(wire),
108
+ })
109
+ .execute();
110
+ return id;
111
+ }
112
+ async getRun(id) {
113
+ return this.runService.getRun(id);
114
+ }
115
+ async updateRunStatus(id, status, output, error) {
116
+ await this.db
117
+ .updateTable('workflow_runs')
118
+ .set({
119
+ status,
120
+ output: output ? JSON.stringify(output) : null,
121
+ error: error ? JSON.stringify(error) : null,
122
+ updated_at: new Date(),
123
+ })
124
+ .where('workflow_run_id', '=', id)
125
+ .execute();
126
+ }
127
+ async insertStepState(runId, stepName, rpcName, data, stepOptions) {
128
+ const stepId = crypto.randomUUID();
129
+ const now = new Date();
130
+ await this.db
131
+ .insertInto('workflow_step')
132
+ .values({
133
+ workflow_step_id: stepId,
134
+ workflow_run_id: runId,
135
+ step_name: stepName,
136
+ rpc_name: rpcName,
137
+ data: data != null ? JSON.stringify(data) : null,
138
+ status: 'pending',
139
+ retries: stepOptions?.retries ?? null,
140
+ retry_delay: stepOptions?.retryDelay?.toString() ?? null,
141
+ created_at: now,
142
+ updated_at: now,
143
+ })
144
+ .execute();
145
+ await this.insertHistoryRecord(stepId, 'pending');
146
+ return {
147
+ stepId,
148
+ status: 'pending',
149
+ result: undefined,
150
+ error: undefined,
151
+ attemptCount: 1,
152
+ retries: stepOptions?.retries,
153
+ retryDelay: stepOptions?.retryDelay?.toString(),
154
+ createdAt: now,
155
+ updatedAt: now,
156
+ };
157
+ }
158
+ async getStepState(runId, stepName) {
159
+ const row = await this.db
160
+ .selectFrom('workflow_step as s')
161
+ .select([
162
+ 's.workflow_step_id',
163
+ 's.status',
164
+ 's.result',
165
+ 's.error',
166
+ 's.retries',
167
+ 's.retry_delay',
168
+ 's.created_at',
169
+ 's.updated_at',
170
+ ])
171
+ .select((eb) => eb
172
+ .selectFrom('workflow_step_history')
173
+ .select(eb.fn.countAll().as('cnt'))
174
+ .whereRef('workflow_step_history.workflow_step_id', '=', 's.workflow_step_id')
175
+ .as('attempt_count'))
176
+ .where('s.workflow_run_id', '=', runId)
177
+ .where('s.step_name', '=', stepName)
178
+ .executeTakeFirst();
179
+ if (!row) {
180
+ throw new Error(`Step not found: runId=${runId}, stepName=${stepName}. Use insertStepState to create it.`);
181
+ }
182
+ return {
183
+ stepId: row.workflow_step_id,
184
+ status: row.status,
185
+ result: parseJson(row.result),
186
+ error: parseJson(row.error),
187
+ attemptCount: Number(row.attempt_count),
188
+ retries: row.retries != null ? Number(row.retries) : undefined,
189
+ retryDelay: row.retry_delay ?? undefined,
190
+ createdAt: new Date(row.created_at),
191
+ updatedAt: new Date(row.updated_at),
192
+ };
193
+ }
194
+ async getRunHistory(runId) {
195
+ return this.runService.getRunHistory(runId);
196
+ }
197
+ async setStepRunning(stepId) {
198
+ await this.db
199
+ .updateTable('workflow_step')
200
+ .set({ status: 'running', updated_at: new Date() })
201
+ .where('workflow_step_id', '=', stepId)
202
+ .execute();
203
+ const latestHistory = await this.db
204
+ .selectFrom('workflow_step_history')
205
+ .select('history_id')
206
+ .where('workflow_step_id', '=', stepId)
207
+ .orderBy('created_at', 'desc')
208
+ .limit(1)
209
+ .executeTakeFirst();
210
+ if (latestHistory) {
211
+ await this.db
212
+ .updateTable('workflow_step_history')
213
+ .set({ status: 'running' })
214
+ .where('history_id', '=', latestHistory.history_id)
215
+ .execute();
216
+ }
217
+ }
218
+ async setStepScheduled(stepId) {
219
+ await this.db
220
+ .updateTable('workflow_step')
221
+ .set({ status: 'scheduled', updated_at: new Date() })
222
+ .where('workflow_step_id', '=', stepId)
223
+ .execute();
224
+ }
225
+ async insertHistoryRecord(stepId, status, result, error) {
226
+ const now = new Date();
227
+ const values = {
228
+ history_id: crypto.randomUUID(),
229
+ workflow_step_id: stepId,
230
+ status,
231
+ result: result != null ? JSON.stringify(result) : null,
232
+ error: error != null ? JSON.stringify(error) : null,
233
+ created_at: now,
234
+ };
235
+ const timestampField = this.getTimestampFieldForStatus(status);
236
+ if (timestampField !== 'created_at') {
237
+ values[timestampField] = now;
238
+ }
239
+ await this.db
240
+ .insertInto('workflow_step_history')
241
+ .values(values)
242
+ .execute();
243
+ }
244
+ getTimestampFieldForStatus(status) {
245
+ switch (status) {
246
+ case 'running':
247
+ return 'running_at';
248
+ case 'scheduled':
249
+ return 'scheduled_at';
250
+ case 'succeeded':
251
+ return 'succeeded_at';
252
+ case 'failed':
253
+ return 'failed_at';
254
+ default:
255
+ return 'created_at';
256
+ }
257
+ }
258
+ async setStepResult(stepId, result) {
259
+ const resultJson = JSON.stringify(result);
260
+ await this.db
261
+ .updateTable('workflow_step')
262
+ .set({
263
+ status: 'succeeded',
264
+ result: resultJson,
265
+ error: null,
266
+ updated_at: new Date(),
267
+ })
268
+ .where('workflow_step_id', '=', stepId)
269
+ .execute();
270
+ const latestHistory = await this.db
271
+ .selectFrom('workflow_step_history')
272
+ .select('history_id')
273
+ .where('workflow_step_id', '=', stepId)
274
+ .orderBy('created_at', 'desc')
275
+ .limit(1)
276
+ .executeTakeFirst();
277
+ if (latestHistory) {
278
+ await this.db
279
+ .updateTable('workflow_step_history')
280
+ .set({ status: 'succeeded', result: resultJson })
281
+ .where('history_id', '=', latestHistory.history_id)
282
+ .execute();
283
+ }
284
+ }
285
+ async setStepError(stepId, error) {
286
+ const serializedError = {
287
+ message: error.message,
288
+ stack: error.stack,
289
+ code: error.code,
290
+ };
291
+ const errorJson = JSON.stringify(serializedError);
292
+ await this.db
293
+ .updateTable('workflow_step')
294
+ .set({
295
+ status: 'failed',
296
+ error: errorJson,
297
+ result: null,
298
+ updated_at: new Date(),
299
+ })
300
+ .where('workflow_step_id', '=', stepId)
301
+ .execute();
302
+ const latestHistory = await this.db
303
+ .selectFrom('workflow_step_history')
304
+ .select('history_id')
305
+ .where('workflow_step_id', '=', stepId)
306
+ .orderBy('created_at', 'desc')
307
+ .limit(1)
308
+ .executeTakeFirst();
309
+ if (latestHistory) {
310
+ await this.db
311
+ .updateTable('workflow_step_history')
312
+ .set({ status: 'failed', error: errorJson })
313
+ .where('history_id', '=', latestHistory.history_id)
314
+ .execute();
315
+ }
316
+ }
317
+ async createRetryAttempt(stepId, status) {
318
+ await this.db
319
+ .updateTable('workflow_step')
320
+ .set({ status, result: null, error: null, updated_at: new Date() })
321
+ .where('workflow_step_id', '=', stepId)
322
+ .execute();
323
+ await this.insertHistoryRecord(stepId, status);
324
+ const row = await this.db
325
+ .selectFrom('workflow_step as s')
326
+ .select([
327
+ 's.workflow_step_id',
328
+ 's.status',
329
+ 's.result',
330
+ 's.error',
331
+ 's.retries',
332
+ 's.retry_delay',
333
+ 's.created_at',
334
+ 's.updated_at',
335
+ ])
336
+ .select((eb) => eb
337
+ .selectFrom('workflow_step_history')
338
+ .select(eb.fn.countAll().as('cnt'))
339
+ .whereRef('workflow_step_history.workflow_step_id', '=', 's.workflow_step_id')
340
+ .as('attempt_count'))
341
+ .where('s.workflow_step_id', '=', stepId)
342
+ .executeTakeFirstOrThrow();
343
+ return {
344
+ stepId: row.workflow_step_id,
345
+ status: row.status,
346
+ result: parseJson(row.result),
347
+ error: parseJson(row.error),
348
+ attemptCount: Number(row.attempt_count),
349
+ retries: row.retries != null ? Number(row.retries) : undefined,
350
+ retryDelay: row.retry_delay ?? undefined,
351
+ createdAt: new Date(row.created_at),
352
+ updatedAt: new Date(row.updated_at),
353
+ };
354
+ }
355
+ async withRunLock(id, fn) {
356
+ return this.db.transaction().execute(async (trx) => {
357
+ await trx
358
+ .selectFrom('workflow_runs')
359
+ .select('workflow_run_id')
360
+ .where('workflow_run_id', '=', id)
361
+ .forUpdate()
362
+ .executeTakeFirst();
363
+ return fn();
364
+ });
365
+ }
366
+ async withStepLock(runId, stepName, fn) {
367
+ return this.db.transaction().execute(async (trx) => {
368
+ await trx
369
+ .selectFrom('workflow_step')
370
+ .select('workflow_step_id')
371
+ .where('workflow_run_id', '=', runId)
372
+ .where('step_name', '=', stepName)
373
+ .forUpdate()
374
+ .executeTakeFirst();
375
+ return fn();
376
+ });
377
+ }
378
+ async getCompletedGraphState(runId) {
379
+ const results = await this.db
380
+ .selectFrom('workflow_step as ws')
381
+ .select(['ws.step_name', 'ws.status', 'ws.branch_taken', 'ws.retries'])
382
+ .select((eb) => eb
383
+ .selectFrom('workflow_step_history as h')
384
+ .select(eb.fn.countAll().as('cnt'))
385
+ .whereRef('h.workflow_step_id', '=', 'ws.workflow_step_id')
386
+ .as('attempt_count'))
387
+ .where('ws.workflow_run_id', '=', runId)
388
+ .where('ws.status', 'in', ['succeeded', 'failed'])
389
+ .execute();
390
+ const completedNodeIds = [];
391
+ const failedNodeIds = [];
392
+ const branchKeys = {};
393
+ for (const row of results) {
394
+ const nodeId = row.step_name;
395
+ if (row.status === 'succeeded') {
396
+ completedNodeIds.push(nodeId);
397
+ if (row.branch_taken) {
398
+ branchKeys[nodeId] = row.branch_taken;
399
+ }
400
+ }
401
+ else if (row.status === 'failed') {
402
+ const maxAttempts = (row.retries ?? 0) + 1;
403
+ if (Number(row.attempt_count) >= maxAttempts) {
404
+ failedNodeIds.push(nodeId);
405
+ }
406
+ }
407
+ }
408
+ return { completedNodeIds, failedNodeIds, branchKeys };
409
+ }
410
+ async getNodesWithoutSteps(runId, nodeIds) {
411
+ if (nodeIds.length === 0)
412
+ return [];
413
+ const result = await this.db
414
+ .selectFrom('workflow_step')
415
+ .select('step_name')
416
+ .where('workflow_run_id', '=', runId)
417
+ .where('step_name', 'in', nodeIds)
418
+ .execute();
419
+ const existingStepNames = new Set(result.map((r) => r.step_name));
420
+ return nodeIds.filter((id) => !existingStepNames.has(id));
421
+ }
422
+ async getNodeResults(runId, nodeIds) {
423
+ if (nodeIds.length === 0)
424
+ return {};
425
+ const result = await this.db
426
+ .selectFrom('workflow_step')
427
+ .select(['step_name', 'result'])
428
+ .where('workflow_run_id', '=', runId)
429
+ .where('step_name', 'in', nodeIds)
430
+ .where('status', '=', 'succeeded')
431
+ .execute();
432
+ const results = {};
433
+ for (const row of result) {
434
+ results[row.step_name] = parseJson(row.result);
435
+ }
436
+ return results;
437
+ }
438
+ async setBranchTaken(stepId, branchKey) {
439
+ await this.db
440
+ .updateTable('workflow_step')
441
+ .set({ branch_taken: branchKey, updated_at: new Date() })
442
+ .where('workflow_step_id', '=', stepId)
443
+ .execute();
444
+ }
445
+ async updateRunState(runId, name, value) {
446
+ const row = await this.db
447
+ .selectFrom('workflow_runs')
448
+ .select('state')
449
+ .where('workflow_run_id', '=', runId)
450
+ .executeTakeFirst();
451
+ const state = parseJson(row?.state) ?? {};
452
+ state[name] = value;
453
+ await this.db
454
+ .updateTable('workflow_runs')
455
+ .set({ state: JSON.stringify(state), updated_at: new Date() })
456
+ .where('workflow_run_id', '=', runId)
457
+ .execute();
458
+ }
459
+ async getRunState(runId) {
460
+ const row = await this.db
461
+ .selectFrom('workflow_runs')
462
+ .select('state')
463
+ .where('workflow_run_id', '=', runId)
464
+ .executeTakeFirst();
465
+ if (!row)
466
+ return {};
467
+ return parseJson(row.state) ?? {};
468
+ }
469
+ async upsertWorkflowVersion(name, graphHash, graph, source) {
470
+ await this.db
471
+ .insertInto('workflow_versions')
472
+ .values({
473
+ workflow_name: name,
474
+ graph_hash: graphHash,
475
+ graph: JSON.stringify(graph),
476
+ source,
477
+ })
478
+ .onConflict((oc) => oc.columns(['workflow_name', 'graph_hash']).doNothing())
479
+ .execute();
480
+ }
481
+ async getWorkflowVersion(name, graphHash) {
482
+ return this.runService.getWorkflowVersion(name, graphHash);
483
+ }
484
+ async close() { }
485
+ }
@@ -1 +1 @@
1
- {"root":["../src/index.ts","../src/pikku-kysely.ts","../bin/pikku-kysely-pure.ts"],"version":"5.9.3"}
1
+ {"root":["../src/index.ts","../src/kysely-agent-run-service.ts","../src/kysely-ai-storage-service.ts","../src/kysely-channel-store.ts","../src/kysely-deployment-service.ts","../src/kysely-eventhub-store.ts","../src/kysely-json.ts","../src/kysely-tables.ts","../src/kysely-workflow-run-service.ts","../src/kysely-workflow-service.ts","../src/pikku-kysely.ts","../bin/pikku-kysely-pure.ts"],"version":"5.9.3"}
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@pikku/kysely",
3
- "version": "0.11.0",
3
+ "version": "0.12.0",
4
4
  "author": "yasser.fadl@gmail.com",
5
5
  "license": "MIT",
6
6
  "module": "dist/src/index.js",
@@ -13,21 +13,27 @@
13
13
  "tsc": "tsc",
14
14
  "ncu": "npx npm-check-updates",
15
15
  "build": "tsc -b",
16
+ "test": "bash run-tests.sh",
17
+ "test:watch": "bash run-tests.sh --watch",
18
+ "test:coverage": "bash run-tests.sh --coverage",
16
19
  "release": "npm run build && npm test"
17
20
  },
18
21
  "peerDependencies": {
19
- "@pikku/core": "^0.11.0"
22
+ "@pikku/core": "^0.12.0"
20
23
  },
21
24
  "dependencies": {
22
- "kysely": "^0.28.8",
25
+ "kysely": "^0.28.11",
23
26
  "kysely-postgres-js": "^3.0.0",
24
- "postgres": "^3.4.7"
27
+ "postgres": "^3.4.8"
25
28
  },
26
29
  "devDependencies": {
27
- "kysely": "^0.28.8",
30
+ "@types/better-sqlite3": "^7.6.13",
31
+ "better-sqlite3": "^12.6.2",
32
+ "kysely": "^0.28.11",
28
33
  "kysely-codegen": "^0.19.0",
34
+ "kysely-plugin-serialize": "^0.8.2",
29
35
  "kysely-postgres-js": "^3.0.0",
30
- "postgres": "^3.4.7",
36
+ "postgres": "^3.4.8",
31
37
  "typescript": "^5.9"
32
38
  }
33
39
  }
package/src/index.ts CHANGED
@@ -1 +1,13 @@
1
1
  export { PikkuKysely } from './pikku-kysely.js'
2
+
3
+ export { KyselyChannelStore } from './kysely-channel-store.js'
4
+ export { KyselyEventHubStore } from './kysely-eventhub-store.js'
5
+ export { KyselyWorkflowService } from './kysely-workflow-service.js'
6
+ export { KyselyWorkflowRunService } from './kysely-workflow-run-service.js'
7
+ export { KyselyDeploymentService } from './kysely-deployment-service.js'
8
+ export { KyselyAIStorageService } from './kysely-ai-storage-service.js'
9
+ export { KyselyAgentRunService } from './kysely-agent-run-service.js'
10
+
11
+ export type { KyselyPikkuDB } from './kysely-tables.js'
12
+ export type { WorkflowRunService } from '@pikku/core/workflow'
13
+ export type { AgentRunService, AgentRunRow } from '@pikku/core/ai-agent'