@pikku/kysely 0.11.0 → 0.12.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +4 -1
- package/dist/src/index.d.ts +10 -0
- package/dist/src/index.js +7 -0
- package/dist/src/kysely-agent-run-service.d.ts +19 -0
- package/dist/src/kysely-agent-run-service.js +171 -0
- package/dist/src/kysely-ai-storage-service.d.ts +37 -0
- package/dist/src/kysely-ai-storage-service.js +586 -0
- package/dist/src/kysely-channel-store.d.ts +17 -0
- package/dist/src/kysely-channel-store.js +80 -0
- package/dist/src/kysely-deployment-service.d.ts +17 -0
- package/dist/src/kysely-deployment-service.js +128 -0
- package/dist/src/kysely-eventhub-store.d.ts +13 -0
- package/dist/src/kysely-eventhub-store.js +45 -0
- package/dist/src/kysely-json.d.ts +1 -0
- package/dist/src/kysely-json.js +7 -0
- package/dist/src/kysely-tables.d.ts +136 -0
- package/dist/src/kysely-tables.js +1 -0
- package/dist/src/kysely-workflow-run-service.d.ts +29 -0
- package/dist/src/kysely-workflow-run-service.js +194 -0
- package/dist/src/kysely-workflow-service.d.ts +47 -0
- package/dist/src/kysely-workflow-service.js +485 -0
- package/dist/tsconfig.tsbuildinfo +1 -1
- package/package.json +12 -6
- package/src/index.ts +12 -0
- package/src/kysely-agent-run-service.ts +205 -0
- package/src/kysely-ai-storage-service.ts +713 -0
- package/src/kysely-channel-store.ts +109 -0
- package/src/kysely-deployment-service.ts +171 -0
- package/src/kysely-eventhub-store.ts +53 -0
- package/src/kysely-json.ts +5 -0
- package/src/kysely-services.test.ts +800 -0
- package/src/kysely-tables.ts +150 -0
- package/src/kysely-workflow-run-service.ts +242 -0
- package/src/kysely-workflow-service.ts +642 -0
|
@@ -0,0 +1,150 @@
|
|
|
1
|
+
import type { Generated } from 'kysely'

/**
 * Row shape for the `channels` table: one row per open realtime channel.
 * String columns holding structured data (e.g. `opening_data`) appear to be
 * JSON-serialized by the services in this package — TODO confirm at call sites.
 */
export interface ChannelsTable {
  channel_id: string
  channel_name: string
  created_at: Generated<Date>
  // Serialized payload captured when the channel was opened.
  opening_data: string
  // Serialized user session, if one was attached; null otherwise.
  user_session: string | null
  // Timestamp of the last wire activity; database-generated default.
  last_wire: Generated<Date>
}

/** Join table mapping a channel to the topics it is subscribed to. */
export interface ChannelSubscriptionsTable {
  channel_id: string
  topic: string
}

/** Row shape for the `workflow_runs` table: one row per workflow execution. */
export interface WorkflowRunsTable {
  workflow_run_id: Generated<string>
  // Workflow name this run belongs to.
  workflow: string
  status: string
  // Serialized run input; `output`/`error` are serialized results, null until set.
  input: string
  output: string | null
  error: string | null
  state: Generated<string>
  // Whether the run executed inline; database-generated default.
  inline: Generated<boolean>
  // Hash identifying the workflow graph version used (see WorkflowVersionsTable).
  graph_hash: string | null
  // Serialized wire/trigger descriptor, if recorded.
  wire: string | null
  created_at: Generated<Date>
  updated_at: Generated<Date>
}

/** Row shape for `workflow_step`: one row per step within a workflow run. */
export interface WorkflowStepTable {
  workflow_step_id: Generated<string>
  workflow_run_id: string
  step_name: string
  // RPC invoked by this step, when the step is an RPC call.
  rpc_name: string | null
  data: string | null
  status: Generated<string>
  result: string | null
  error: string | null
  // Which branch a conditional step took, if applicable.
  branch_taken: string | null
  retries: number | null
  retry_delay: string | null
  created_at: Generated<Date>
  updated_at: Generated<Date>
}

/**
 * Row shape for `workflow_step_history`: one row per attempt of a step.
 * Note there is no `updated_at` — history rows appear to be append-only.
 */
export interface WorkflowStepHistoryTable {
  history_id: Generated<string>
  workflow_step_id: string
  status: string
  result: string | null
  error: string | null
  created_at: Generated<Date>
  // Lifecycle timestamps for this attempt; null until the state is reached.
  running_at: Date | null
  scheduled_at: Date | null
  succeeded_at: Date | null
  failed_at: Date | null
}

/** Versioned workflow graph definitions, keyed by (workflow_name, graph_hash). */
export interface WorkflowVersionsTable {
  workflow_name: string
  graph_hash: string
  // Serialized graph definition plus its original source text.
  graph: string
  source: string
  created_at: Generated<Date>
}

/** AI conversation threads, owned by a resource. */
export interface AIThreadsTable {
  id: string
  resource_id: string
  title: string | null
  // Serialized thread metadata, if any.
  metadata: string | null
  created_at: Generated<Date>
  updated_at: Generated<Date>
}

/** Messages within an AI thread. */
export interface AIMessageTable {
  id: string
  thread_id: string
  // Message author role (stored as a plain string).
  role: string
  content: string | null
  created_at: Generated<Date>
}

/** Tool invocations made during AI runs, linked to thread and message. */
export interface AIToolCallTable {
  id: string
  thread_id: string
  message_id: string
  run_id: string | null
  tool_name: string
  // Serialized tool arguments and (once available) result.
  args: string
  result: string | null
  // Human-approval workflow fields; null when no approval is involved.
  approval_status: string | null
  approval_type: string | null
  agent_run_id: string | null
  // Display-friendly overrides for rendering the tool call, if provided.
  display_tool_name: string | null
  display_args: string | null
  created_at: Generated<Date>
}

/** Scoped key/value working memory for AI agents (data is serialized). */
export interface AIWorkingMemoryTable {
  id: string
  scope: string
  data: string
  updated_at: Generated<Date>
}

/** Row shape for `ai_run`: one row per agent run, with token usage counters. */
export interface AIRunTable {
  run_id: Generated<string>
  agent_name: string
  thread_id: string
  resource_id: string
  status: Generated<string>
  // Why the run was suspended, if it was.
  suspend_reason: string | null
  // Serialized list of RPCs that were unavailable — TODO confirm shape.
  missing_rpcs: string | null
  usage_input_tokens: Generated<number>
  usage_output_tokens: Generated<number>
  usage_model: Generated<string>
  created_at: Generated<Date>
  updated_at: Generated<Date>
}

/** Registered deployments, tracked via heartbeat. */
export interface PikkuDeploymentsTable {
  deployment_id: string
  endpoint: string
  last_heartbeat: Generated<Date>
  created_at: Generated<Date>
}

/** Functions exposed by a deployment (one row per function). */
export interface PikkuDeploymentFunctionsTable {
  deployment_id: string
  function_name: string
}

/**
 * The full Kysely database schema for Pikku: maps table names to row types.
 * Pass as the type parameter to `Kysely<KyselyPikkuDB>`.
 */
export interface KyselyPikkuDB {
  channels: ChannelsTable
  channel_subscriptions: ChannelSubscriptionsTable
  workflow_runs: WorkflowRunsTable
  workflow_step: WorkflowStepTable
  workflow_step_history: WorkflowStepHistoryTable
  workflow_versions: WorkflowVersionsTable
  ai_threads: AIThreadsTable
  ai_message: AIMessageTable
  ai_tool_call: AIToolCallTable
  ai_working_memory: AIWorkingMemoryTable
  ai_run: AIRunTable
  pikku_deployments: PikkuDeploymentsTable
  pikku_deployment_functions: PikkuDeploymentFunctionsTable
}
|
|
@@ -0,0 +1,242 @@
|
|
|
1
|
+
import type {
|
|
2
|
+
WorkflowRun,
|
|
3
|
+
StepState,
|
|
4
|
+
WorkflowStatus,
|
|
5
|
+
WorkflowRunService,
|
|
6
|
+
} from '@pikku/core/workflow'
|
|
7
|
+
import { Kysely } from 'kysely'
|
|
8
|
+
import type { KyselyPikkuDB } from './kysely-tables.js'
|
|
9
|
+
import { parseJson } from './kysely-json.js'
|
|
10
|
+
|
|
11
|
+
export class KyselyWorkflowRunService implements WorkflowRunService {
|
|
12
|
+
constructor(private db: Kysely<KyselyPikkuDB>) {}
|
|
13
|
+
|
|
14
|
+
async listRuns(options?: {
|
|
15
|
+
workflowName?: string
|
|
16
|
+
status?: string
|
|
17
|
+
limit?: number
|
|
18
|
+
offset?: number
|
|
19
|
+
}): Promise<WorkflowRun[]> {
|
|
20
|
+
const { workflowName, status, limit = 50, offset = 0 } = options ?? {}
|
|
21
|
+
|
|
22
|
+
let query = this.db
|
|
23
|
+
.selectFrom('workflow_runs')
|
|
24
|
+
.select([
|
|
25
|
+
'workflow_run_id',
|
|
26
|
+
'workflow',
|
|
27
|
+
'status',
|
|
28
|
+
'input',
|
|
29
|
+
'output',
|
|
30
|
+
'error',
|
|
31
|
+
'inline',
|
|
32
|
+
'graph_hash',
|
|
33
|
+
'wire',
|
|
34
|
+
'created_at',
|
|
35
|
+
'updated_at',
|
|
36
|
+
])
|
|
37
|
+
|
|
38
|
+
if (workflowName) {
|
|
39
|
+
query = query.where('workflow', '=', workflowName)
|
|
40
|
+
}
|
|
41
|
+
|
|
42
|
+
if (status) {
|
|
43
|
+
query = query.where('status', '=', status)
|
|
44
|
+
}
|
|
45
|
+
|
|
46
|
+
const result = await query
|
|
47
|
+
.orderBy('created_at', 'desc')
|
|
48
|
+
.limit(limit)
|
|
49
|
+
.offset(offset)
|
|
50
|
+
.execute()
|
|
51
|
+
|
|
52
|
+
return result.map((row) => this.mapRunRow(row))
|
|
53
|
+
}
|
|
54
|
+
|
|
55
|
+
async getRun(id: string): Promise<WorkflowRun | null> {
|
|
56
|
+
const row = await this.db
|
|
57
|
+
.selectFrom('workflow_runs')
|
|
58
|
+
.select([
|
|
59
|
+
'workflow_run_id',
|
|
60
|
+
'workflow',
|
|
61
|
+
'status',
|
|
62
|
+
'input',
|
|
63
|
+
'output',
|
|
64
|
+
'error',
|
|
65
|
+
'inline',
|
|
66
|
+
'graph_hash',
|
|
67
|
+
'wire',
|
|
68
|
+
'created_at',
|
|
69
|
+
'updated_at',
|
|
70
|
+
])
|
|
71
|
+
.where('workflow_run_id', '=', id)
|
|
72
|
+
.executeTakeFirst()
|
|
73
|
+
|
|
74
|
+
if (!row) return null
|
|
75
|
+
return this.mapRunRow(row)
|
|
76
|
+
}
|
|
77
|
+
|
|
78
|
+
async getRunSteps(
|
|
79
|
+
runId: string
|
|
80
|
+
): Promise<
|
|
81
|
+
Array<StepState & { stepName: string; rpcName?: string; data?: any }>
|
|
82
|
+
> {
|
|
83
|
+
const result = await this.db
|
|
84
|
+
.selectFrom('workflow_step as s')
|
|
85
|
+
.select([
|
|
86
|
+
's.workflow_step_id',
|
|
87
|
+
's.step_name',
|
|
88
|
+
's.rpc_name',
|
|
89
|
+
's.data',
|
|
90
|
+
's.status',
|
|
91
|
+
's.result',
|
|
92
|
+
's.error',
|
|
93
|
+
's.retries',
|
|
94
|
+
's.retry_delay',
|
|
95
|
+
's.created_at',
|
|
96
|
+
's.updated_at',
|
|
97
|
+
])
|
|
98
|
+
.select((eb) =>
|
|
99
|
+
eb
|
|
100
|
+
.selectFrom('workflow_step_history')
|
|
101
|
+
.select(eb.fn.countAll<number>().as('cnt'))
|
|
102
|
+
.whereRef(
|
|
103
|
+
'workflow_step_history.workflow_step_id',
|
|
104
|
+
'=',
|
|
105
|
+
's.workflow_step_id'
|
|
106
|
+
)
|
|
107
|
+
.as('attempt_count')
|
|
108
|
+
)
|
|
109
|
+
.where('s.workflow_run_id', '=', runId)
|
|
110
|
+
.orderBy('s.created_at', 'asc')
|
|
111
|
+
.execute()
|
|
112
|
+
|
|
113
|
+
return result.map((row) => ({
|
|
114
|
+
stepId: row.workflow_step_id,
|
|
115
|
+
stepName: row.step_name,
|
|
116
|
+
rpcName: row.rpc_name ?? undefined,
|
|
117
|
+
data: parseJson(row.data),
|
|
118
|
+
status: row.status as StepState['status'],
|
|
119
|
+
result: parseJson(row.result),
|
|
120
|
+
error: parseJson(row.error),
|
|
121
|
+
attemptCount: Number(row.attempt_count || 1),
|
|
122
|
+
retries: row.retries != null ? Number(row.retries) : undefined,
|
|
123
|
+
retryDelay: row.retry_delay ?? undefined,
|
|
124
|
+
createdAt: new Date(row.created_at as unknown as string),
|
|
125
|
+
updatedAt: new Date(row.updated_at as unknown as string),
|
|
126
|
+
}))
|
|
127
|
+
}
|
|
128
|
+
|
|
129
|
+
async getRunHistory(
|
|
130
|
+
runId: string
|
|
131
|
+
): Promise<Array<StepState & { stepName: string }>> {
|
|
132
|
+
const result = await this.db
|
|
133
|
+
.selectFrom('workflow_step as s')
|
|
134
|
+
.innerJoin(
|
|
135
|
+
'workflow_step_history as h',
|
|
136
|
+
'h.workflow_step_id',
|
|
137
|
+
's.workflow_step_id'
|
|
138
|
+
)
|
|
139
|
+
.select([
|
|
140
|
+
's.workflow_step_id',
|
|
141
|
+
's.step_name',
|
|
142
|
+
's.retries',
|
|
143
|
+
's.retry_delay',
|
|
144
|
+
'h.status',
|
|
145
|
+
'h.result',
|
|
146
|
+
'h.error',
|
|
147
|
+
'h.created_at',
|
|
148
|
+
'h.running_at',
|
|
149
|
+
'h.scheduled_at',
|
|
150
|
+
'h.succeeded_at',
|
|
151
|
+
'h.failed_at',
|
|
152
|
+
])
|
|
153
|
+
.where('s.workflow_run_id', '=', runId)
|
|
154
|
+
.orderBy('h.created_at', 'asc')
|
|
155
|
+
.execute()
|
|
156
|
+
|
|
157
|
+
let attemptCounters: Record<string, number> = {}
|
|
158
|
+
return result.map((row) => {
|
|
159
|
+
const stepId = row.workflow_step_id
|
|
160
|
+
attemptCounters[stepId] = (attemptCounters[stepId] ?? 0) + 1
|
|
161
|
+
|
|
162
|
+
return {
|
|
163
|
+
stepId,
|
|
164
|
+
stepName: row.step_name,
|
|
165
|
+
status: row.status as StepState['status'],
|
|
166
|
+
result: parseJson(row.result),
|
|
167
|
+
error: parseJson(row.error),
|
|
168
|
+
attemptCount: attemptCounters[stepId]!,
|
|
169
|
+
retries: row.retries != null ? Number(row.retries) : undefined,
|
|
170
|
+
retryDelay: row.retry_delay ?? undefined,
|
|
171
|
+
createdAt: new Date(row.created_at as unknown as string),
|
|
172
|
+
updatedAt: new Date(row.created_at as unknown as string),
|
|
173
|
+
runningAt: row.running_at
|
|
174
|
+
? new Date(row.running_at as unknown as string)
|
|
175
|
+
: undefined,
|
|
176
|
+
scheduledAt: row.scheduled_at
|
|
177
|
+
? new Date(row.scheduled_at as unknown as string)
|
|
178
|
+
: undefined,
|
|
179
|
+
succeededAt: row.succeeded_at
|
|
180
|
+
? new Date(row.succeeded_at as unknown as string)
|
|
181
|
+
: undefined,
|
|
182
|
+
failedAt: row.failed_at
|
|
183
|
+
? new Date(row.failed_at as unknown as string)
|
|
184
|
+
: undefined,
|
|
185
|
+
}
|
|
186
|
+
})
|
|
187
|
+
}
|
|
188
|
+
|
|
189
|
+
async getDistinctWorkflowNames(): Promise<string[]> {
|
|
190
|
+
const result = await this.db
|
|
191
|
+
.selectFrom('workflow_runs')
|
|
192
|
+
.select('workflow')
|
|
193
|
+
.distinct()
|
|
194
|
+
.orderBy('workflow')
|
|
195
|
+
.execute()
|
|
196
|
+
|
|
197
|
+
return result.map((row) => row.workflow)
|
|
198
|
+
}
|
|
199
|
+
|
|
200
|
+
async getWorkflowVersion(
|
|
201
|
+
name: string,
|
|
202
|
+
graphHash: string
|
|
203
|
+
): Promise<{ graph: any; source: string } | null> {
|
|
204
|
+
const row = await this.db
|
|
205
|
+
.selectFrom('workflow_versions')
|
|
206
|
+
.select(['graph', 'source'])
|
|
207
|
+
.where('workflow_name', '=', name)
|
|
208
|
+
.where('graph_hash', '=', graphHash)
|
|
209
|
+
.executeTakeFirst()
|
|
210
|
+
|
|
211
|
+
if (!row) return null
|
|
212
|
+
return {
|
|
213
|
+
graph: parseJson(row.graph),
|
|
214
|
+
source: row.source,
|
|
215
|
+
}
|
|
216
|
+
}
|
|
217
|
+
|
|
218
|
+
async deleteRun(id: string): Promise<boolean> {
|
|
219
|
+
const result = await this.db
|
|
220
|
+
.deleteFrom('workflow_runs')
|
|
221
|
+
.where('workflow_run_id', '=', id)
|
|
222
|
+
.executeTakeFirst()
|
|
223
|
+
|
|
224
|
+
return BigInt(result.numDeletedRows) > 0n
|
|
225
|
+
}
|
|
226
|
+
|
|
227
|
+
private mapRunRow(row: any): WorkflowRun {
|
|
228
|
+
return {
|
|
229
|
+
id: row.workflow_run_id as string,
|
|
230
|
+
workflow: row.workflow as string,
|
|
231
|
+
status: row.status as WorkflowStatus,
|
|
232
|
+
input: parseJson(row.input),
|
|
233
|
+
output: parseJson(row.output),
|
|
234
|
+
error: parseJson(row.error),
|
|
235
|
+
inline: row.inline as boolean | undefined,
|
|
236
|
+
graphHash: row.graph_hash as string | undefined,
|
|
237
|
+
wire: parseJson(row.wire) ?? { type: 'unknown' },
|
|
238
|
+
createdAt: new Date(row.created_at as string),
|
|
239
|
+
updatedAt: new Date(row.updated_at as string),
|
|
240
|
+
}
|
|
241
|
+
}
|
|
242
|
+
}
|