opencastle 0.22.0 → 0.23.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/cli/convoy/engine.d.ts +1 -0
- package/dist/cli/convoy/engine.d.ts.map +1 -1
- package/dist/cli/convoy/engine.js +1 -0
- package/dist/cli/convoy/engine.js.map +1 -1
- package/dist/cli/convoy/export.d.ts +1 -0
- package/dist/cli/convoy/export.d.ts.map +1 -1
- package/dist/cli/convoy/export.js +34 -0
- package/dist/cli/convoy/export.js.map +1 -1
- package/dist/cli/convoy/pipeline.d.ts +35 -0
- package/dist/cli/convoy/pipeline.d.ts.map +1 -0
- package/dist/cli/convoy/pipeline.js +353 -0
- package/dist/cli/convoy/pipeline.js.map +1 -0
- package/dist/cli/convoy/pipeline.test.d.ts +2 -0
- package/dist/cli/convoy/pipeline.test.d.ts.map +1 -0
- package/dist/cli/convoy/pipeline.test.js +778 -0
- package/dist/cli/convoy/pipeline.test.js.map +1 -0
- package/dist/cli/convoy/store.d.ts +14 -2
- package/dist/cli/convoy/store.d.ts.map +1 -1
- package/dist/cli/convoy/store.js +84 -5
- package/dist/cli/convoy/store.js.map +1 -1
- package/dist/cli/convoy/store.test.js +216 -7
- package/dist/cli/convoy/store.test.js.map +1 -1
- package/dist/cli/convoy/types.d.ts +15 -0
- package/dist/cli/convoy/types.d.ts.map +1 -1
- package/dist/cli/dashboard.d.ts.map +1 -1
- package/dist/cli/dashboard.js +1 -0
- package/dist/cli/dashboard.js.map +1 -1
- package/dist/cli/init.d.ts.map +1 -1
- package/dist/cli/init.js +8 -1
- package/dist/cli/init.js.map +1 -1
- package/dist/cli/run/schema.d.ts +5 -1
- package/dist/cli/run/schema.d.ts.map +1 -1
- package/dist/cli/run/schema.js +41 -8
- package/dist/cli/run/schema.js.map +1 -1
- package/dist/cli/run/schema.test.js +194 -5
- package/dist/cli/run/schema.test.js.map +1 -1
- package/dist/cli/run.d.ts.map +1 -1
- package/dist/cli/run.js +143 -3
- package/dist/cli/run.js.map +1 -1
- package/dist/cli/types.d.ts +3 -1
- package/dist/cli/types.d.ts.map +1 -1
- package/package.json +1 -1
- package/src/cli/convoy/engine.ts +2 -0
- package/src/cli/convoy/export.ts +41 -0
- package/src/cli/convoy/pipeline.test.ts +939 -0
- package/src/cli/convoy/pipeline.ts +430 -0
- package/src/cli/convoy/store.test.ts +239 -7
- package/src/cli/convoy/store.ts +110 -7
- package/src/cli/convoy/types.ts +17 -0
- package/src/cli/dashboard.ts +1 -0
- package/src/cli/init.ts +9 -1
- package/src/cli/run/schema.test.ts +244 -5
- package/src/cli/run/schema.ts +49 -8
- package/src/cli/run.ts +142 -3
- package/src/cli/types.ts +3 -1
- package/src/dashboard/dist/_astro/{index.DyyaCW8L.css → index.Cq68OHaZ.css} +1 -1
- package/src/dashboard/dist/index.html +214 -2
- package/src/dashboard/node_modules/.vite/deps/_metadata.json +6 -6
- package/src/dashboard/src/pages/index.astro +230 -1
- package/src/dashboard/src/styles/dashboard.css +116 -0
- package/src/orchestrator/customizations/KNOWN-ISSUES.md +1 -1
- package/src/orchestrator/skills/decomposition/SKILL.md +1 -0
- package/src/orchestrator/skills/orchestration-protocols/SKILL.md +32 -1
|
@@ -0,0 +1,430 @@
|
|
|
1
|
+
import { readFile } from 'node:fs/promises'
|
|
2
|
+
import { mkdirSync } from 'node:fs'
|
|
3
|
+
import { resolve, dirname, relative, isAbsolute, sep } from 'node:path'
|
|
4
|
+
import { execFile as execFileCb } from 'node:child_process'
|
|
5
|
+
import { promisify } from 'node:util'
|
|
6
|
+
import type { TaskSpec, AgentAdapter } from '../types.js'
|
|
7
|
+
import { parseTaskSpecText } from '../run/schema.js'
|
|
8
|
+
import { createConvoyStore } from './store.js'
|
|
9
|
+
import {
|
|
10
|
+
createConvoyEngine,
|
|
11
|
+
type ConvoyEngine,
|
|
12
|
+
type ConvoyResult,
|
|
13
|
+
type ConvoyEngineOptions,
|
|
14
|
+
} from './engine.js'
|
|
15
|
+
import { exportPipelineToNdjson } from './export.js'
|
|
16
|
+
import type { PipelineStatus } from './types.js'
|
|
17
|
+
import { formatDuration } from '../run/executor.js'
|
|
18
|
+
|
|
19
|
+
// Promise-returning wrapper around child_process.execFile, used for git queries.
const execFile = promisify(execFileCb)
|
|
20
|
+
|
|
21
|
+
// ── Public interfaces ─────────────────────────────────────────────────────────
|
|
22
|
+
|
|
23
|
+
/** Aggregate outcome of one pipeline run (or resume). */
export interface PipelineResult {
  /** Store identifier of the pipeline record (`pipeline-<epoch-ms>`). */
  pipelineId: string
  /** Final status — 'failed' when any convoy failed or gate-failed, else 'done'. */
  status: PipelineStatus
  /** Per-convoy results in execution order; skipped convoys are counted but not listed here. */
  convoyResults: ConvoyResult[]
  /** Convoy-level counts; `totalConvoys` includes skipped convoys. */
  summary: {
    totalConvoys: number
    completed: number
    failed: number
    skipped: number
  }
  /** Formatted wall-clock duration of the run (via formatDuration). */
  duration: string
  /** Token usage summed across convoys; omitted when no convoy reported a cost. */
  cost?: { total_tokens: number }
}
|
|
36
|
+
|
|
37
|
+
/** Drives an ordered set of convoy specs as a single persisted pipeline. */
export interface PipelineOrchestrator {
  /** Run the pipeline from scratch, creating a new pipeline record in the store. */
  run(): Promise<PipelineResult>
  /** Resume a previously started pipeline by its store id; throws if the id is unknown. */
  resume(pipelineId: string): Promise<PipelineResult>
}
|
|
41
|
+
|
|
42
|
+
/** Options for {@link createPipelineOrchestrator}. */
export interface PipelineOrchestratorOptions {
  /** Parsed pipeline spec (branch, depends_on_convoy paths, optional inline tasks). */
  spec: TaskSpec
  /** Raw YAML text of the pipeline spec; stored verbatim with the pipeline record. */
  specYaml: string
  /** Agent adapter handed through to every convoy engine. */
  adapter: AgentAdapter
  /** Project root; defaults to process.cwd(). Convoy spec paths resolve against it. */
  basePath?: string
  /** Convoy store path; defaults to `<basePath>/.opencastle/convoy.db`. */
  dbPath?: string
  /** Directory passed to the engines and the NDJSON exporter. */
  logsDir?: string
  /** When true, spec-load errors are reported on stderr. */
  verbose?: boolean
  /** Injectable engine factory (used in tests). */
  _createConvoyEngine?: (opts: ConvoyEngineOptions) => ConvoyEngine
}
|
|
53
|
+
|
|
54
|
+
// ── Internal helpers ──────────────────────────────────────────────────────────
|
|
55
|
+
|
|
56
|
+
function aggregateTokens(results: ConvoyResult[]): number | undefined {
|
|
57
|
+
let total: number | undefined
|
|
58
|
+
for (const r of results) {
|
|
59
|
+
if (r.cost?.total_tokens != null) {
|
|
60
|
+
total = (total ?? 0) + r.cost.total_tokens
|
|
61
|
+
}
|
|
62
|
+
}
|
|
63
|
+
return total
|
|
64
|
+
}
|
|
65
|
+
|
|
66
|
+
function buildSummary(results: ConvoyResult[], skippedCount: number) {
|
|
67
|
+
const completed = results.filter(r => r.status === 'done').length
|
|
68
|
+
const failed = results.filter(
|
|
69
|
+
r => r.status === 'failed' || r.status === 'gate-failed',
|
|
70
|
+
).length
|
|
71
|
+
return {
|
|
72
|
+
totalConvoys: results.length + skippedCount,
|
|
73
|
+
completed,
|
|
74
|
+
failed,
|
|
75
|
+
skipped: skippedCount,
|
|
76
|
+
}
|
|
77
|
+
}
|
|
78
|
+
|
|
79
|
+
// ── Factory ───────────────────────────────────────────────────────────────────
|
|
80
|
+
|
|
81
|
+
export function createPipelineOrchestrator(
|
|
82
|
+
options: PipelineOrchestratorOptions,
|
|
83
|
+
): PipelineOrchestrator {
|
|
84
|
+
const { spec, specYaml, adapter, verbose = false } = options
|
|
85
|
+
const basePath = resolve(options.basePath ?? process.cwd())
|
|
86
|
+
const dbPath = options.dbPath ?? resolve(basePath, '.opencastle', 'convoy.db')
|
|
87
|
+
const engineFactory = options._createConvoyEngine ?? createConvoyEngine
|
|
88
|
+
|
|
89
|
+
async function getCurrentBranch(): Promise<string> {
|
|
90
|
+
try {
|
|
91
|
+
const { stdout } = await execFile('git', ['rev-parse', '--abbrev-ref', 'HEAD'], {
|
|
92
|
+
cwd: basePath,
|
|
93
|
+
})
|
|
94
|
+
return stdout.trim()
|
|
95
|
+
} catch {
|
|
96
|
+
return 'main'
|
|
97
|
+
}
|
|
98
|
+
}
|
|
99
|
+
|
|
100
|
+
/** Validate and resolve a convoy spec path, preventing path traversal. */
|
|
101
|
+
function resolveSpecPath(specPath: string): string {
|
|
102
|
+
if (isAbsolute(specPath)) {
|
|
103
|
+
throw new Error(`Convoy spec path must be relative: "${specPath}"`)
|
|
104
|
+
}
|
|
105
|
+
const absPath = resolve(basePath, specPath)
|
|
106
|
+
const rel = relative(basePath, absPath)
|
|
107
|
+
if (rel.startsWith('..') || rel.startsWith('..' + sep)) {
|
|
108
|
+
throw new Error(`Convoy spec path escapes project directory: "${specPath}"`)
|
|
109
|
+
}
|
|
110
|
+
return absPath
|
|
111
|
+
}
|
|
112
|
+
|
|
113
|
+
/** Run a single convoy spec file as part of a pipeline, linking it with pipelineId. */
|
|
114
|
+
async function runConvoySpecFile(
|
|
115
|
+
specPath: string,
|
|
116
|
+
pipelineId: string,
|
|
117
|
+
branch: string,
|
|
118
|
+
): Promise<ConvoyResult> {
|
|
119
|
+
const absPath = resolveSpecPath(specPath)
|
|
120
|
+
const convoyYaml = await readFile(absPath, 'utf8')
|
|
121
|
+
const convoySpec = parseTaskSpecText(convoyYaml)
|
|
122
|
+
const overriddenSpec: TaskSpec = { ...convoySpec, branch }
|
|
123
|
+
|
|
124
|
+
const engine = engineFactory({
|
|
125
|
+
spec: overriddenSpec,
|
|
126
|
+
specYaml: convoyYaml,
|
|
127
|
+
adapter,
|
|
128
|
+
basePath,
|
|
129
|
+
dbPath,
|
|
130
|
+
logsDir: options.logsDir,
|
|
131
|
+
verbose,
|
|
132
|
+
pipelineId,
|
|
133
|
+
})
|
|
134
|
+
|
|
135
|
+
return engine.run()
|
|
136
|
+
}
|
|
137
|
+
|
|
138
|
+
async function run(): Promise<PipelineResult> {
|
|
139
|
+
const startTime = Date.now()
|
|
140
|
+
const pipelineId = `pipeline-${startTime}`
|
|
141
|
+
const branch = spec.branch ?? (await getCurrentBranch())
|
|
142
|
+
const convoySpecs = spec.depends_on_convoy ?? []
|
|
143
|
+
|
|
144
|
+
mkdirSync(dirname(dbPath), { recursive: true })
|
|
145
|
+
const store = createConvoyStore(dbPath)
|
|
146
|
+
try {
|
|
147
|
+
store.insertPipeline({
|
|
148
|
+
id: pipelineId,
|
|
149
|
+
name: spec.name,
|
|
150
|
+
status: 'pending',
|
|
151
|
+
branch,
|
|
152
|
+
spec_yaml: specYaml,
|
|
153
|
+
convoy_specs: JSON.stringify(convoySpecs),
|
|
154
|
+
created_at: new Date().toISOString(),
|
|
155
|
+
})
|
|
156
|
+
store.updatePipelineStatus(pipelineId, 'running', {
|
|
157
|
+
started_at: new Date().toISOString(),
|
|
158
|
+
})
|
|
159
|
+
} finally {
|
|
160
|
+
store.close()
|
|
161
|
+
}
|
|
162
|
+
|
|
163
|
+
const convoyResults: ConvoyResult[] = []
|
|
164
|
+
let skippedCount = 0
|
|
165
|
+
let pipelineHalted = false
|
|
166
|
+
|
|
167
|
+
try {
|
|
168
|
+
for (const specPath of convoySpecs) {
|
|
169
|
+
if (pipelineHalted) {
|
|
170
|
+
skippedCount++
|
|
171
|
+
continue
|
|
172
|
+
}
|
|
173
|
+
|
|
174
|
+
let convoyResult: ConvoyResult
|
|
175
|
+
try {
|
|
176
|
+
convoyResult = await runConvoySpecFile(specPath, pipelineId, branch)
|
|
177
|
+
} catch (err) {
|
|
178
|
+
if (verbose) {
|
|
179
|
+
process.stderr.write(
|
|
180
|
+
`Pipeline error loading convoy spec "${specPath}": ${(err as Error).message}\n`,
|
|
181
|
+
)
|
|
182
|
+
}
|
|
183
|
+
// Treat spec load failure as a convoy failure
|
|
184
|
+
convoyResult = {
|
|
185
|
+
convoyId: `failed-${specPath}`,
|
|
186
|
+
status: 'failed',
|
|
187
|
+
summary: { total: 0, done: 0, failed: 1, skipped: 0, timedOut: 0 },
|
|
188
|
+
duration: '0ms',
|
|
189
|
+
}
|
|
190
|
+
}
|
|
191
|
+
convoyResults.push(convoyResult)
|
|
192
|
+
|
|
193
|
+
const isFailed =
|
|
194
|
+
convoyResult.status === 'failed' || convoyResult.status === 'gate-failed'
|
|
195
|
+
if (isFailed && spec.on_failure === 'stop') {
|
|
196
|
+
pipelineHalted = true
|
|
197
|
+
}
|
|
198
|
+
}
|
|
199
|
+
|
|
200
|
+
// Hybrid pipeline: if the spec itself has tasks, run them as a final convoy
|
|
201
|
+
if (!pipelineHalted && spec.tasks && spec.tasks.length > 0) {
|
|
202
|
+
const hybridEngine = engineFactory({
|
|
203
|
+
spec: { ...spec, branch },
|
|
204
|
+
specYaml,
|
|
205
|
+
adapter,
|
|
206
|
+
basePath,
|
|
207
|
+
dbPath,
|
|
208
|
+
logsDir: options.logsDir,
|
|
209
|
+
verbose,
|
|
210
|
+
pipelineId,
|
|
211
|
+
})
|
|
212
|
+
const hybridResult = await hybridEngine.run()
|
|
213
|
+
convoyResults.push(hybridResult)
|
|
214
|
+
}
|
|
215
|
+
} catch (err) {
|
|
216
|
+
// Unexpected error — finalize pipeline as failed
|
|
217
|
+
const failStore = createConvoyStore(dbPath)
|
|
218
|
+
try {
|
|
219
|
+
failStore.updatePipelineStatus(pipelineId, 'failed', {
|
|
220
|
+
finished_at: new Date().toISOString(),
|
|
221
|
+
})
|
|
222
|
+
} finally {
|
|
223
|
+
failStore.close()
|
|
224
|
+
}
|
|
225
|
+
throw err
|
|
226
|
+
}
|
|
227
|
+
|
|
228
|
+
const totalTokens = aggregateTokens(convoyResults)
|
|
229
|
+
const summary = buildSummary(convoyResults, skippedCount)
|
|
230
|
+
const finalStatus: PipelineStatus = summary.failed > 0 ? 'failed' : 'done'
|
|
231
|
+
const duration = formatDuration(Date.now() - startTime)
|
|
232
|
+
|
|
233
|
+
const updateStore = createConvoyStore(dbPath)
|
|
234
|
+
try {
|
|
235
|
+
updateStore.updatePipelineStatus(pipelineId, finalStatus, {
|
|
236
|
+
finished_at: new Date().toISOString(),
|
|
237
|
+
total_tokens: totalTokens ?? null,
|
|
238
|
+
})
|
|
239
|
+
} finally {
|
|
240
|
+
updateStore.close()
|
|
241
|
+
}
|
|
242
|
+
|
|
243
|
+
try {
|
|
244
|
+
const exportStore = createConvoyStore(dbPath)
|
|
245
|
+
try {
|
|
246
|
+
await exportPipelineToNdjson(exportStore, pipelineId, options.logsDir)
|
|
247
|
+
} finally {
|
|
248
|
+
exportStore.close()
|
|
249
|
+
}
|
|
250
|
+
} catch { /* silent */ }
|
|
251
|
+
|
|
252
|
+
return {
|
|
253
|
+
pipelineId,
|
|
254
|
+
status: finalStatus,
|
|
255
|
+
convoyResults,
|
|
256
|
+
summary,
|
|
257
|
+
duration,
|
|
258
|
+
cost: totalTokens != null ? { total_tokens: totalTokens } : undefined,
|
|
259
|
+
}
|
|
260
|
+
}
|
|
261
|
+
|
|
262
|
+
async function resume(pipelineId: string): Promise<PipelineResult> {
|
|
263
|
+
const startTime = Date.now()
|
|
264
|
+
|
|
265
|
+
const pipelineStore = createConvoyStore(dbPath)
|
|
266
|
+
let pipeline
|
|
267
|
+
try {
|
|
268
|
+
pipeline = pipelineStore.getPipeline(pipelineId)
|
|
269
|
+
} finally {
|
|
270
|
+
pipelineStore.close()
|
|
271
|
+
}
|
|
272
|
+
|
|
273
|
+
if (!pipeline) {
|
|
274
|
+
throw new Error(`Pipeline "${pipelineId}" not found in store`)
|
|
275
|
+
}
|
|
276
|
+
|
|
277
|
+
const convoySpecs: string[] = JSON.parse(pipeline.convoy_specs) as string[]
|
|
278
|
+
const branch = pipeline.branch ?? spec.branch ?? (await getCurrentBranch())
|
|
279
|
+
|
|
280
|
+
// Load all convoys linked to this pipeline, sorted by creation time
|
|
281
|
+
const convoyStore = createConvoyStore(dbPath)
|
|
282
|
+
let existingConvoys
|
|
283
|
+
try {
|
|
284
|
+
existingConvoys = convoyStore
|
|
285
|
+
.getConvoysByPipeline(pipelineId)
|
|
286
|
+
.sort((a, b) => a.created_at.localeCompare(b.created_at))
|
|
287
|
+
} finally {
|
|
288
|
+
convoyStore.close()
|
|
289
|
+
}
|
|
290
|
+
|
|
291
|
+
const convoyResults: ConvoyResult[] = []
|
|
292
|
+
let skippedCount = 0
|
|
293
|
+
let pipelineHalted = false
|
|
294
|
+
let existingIdx = 0
|
|
295
|
+
|
|
296
|
+
try {
|
|
297
|
+
for (const specPath of convoySpecs) {
|
|
298
|
+
const existing = existingConvoys[existingIdx]
|
|
299
|
+
|
|
300
|
+
if (existing && existing.status === 'done') {
|
|
301
|
+
// Already completed — reconstruct synthetic result
|
|
302
|
+
const taskStore = createConvoyStore(dbPath)
|
|
303
|
+
let tasks
|
|
304
|
+
try {
|
|
305
|
+
tasks = taskStore.getTasksByConvoy(existing.id)
|
|
306
|
+
} finally {
|
|
307
|
+
taskStore.close()
|
|
308
|
+
}
|
|
309
|
+
convoyResults.push({
|
|
310
|
+
convoyId: existing.id,
|
|
311
|
+
status: existing.status,
|
|
312
|
+
summary: {
|
|
313
|
+
total: tasks.length,
|
|
314
|
+
done: tasks.filter(t => t.status === 'done').length,
|
|
315
|
+
failed: tasks.filter(t => t.status === 'failed').length,
|
|
316
|
+
skipped: tasks.filter(t => t.status === 'skipped').length,
|
|
317
|
+
timedOut: tasks.filter(t => t.status === 'timed-out').length,
|
|
318
|
+
},
|
|
319
|
+
duration: '0ms',
|
|
320
|
+
cost:
|
|
321
|
+
existing.total_tokens != null
|
|
322
|
+
? { total_tokens: existing.total_tokens }
|
|
323
|
+
: undefined,
|
|
324
|
+
})
|
|
325
|
+
existingIdx++
|
|
326
|
+
continue
|
|
327
|
+
}
|
|
328
|
+
|
|
329
|
+
if (pipelineHalted) {
|
|
330
|
+
skippedCount++
|
|
331
|
+
continue
|
|
332
|
+
}
|
|
333
|
+
|
|
334
|
+
let convoyResult: ConvoyResult
|
|
335
|
+
|
|
336
|
+
if (existing && existing.status === 'running') {
|
|
337
|
+
// Resume the in-progress convoy
|
|
338
|
+
const absPath = resolveSpecPath(specPath)
|
|
339
|
+
const convoyYaml = await readFile(absPath, 'utf8')
|
|
340
|
+
const convoySpec = parseTaskSpecText(convoyYaml)
|
|
341
|
+
const overriddenSpec: TaskSpec = { ...convoySpec, branch }
|
|
342
|
+
|
|
343
|
+
const resumeEngine = engineFactory({
|
|
344
|
+
spec: overriddenSpec,
|
|
345
|
+
specYaml: convoyYaml,
|
|
346
|
+
adapter,
|
|
347
|
+
basePath,
|
|
348
|
+
dbPath,
|
|
349
|
+
logsDir: options.logsDir,
|
|
350
|
+
verbose,
|
|
351
|
+
pipelineId,
|
|
352
|
+
})
|
|
353
|
+
convoyResult = await resumeEngine.resume(existing.id)
|
|
354
|
+
existingIdx++
|
|
355
|
+
} else {
|
|
356
|
+
// Run fresh
|
|
357
|
+
try {
|
|
358
|
+
convoyResult = await runConvoySpecFile(specPath, pipelineId, branch)
|
|
359
|
+
} catch (err) {
|
|
360
|
+
if (verbose) {
|
|
361
|
+
process.stderr.write(
|
|
362
|
+
`Pipeline error loading convoy spec "${specPath}": ${(err as Error).message}\n`,
|
|
363
|
+
)
|
|
364
|
+
}
|
|
365
|
+
convoyResult = {
|
|
366
|
+
convoyId: `failed-${specPath}`,
|
|
367
|
+
status: 'failed',
|
|
368
|
+
summary: { total: 0, done: 0, failed: 1, skipped: 0, timedOut: 0 },
|
|
369
|
+
duration: '0ms',
|
|
370
|
+
}
|
|
371
|
+
}
|
|
372
|
+
}
|
|
373
|
+
|
|
374
|
+
convoyResults.push(convoyResult)
|
|
375
|
+
|
|
376
|
+
const isFailed =
|
|
377
|
+
convoyResult.status === 'failed' || convoyResult.status === 'gate-failed'
|
|
378
|
+
if (isFailed && spec.on_failure === 'stop') {
|
|
379
|
+
pipelineHalted = true
|
|
380
|
+
}
|
|
381
|
+
}
|
|
382
|
+
} catch (err) {
|
|
383
|
+
// Unexpected error — finalize pipeline as failed
|
|
384
|
+
const failStore = createConvoyStore(dbPath)
|
|
385
|
+
try {
|
|
386
|
+
failStore.updatePipelineStatus(pipelineId, 'failed', {
|
|
387
|
+
finished_at: new Date().toISOString(),
|
|
388
|
+
})
|
|
389
|
+
} finally {
|
|
390
|
+
failStore.close()
|
|
391
|
+
}
|
|
392
|
+
throw err
|
|
393
|
+
}
|
|
394
|
+
|
|
395
|
+
const totalTokens = aggregateTokens(convoyResults)
|
|
396
|
+
const summary = buildSummary(convoyResults, skippedCount)
|
|
397
|
+
const finalStatus: PipelineStatus = summary.failed > 0 ? 'failed' : 'done'
|
|
398
|
+
const duration = formatDuration(Date.now() - startTime)
|
|
399
|
+
|
|
400
|
+
const updateStore = createConvoyStore(dbPath)
|
|
401
|
+
try {
|
|
402
|
+
updateStore.updatePipelineStatus(pipelineId, finalStatus, {
|
|
403
|
+
finished_at: new Date().toISOString(),
|
|
404
|
+
total_tokens: totalTokens ?? null,
|
|
405
|
+
})
|
|
406
|
+
} finally {
|
|
407
|
+
updateStore.close()
|
|
408
|
+
}
|
|
409
|
+
|
|
410
|
+
try {
|
|
411
|
+
const exportStore = createConvoyStore(dbPath)
|
|
412
|
+
try {
|
|
413
|
+
await exportPipelineToNdjson(exportStore, pipelineId, options.logsDir)
|
|
414
|
+
} finally {
|
|
415
|
+
exportStore.close()
|
|
416
|
+
}
|
|
417
|
+
} catch { /* silent */ }
|
|
418
|
+
|
|
419
|
+
return {
|
|
420
|
+
pipelineId,
|
|
421
|
+
status: finalStatus,
|
|
422
|
+
convoyResults,
|
|
423
|
+
summary,
|
|
424
|
+
duration,
|
|
425
|
+
cost: totalTokens != null ? { total_tokens: totalTokens } : undefined,
|
|
426
|
+
}
|
|
427
|
+
}
|
|
428
|
+
|
|
429
|
+
return { run, resume }
|
|
430
|
+
}
|