@lota-sdk/core 0.4.1 → 0.4.2

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,371 @@
1
+ import { ArtifactRecordSchema, GetArtifactResultSchema, PublishArtifactArgsSchema } from '@lota-sdk/shared'
2
+ import type {
3
+ ArtifactRecord,
4
+ ArtifactReference,
5
+ ArtifactStatus,
6
+ ArtifactVersionSummary,
7
+ GetArtifactResult,
8
+ PublishArtifactArgs,
9
+ } from '@lota-sdk/shared'
10
+
11
+ import type { RecordIdInput } from '../db/record-id'
12
+ import { ensureRecordId, recordIdToString } from '../db/record-id'
13
+ import { databaseService } from '../db/service'
14
+ import type { DatabaseTransaction } from '../db/service'
15
+ import { TABLES } from '../db/tables'
16
+ import { generatedDocumentStorageService } from '../storage/generated-document-storage.service'
17
+ import { toIsoDateTimeString } from '../utils/date-time'
18
+
19
// Upper bound on publish attempts when a retryable conflict is detected
// (see isRetryablePublishError and the retry loop in publishArtifact).
const ARTIFACT_PUBLISH_MAX_ATTEMPTS = 5
20
+
21
+ function describePublishInputShape(value: unknown): string {
22
+ if (typeof value === 'string') {
23
+ return `string(${value.length})`
24
+ }
25
+
26
+ if (Array.isArray(value)) {
27
+ const firstEntry: unknown = value[0]
28
+ const firstType =
29
+ firstEntry === undefined
30
+ ? 'empty'
31
+ : Array.isArray(firstEntry)
32
+ ? 'array'
33
+ : firstEntry === null
34
+ ? 'null'
35
+ : typeof firstEntry
36
+ return `array(len=${value.length}, first=${firstType})`
37
+ }
38
+
39
+ if (value === null) {
40
+ return 'null'
41
+ }
42
+
43
+ if (typeof value === 'object') {
44
+ return `object(keys=${Object.keys(value).sort().join(',')})`
45
+ }
46
+
47
+ return typeof value
48
+ }
49
+
50
+ function formatPublishError(error: unknown): string {
51
+ if (error instanceof Error && error.message.trim()) {
52
+ return error.message
53
+ }
54
+
55
+ return typeof error === 'string' ? error : JSON.stringify(error)
56
+ }
57
+
58
+ function slugify(value: string): string {
59
+ const normalized = value
60
+ .trim()
61
+ .toLowerCase()
62
+ .replace(/[^a-z0-9]+/g, '-')
63
+ .replace(/^-+|-+$/g, '')
64
+ return normalized || 'artifact'
65
+ }
66
+
67
+ function encodeStorageSegment(value: string): string {
68
+ return Buffer.from(value, 'utf8').toString('base64url') || 'artifact'
69
+ }
70
+
71
/**
 * Stable sha256 hex digest of a trimmed segment value. Used by
 * buildCanonicalKey to derive deterministic key parts without embedding the
 * raw deliverable/run/node identifiers in the key itself.
 * NOTE: relies on the Bun runtime's CryptoHasher API.
 */
function hashCanonicalSegment(value: string): string {
  return new Bun.CryptoHasher('sha256').update(value.trim()).digest('hex')
}
74
+
75
+ function buildCanonicalKey(
76
+ params: Pick<
77
+ PublishArtifactArgs,
78
+ 'canonicalKey' | 'title' | 'deliverableName' | 'sourcePlanNodeId' | 'sourcePlanRunId'
79
+ >,
80
+ ): string {
81
+ const explicit = params.canonicalKey?.trim()
82
+ if (explicit) {
83
+ return explicit
84
+ }
85
+
86
+ if (params.deliverableName?.trim()) {
87
+ const parts = ['plan-deliverable', `artifact-${hashCanonicalSegment(params.deliverableName)}`]
88
+ if (params.sourcePlanRunId?.trim()) {
89
+ parts.push(`run-${hashCanonicalSegment(params.sourcePlanRunId)}`)
90
+ }
91
+ if (params.sourcePlanNodeId?.trim()) {
92
+ parts.push(`node-${hashCanonicalSegment(params.sourcePlanNodeId)}`)
93
+ }
94
+ return parts.join(':')
95
+ }
96
+
97
+ return `${slugify(params.title)}-${Bun.randomUUIDv7()}`
98
+ }
99
+
100
+ function buildArtifactRelativePath(params: {
101
+ canonicalKey: string
102
+ version: number
103
+ title: string
104
+ publishAttemptId: string
105
+ }): string {
106
+ return `${encodeStorageSegment(params.canonicalKey)}/v${params.version}/${params.publishAttemptId}-${slugify(params.title)}.md`
107
+ }
108
+
109
+ function extractInternalReferences(content: string): ArtifactReference[] {
110
+ const matches = new Set<string>()
111
+ const markdownLinkPattern = /\]\((venturos:\/\/(?:artifact|document)\/[A-Za-z0-9:_-]+)\)/g
112
+ const bareLinkPattern = /\b(venturos:\/\/(?:artifact|document)\/[A-Za-z0-9:_-]+)\b/g
113
+
114
+ for (const pattern of [markdownLinkPattern, bareLinkPattern]) {
115
+ for (const match of content.matchAll(pattern)) {
116
+ const uri = match[1]?.trim()
117
+ if (uri) {
118
+ matches.add(uri)
119
+ }
120
+ }
121
+ }
122
+
123
+ return [...matches]
124
+ .map((uri) => {
125
+ if (uri.startsWith('venturos://artifact/')) {
126
+ return { uri, targetType: 'artifact' as const, targetId: uri.slice('venturos://artifact/'.length) }
127
+ }
128
+ if (uri.startsWith('venturos://document/')) {
129
+ return { uri, targetType: 'document' as const, targetId: uri.slice('venturos://document/'.length) }
130
+ }
131
+ return null
132
+ })
133
+ .filter((reference): reference is ArtifactReference => reference !== null)
134
+ }
135
+
136
+ function toVersionSummary(record: ArtifactRecord): ArtifactVersionSummary {
137
+ return {
138
+ id: recordIdToString(record.id, TABLES.ARTIFACT),
139
+ title: record.title,
140
+ version: record.version,
141
+ status: record.status,
142
+ createdAt: toIsoDateTimeString(record.createdAt),
143
+ }
144
+ }
145
+
146
+ function isRetryablePublishError(error: unknown): boolean {
147
+ if (!(error instanceof Error)) {
148
+ return false
149
+ }
150
+
151
+ const message = error.message.toLowerCase()
152
+ return (
153
+ message.includes('conflict') ||
154
+ message.includes('duplicate') ||
155
+ message.includes('unique') ||
156
+ message.includes('already exists')
157
+ )
158
+ }
159
+
160
/**
 * Optional hooks for observing storage side effects during a transactional
 * publish, so a caller can track the written blob and clean it up if the
 * surrounding transaction fails after the storage write succeeded.
 */
interface PublishArtifactTransactionOptions {
  // Invoked right after the artifact blob is written to storage.
  onStorageWrite?: (storageKey: string) => void
  // Invoked when the blob was deleted during in-transaction error cleanup.
  onStorageCleanup?: (storageKey: string) => void
}
164
+
165
/**
 * Versioned artifact publishing and retrieval.
 *
 * Artifacts are grouped by (organizationId, canonicalKey); each publish writes
 * the content blob to document storage, creates a new DB record with the next
 * version number, and marks the previously active record as superseded.
 */
class ArtifactService {
  /**
   * Load every artifact record sharing a canonical key inside the given
   * transaction, newest version first (ORDER BY version DESC).
   */
  private async listArtifactsForCanonicalKeyInTransaction(params: {
    tx: DatabaseTransaction
    organizationId: string
    canonicalKey: string
  }): Promise<ArtifactRecord[]> {
    const records = await params.tx.query({
      query: `SELECT * FROM ${TABLES.ARTIFACT} WHERE organizationId = $organizationId AND canonicalKey = $canonicalKey ORDER BY version DESC`,
      bindings: {
        organizationId: ensureRecordId(params.organizationId, TABLES.ORGANIZATION),
        canonicalKey: params.canonicalKey,
      },
    })

    // Validate the raw query result before handing records to callers.
    return ArtifactRecordSchema.array().parse(records)
  }

  /**
   * Publish one artifact version inside an existing transaction.
   *
   * Sequence: validate args -> resolve canonical key -> compute next version
   * from existing records -> write the content blob to storage -> create the
   * DB record -> supersede the previously active version. If any DB step
   * fails after the storage write, the blob is deleted best-effort and the
   * error is rethrown.
   *
   * @param args    publish arguments; re-parsed here so direct callers get
   *                the same validation as publishArtifact
   * @param options optional hooks notified on storage write/cleanup
   * @throws Error when args fail schema validation or the create returns no record
   */
  async publishArtifactInTransaction(
    args: PublishArtifactArgs,
    tx: DatabaseTransaction,
    options: PublishArtifactTransactionOptions = {},
  ): Promise<ArtifactRecord> {
    let params: PublishArtifactArgs
    try {
      params = PublishArtifactArgsSchema.parse(args)
    } catch (error) {
      // Describe the shape (not the contents) of the rejected input for logs.
      throw new Error(
        `artifact service transaction input parse failed (${describePublishInputShape(args)}): ${formatPublishError(error)}`,
      )
    }
    const organizationRef = ensureRecordId(params.organizationId, TABLES.ORGANIZATION)
    const canonicalKey = buildCanonicalKey(params)
    const existing = await this.listArtifactsForCanonicalKeyInTransaction({
      tx,
      organizationId: params.organizationId,
      canonicalKey,
    })
    // Next version = highest existing version + 1 (1 when none exist).
    const latestVersion = existing.reduce((max, record) => Math.max(max, record.version), 0)
    const version = latestVersion + 1
    const publishAttemptId = Bun.randomUUIDv7()
    const references = extractInternalReferences(params.content)
    // Storage write happens before the DB create; the catch below undoes it
    // if the create/supersede steps fail.
    const stored = await generatedDocumentStorageService.writeTextArtifact({
      organizationId: params.organizationId,
      namespace: 'artifacts',
      relativePath: buildArtifactRelativePath({ canonicalKey, version, title: params.title, publishAttemptId }),
      content: params.content,
      mediaType: 'text/markdown',
    })
    options.onStorageWrite?.(stored.storageKey)

    try {
      const createdResult = await tx
        .create(TABLES.ARTIFACT)
        .content({
          organizationId: organizationRef,
          authorAgentId: params.authorAgentId,
          title: params.title,
          artifactKind: params.artifactKind,
          templateId: params.templateId,
          canonicalKey,
          version,
          status: 'active',
          storageKey: stored.storageKey,
          // Optional fields are spread in only when present so the stored
          // record omits them rather than persisting undefined.
          ...(params.description ? { description: params.description } : {}),
          tags: [...new Set(params.tags)],
          references,
          ...(params.sourceThreadId ? { sourceThreadId: ensureRecordId(params.sourceThreadId, TABLES.THREAD) } : {}),
          ...(params.sourcePlanRunId
            ? { sourcePlanRunId: ensureRecordId(params.sourcePlanRunId, TABLES.PLAN_RUN) }
            : {}),
          ...(params.sourcePlanNodeId ? { sourcePlanNodeId: params.sourcePlanNodeId } : {}),
        })
        .output('after')
      // The create call may return a single record or an array of one.
      const createdRecord: unknown = Array.isArray(createdResult) ? createdResult.at(0) : createdResult
      if (!createdRecord) {
        throw new Error(`Artifact create returned no record for canonical key ${canonicalKey}.`)
      }

      const created = ArtifactRecordSchema.parse(createdRecord)

      // Demote the previously active version and link it to its successor.
      const previousActive = existing.find((record) => record.status === 'active')
      if (previousActive) {
        await tx
          .update(ensureRecordId(previousActive.id, TABLES.ARTIFACT))
          .merge({ status: 'superseded', supersededBy: ensureRecordId(created.id, TABLES.ARTIFACT) })
          .output('after')
      }

      return created
    } catch (error) {
      // Best-effort blob cleanup; cleanup failure must not mask the original error.
      await generatedDocumentStorageService.deleteTextArtifact(stored.storageKey).catch(() => undefined)
      options.onStorageCleanup?.(stored.storageKey)
      throw error
    }
  }

  /**
   * Publish an artifact in its own transaction, retrying up to
   * ARTIFACT_PUBLISH_MAX_ATTEMPTS times on conflict-style failures (e.g. two
   * publishers racing for the same version number).
   *
   * The onStorageWrite/onStorageCleanup hooks track the blob written inside
   * the transaction so it can be deleted if the transaction itself fails
   * after the inner call succeeded (e.g. a commit error), without
   * double-deleting blobs already cleaned up by the inner catch.
   */
  async publishArtifact(args: PublishArtifactArgs): Promise<ArtifactRecord> {
    let params: PublishArtifactArgs
    try {
      params = PublishArtifactArgsSchema.parse(args)
    } catch (error) {
      throw new Error(
        `artifact service input parse failed (${describePublishInputShape(args)}): ${formatPublishError(error)}`,
      )
    }

    let lastError: unknown = null
    for (let attempt = 1; attempt <= ARTIFACT_PUBLISH_MAX_ATTEMPTS; attempt += 1) {
      let pendingStorageKey: string | null = null
      try {
        return await databaseService.withTransaction(
          async (tx) =>
            await this.publishArtifactInTransaction(params, tx, {
              onStorageWrite: (storageKey) => {
                pendingStorageKey = storageKey
              },
              onStorageCleanup: (storageKey) => {
                // Inner cleanup already deleted this blob; forget it so the
                // outer catch does not delete it a second time.
                if (pendingStorageKey === storageKey) {
                  pendingStorageKey = null
                }
              },
            }),
        )
      } catch (error) {
        const storageKeyToCleanup = pendingStorageKey
        pendingStorageKey = null
        if (storageKeyToCleanup !== null) {
          await generatedDocumentStorageService.deleteTextArtifact(storageKeyToCleanup).catch(() => undefined)
        }
        lastError = error
        if (!isRetryablePublishError(error) || attempt === ARTIFACT_PUBLISH_MAX_ATTEMPTS) {
          throw error
        }
      }
    }

    // Defensive: the loop always returns or throws, but keep a typed fallback.
    throw lastError instanceof Error ? lastError : new Error('Artifact publish failed.')
  }

  /** Fetch a single artifact record by id, or null when it does not exist. */
  async getArtifactRecord(artifactId: RecordIdInput): Promise<ArtifactRecord | null> {
    return await databaseService.findOne(
      TABLES.ARTIFACT,
      { id: ensureRecordId(artifactId, TABLES.ARTIFACT) },
      ArtifactRecordSchema,
    )
  }

  /**
   * List artifacts for an organization, newest first, optionally filtered by
   * canonical key and/or source thread at the query level.
   *
   * Status filtering happens in memory after the query: an explicit `status`
   * wins; otherwise `includeNonActive` returns everything; otherwise only
   * active records are returned (the default).
   */
  async listArtifacts(params: {
    organizationId: RecordIdInput
    canonicalKey?: string
    status?: ArtifactStatus
    includeNonActive?: boolean
    sourceThreadId?: RecordIdInput
    limit?: number
  }): Promise<ArtifactRecord[]> {
    const records = await databaseService.findMany(
      TABLES.ARTIFACT,
      {
        organizationId: ensureRecordId(params.organizationId, TABLES.ORGANIZATION),
        ...(params.canonicalKey ? { canonicalKey: params.canonicalKey } : {}),
        ...(params.sourceThreadId ? { sourceThreadId: ensureRecordId(params.sourceThreadId, TABLES.THREAD) } : {}),
      },
      ArtifactRecordSchema,
      { orderBy: 'createdAt', orderDir: 'DESC', ...(params.limit ? { limit: params.limit } : {}) },
    )

    if (params.status) {
      return records.filter((record) => record.status === params.status)
    }

    if (params.includeNonActive) {
      return records
    }

    return records.filter((record) => record.status === 'active')
  }

  /**
   * Fetch one artifact with its content, full version history, and backlinks.
   * Content/versions/backlinks are loaded concurrently.
   *
   * @throws Error when no artifact exists for the given id
   */
  async getArtifact(artifactId: RecordIdInput): Promise<GetArtifactResult> {
    const artifact = await this.getArtifactRecord(artifactId)
    if (!artifact) {
      throw new Error(`Artifact not found: ${recordIdToString(artifactId, TABLES.ARTIFACT)}`)
    }

    const [content, versions, backlinks] = await Promise.all([
      generatedDocumentStorageService.readTextArtifact(artifact.storageKey),
      this.listArtifacts({
        organizationId: artifact.organizationId,
        canonicalKey: artifact.canonicalKey,
        includeNonActive: true,
      }),
      this.listBacklinks(artifact),
    ])

    return GetArtifactResultSchema.parse({ artifact, content, versions: versions.map(toVersionSummary), backlinks })
  }

  /**
   * Find artifacts in the same organization whose extracted references point
   * at the given artifact. Scans all records (including non-active) in
   * memory — NOTE(review): this is O(all org artifacts) per call.
   */
  async listBacklinks(artifact: Pick<ArtifactRecord, 'id' | 'organizationId'>): Promise<ArtifactRecord[]> {
    const artifactId = recordIdToString(artifact.id, TABLES.ARTIFACT)
    const records = await this.listArtifacts({ organizationId: artifact.organizationId, includeNonActive: true })

    return records.filter((record) =>
      record.references.some((reference) => reference.targetType === 'artifact' && reference.targetId === artifactId),
    )
  }
}
370
+
371
// Shared singleton; ArtifactService holds no instance state, so one instance
// is safe to reuse across the process.
export const artifactService = new ArtifactService()
@@ -306,12 +306,14 @@ class AutonomousJobService {
306
306
  jobId: buildAutonomousAtJobId(jobId),
307
307
  })
308
308
 
309
- await databaseService.update(
310
- TABLES.AUTONOMOUS_JOB_RUN,
311
- queuedRun.id,
312
- { queueJobId: ensureRecordId(enqueueResult.queueJobId, TABLES.QUEUE_JOB) },
313
- AutonomousJobRunRowSchema,
314
- )
309
+ if (enqueueResult.queueJobId) {
310
+ await databaseService.update(
311
+ TABLES.AUTONOMOUS_JOB_RUN,
312
+ queuedRun.id,
313
+ { queueJobId: ensureRecordId(enqueueResult.queueJobId, TABLES.QUEUE_JOB) },
314
+ AutonomousJobRunRowSchema,
315
+ )
316
+ }
315
317
  } else {
316
318
  const { upsertAutonomousJobScheduler } = await import('../queues/autonomous-job.queue')
317
319
  await upsertAutonomousJobScheduler({ autonomousJobId: jobId, schedule: row.schedule })
@@ -477,12 +479,14 @@ class AutonomousJobService {
477
479
  jobId: buildAutonomousManualJobId(recordIdToString(row.id, TABLES.AUTONOMOUS_JOB)),
478
480
  })
479
481
 
480
- await databaseService.update(
481
- TABLES.AUTONOMOUS_JOB_RUN,
482
- queuedRun.id,
483
- { queueJobId: ensureRecordId(enqueueResult.queueJobId, TABLES.QUEUE_JOB) },
484
- AutonomousJobRunRowSchema,
485
- )
482
+ if (enqueueResult.queueJobId) {
483
+ await databaseService.update(
484
+ TABLES.AUTONOMOUS_JOB_RUN,
485
+ queuedRun.id,
486
+ { queueJobId: ensureRecordId(enqueueResult.queueJobId, TABLES.QUEUE_JOB) },
487
+ AutonomousJobRunRowSchema,
488
+ )
489
+ }
486
490
 
487
491
  return this.toPublicRun(await this.getRunRow(queuedRun.id))
488
492
  }
@@ -15,6 +15,7 @@ import type {
15
15
  } from '@lota-sdk/shared'
16
16
  import {
17
17
  PlanCheckpointSchema,
18
+ PlanDraftSchema,
18
19
  PlanEventSchema,
19
20
  PlanNodeRunSchema,
20
21
  PlanNodeSpecRecordSchema,
@@ -247,7 +248,7 @@ class ExecutionPlanService {
247
248
  leadAgentId: string
248
249
  input: PlanDraft
249
250
  }): Promise<ExecutionPlanToolResultData> {
250
- const preparedDraft = planBuilderService.prepareDraft(params.input)
251
+ const preparedDraft = planBuilderService.prepareDraft(PlanDraftSchema.parse(params.input))
251
252
  const validation = planValidatorService.validateDraft(preparedDraft)
252
253
  if (validation.blocking.length > 0) {
253
254
  throw new Error(`Plan draft failed validation: ${aggregateBlockingIssues(validation.blocking)}`)
@@ -336,18 +337,8 @@ class ExecutionPlanService {
336
337
  }
337
338
 
338
339
  const activeSpec = await planRunService.getPlanSpecById(activeRun.planSpecId)
339
- const preparedDraft = planBuilderService.prepareDraft({
340
- title: params.input.title,
341
- objective: params.input.objective,
342
- nodes: params.input.nodes,
343
- edges: params.input.edges,
344
- entryNodeIds: params.input.entryNodeIds,
345
- schemas: params.input.schemas,
346
- defaultExecutionVisibility: params.input.defaultExecutionVisibility,
347
- executionMode: params.input.executionMode,
348
- schedule: params.input.schedule,
349
- dependencies: params.input.dependencies,
350
- })
340
+ const { runId: _runId, reason: _reason, ...draftInput } = params.input
341
+ const preparedDraft = planBuilderService.prepareDraft(PlanDraftSchema.parse(draftInput))
351
342
  const validation = planValidatorService.validateDraft(preparedDraft)
352
343
  if (validation.blocking.length > 0) {
353
344
  throw new Error(`Plan draft failed validation: ${aggregateBlockingIssues(validation.blocking)}`)
@@ -1,6 +1,7 @@
1
1
  export * from './adaptive-playbook.service'
2
2
  export * from './agent-executor.service'
3
3
  export * from './agent-activity.service'
4
+ export * from './artifact.service'
4
5
  export * from './artifact-provenance.service'
5
6
  export * from './attachment.service'
6
7
  export * from './autonomous-job.service'
@@ -74,7 +74,11 @@ function toArtifactSubmission(artifact: PlanArtifactRecord): PlanArtifactSubmiss
74
74
  name: artifact.name,
75
75
  kind: artifact.kind,
76
76
  ...(artifact.description ? { description: artifact.description } : {}),
77
+ ...(artifact.content !== undefined ? { content: artifact.content } : {}),
77
78
  ...(artifact.payload !== undefined ? { payload: artifact.payload } : {}),
79
+ ...(artifact.publishedArtifactId
80
+ ? { publishedArtifactId: recordIdToString(artifact.publishedArtifactId, TABLES.ARTIFACT) }
81
+ : {}),
78
82
  }
79
83
  }
80
84
 
@@ -28,9 +28,15 @@ class PlanArtifactService {
28
28
  nodeId: params.nodeId,
29
29
  name: artifact.name,
30
30
  kind: artifact.kind,
31
- pointer: `artifact://${params.nodeId}/${artifact.name}`,
31
+ pointer: artifact.publishedArtifactId
32
+ ? `venturos://artifact/${artifact.publishedArtifactId}`
33
+ : `artifact://${params.nodeId}/${artifact.name}`,
32
34
  ...(artifact.description ? { description: artifact.description } : {}),
35
+ ...(artifact.content ? { content: artifact.content } : {}),
33
36
  ...(artifact.payload ? { payload: artifact.payload } : {}),
37
+ ...(artifact.publishedArtifactId
38
+ ? { publishedArtifactId: ensureRecordId(artifact.publishedArtifactId, TABLES.ARTIFACT) }
39
+ : {}),
34
40
  })
35
41
  .output('after')
36
42
 
@@ -1,6 +1,8 @@
1
1
  import type { PlanArtifactRecord, PlanDependency } from '@lota-sdk/shared'
2
2
 
3
3
  import { serverLogger } from '../config/logger'
4
+ import { recordIdToString } from '../db/record-id'
5
+ import { TABLES } from '../db/tables'
4
6
  import type { PlanValidationIssueInput } from './plan-validator.service'
5
7
 
6
8
  export interface DependencyResolutionResult {
@@ -13,7 +15,7 @@ class PlanCoordinationService {
13
15
  /**
14
16
  * Resolve cross-plan artifact dependencies.
15
17
  * For each dependency:
16
- * 1. Find the source plan by title in the thread
18
+ * 1. Find the source plan by spec id in the thread
17
19
  * 2. Find the artifact by (nodeId, artifactName) in that plan's run
18
20
  * 3. Check staleness if maxStalenessMs set
19
21
  * 4. Based on triggerMode:
@@ -30,14 +32,14 @@ class PlanCoordinationService {
30
32
  const resolved = new Map<string, PlanArtifactRecord>()
31
33
  const unresolved: PlanDependency[] = []
32
34
  const notifications: DependencyResolutionResult['notifications'] = []
35
+ const specs = await planRunService.listPlanSpecsByThread(params.threadId)
33
36
 
34
37
  for (const dep of params.dependencies) {
35
- const depKey = `${dep.sourcePlanTitle}:${dep.sourceNodeId}:${dep.artifactName}`
38
+ const depKey = `${dep.sourcePlanSpecId}:${dep.sourceNodeId}:${dep.artifactName}`
36
39
 
37
- const specs = await planRunService.listPlanSpecsByThread(params.threadId)
38
- const sourceSpec = specs.find((s) => s.title === dep.sourcePlanTitle)
40
+ const sourceSpec = specs.find((s) => recordIdToString(s.id, TABLES.PLAN_SPEC) === dep.sourcePlanSpecId)
39
41
  if (!sourceSpec) {
40
- const reason = `Source plan "${dep.sourcePlanTitle}" not found in thread.`
42
+ const reason = `Source plan "${dep.sourcePlanSpecId}" not found in thread.`
41
43
  if (dep.triggerMode === 'block') {
42
44
  unresolved.push(dep)
43
45
  } else if (dep.triggerMode === 'notify') {
@@ -51,7 +53,7 @@ class PlanCoordinationService {
51
53
  const runs = await planRunService.listRunsBySpec(sourceSpec.id)
52
54
  const activeRun = runs.find((r) => r.status === 'completed' || r.status === 'running')
53
55
  if (!activeRun) {
54
- const reason = `No active run found for plan "${dep.sourcePlanTitle}".`
56
+ const reason = `No active run found for plan "${sourceSpec.title}".`
55
57
  if (dep.triggerMode === 'block') {
56
58
  unresolved.push(dep)
57
59
  } else if (dep.triggerMode === 'notify') {
@@ -65,7 +67,7 @@ class PlanCoordinationService {
65
67
  const artifact = artifacts.find((a) => a.nodeId === dep.sourceNodeId && a.name === dep.artifactName)
66
68
 
67
69
  if (!artifact) {
68
- const reason = `Artifact "${dep.artifactName}" not found on node "${dep.sourceNodeId}" in plan "${dep.sourcePlanTitle}".`
70
+ const reason = `Artifact "${dep.artifactName}" not found on node "${dep.sourceNodeId}" in plan "${sourceSpec.title}".`
69
71
  if (dep.triggerMode === 'block') {
70
72
  unresolved.push(dep)
71
73
  } else if (dep.triggerMode === 'notify') {
@@ -76,7 +78,7 @@ class PlanCoordinationService {
76
78
  }
77
79
 
78
80
  if (dep.maxStalenessMs && this.isStale(artifact, dep.maxStalenessMs)) {
79
- const reason = `Artifact "${dep.artifactName}" from plan "${dep.sourcePlanTitle}" is stale.`
81
+ const reason = `Artifact "${dep.artifactName}" from plan "${sourceSpec.title}" is stale.`
80
82
  if (dep.triggerMode === 'block') {
81
83
  unresolved.push(dep)
82
84
  continue
@@ -102,23 +104,26 @@ class PlanCoordinationService {
102
104
 
103
105
  /**
104
106
  * Validate no circular dependencies exist using Kahn's algorithm.
105
- * Build adjacency: planTitle -> depends on planTitles
107
+ * Build adjacency: planSpecId -> depends on upstream planSpecIds
106
108
  * Run topological sort; if not all visited -> cycle exists.
107
109
  */
108
- validateNoCycles(specs: Array<{ title: string; dependencies?: PlanDependency[] }>): PlanValidationIssueInput[] {
110
+ validateNoCycles(
111
+ specs: Array<{ id: string; title?: string; dependencies?: PlanDependency[] }>,
112
+ ): PlanValidationIssueInput[] {
109
113
  const adj = new Map<string, Set<string>>()
110
114
  const inDegree = new Map<string, number>()
115
+ const labels = new Map(specs.map((spec) => [spec.id, spec.title ?? spec.id]))
111
116
 
112
117
  for (const spec of specs) {
113
- if (!adj.has(spec.title)) adj.set(spec.title, new Set())
114
- if (!inDegree.has(spec.title)) inDegree.set(spec.title, 0)
118
+ if (!adj.has(spec.id)) adj.set(spec.id, new Set())
119
+ if (!inDegree.has(spec.id)) inDegree.set(spec.id, 0)
115
120
 
116
121
  for (const dep of spec.dependencies ?? []) {
117
- if (!adj.has(dep.sourcePlanTitle)) adj.set(dep.sourcePlanTitle, new Set())
118
- if (!inDegree.has(dep.sourcePlanTitle)) inDegree.set(dep.sourcePlanTitle, 0)
122
+ if (!adj.has(dep.sourcePlanSpecId)) adj.set(dep.sourcePlanSpecId, new Set())
123
+ if (!inDegree.has(dep.sourcePlanSpecId)) inDegree.set(dep.sourcePlanSpecId, 0)
119
124
 
120
- adj.get(dep.sourcePlanTitle)?.add(spec.title)
121
- inDegree.set(spec.title, (inDegree.get(spec.title) ?? 0) + 1)
125
+ adj.get(dep.sourcePlanSpecId)?.add(spec.id)
126
+ inDegree.set(spec.id, (inDegree.get(spec.id) ?? 0) + 1)
122
127
  }
123
128
  }
124
129
 
@@ -136,14 +141,14 @@ class PlanCoordinationService {
136
141
  }
137
142
  }
138
143
 
139
- const unvisited = specs.filter((s) => !visited.has(s.title))
144
+ const unvisited = specs.filter((s) => !visited.has(s.id))
140
145
  if (unvisited.length === 0) return []
141
146
 
142
147
  return [
143
148
  {
144
149
  severity: 'blocking',
145
150
  code: 'circular_dependency',
146
- message: `Circular plan dependencies detected involving: ${unvisited.map((s) => s.title).join(', ')}`,
151
+ message: `Circular plan dependencies detected involving: ${unvisited.map((s) => labels.get(s.id) ?? s.id).join(', ')}`,
147
152
  },
148
153
  ]
149
154
  }