@geekbeer/minion 3.34.0 → 3.40.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (41)
  1. package/core/db/helpers.js +18 -0
  2. package/core/db/index.js +146 -0
  3. package/core/db/migrations/000_initial_schema.js +157 -0
  4. package/core/db/migrations/001_fts_trigram.js +78 -0
  5. package/core/db/migrations/002_emails_fts.js +41 -0
  6. package/core/db/migrations/003_memories_project_id.js +17 -0
  7. package/core/db/migrations/004_chat_sessions_workspace.js +18 -0
  8. package/core/db/migrations/005_todos_session_injection.js +19 -0
  9. package/core/db/migrations/006_daily_logs_workspace.js +69 -0
  10. package/core/db/migrations/007_workspace_scoping.js +29 -0
  11. package/core/db/migrations/008_todos_workspace.js +22 -0
  12. package/core/db/migrations/index.js +41 -0
  13. package/core/lib/config-warnings.js +16 -8
  14. package/core/lib/dag-step-poller.js +59 -16
  15. package/core/lib/end-of-day.js +30 -14
  16. package/core/lib/reflection-scheduler.js +23 -9
  17. package/core/lib/thread-watcher.js +3 -0
  18. package/core/routes/daily-logs.js +64 -27
  19. package/core/routes/routines.js +6 -2
  20. package/core/routes/skills.js +4 -0
  21. package/core/routes/todos.js +20 -7
  22. package/core/routes/workflows.js +17 -7
  23. package/core/stores/daily-log-store.js +61 -30
  24. package/core/stores/execution-store.js +40 -18
  25. package/core/stores/routine-store.js +32 -14
  26. package/core/stores/todo-store.js +37 -10
  27. package/core/stores/workflow-store.js +34 -13
  28. package/docs/api-reference.md +80 -36
  29. package/docs/task-guides.md +51 -4
  30. package/linux/routes/chat.js +16 -10
  31. package/linux/routes/directives.js +4 -0
  32. package/linux/routine-runner.js +24 -5
  33. package/linux/workflow-runner.js +38 -12
  34. package/package.json +4 -2
  35. package/rules/core.md +4 -0
  36. package/scripts/new-migration.js +53 -0
  37. package/win/routes/chat.js +16 -10
  38. package/win/routes/directives.js +4 -0
  39. package/win/routine-runner.js +2 -0
  40. package/win/workflow-runner.js +5 -3
  41. package/core/db.js +0 -583
@@ -107,19 +107,19 @@ async function executeWorkflowSession(workflow, executionId, skillNames, options
107
107
  contractContext += '| Field | Type | Required | Description |\n|-------|------|----------|-------------|\n'
108
108
  for (const f of ic.contract.fields || []) {
109
109
  const typeDisplay = f.type === 'array' && f.items ? `array<${f.items}>` : f.type
110
- contractContext += `| ${f.key} | ${typeDisplay} | ${f.required ? 'Yes' : 'No'} | ${f.description || ''} |\n`
110
+ contractContext += `| ${f.name} | ${typeDisplay} | ${f.required ? 'Yes' : 'No'} | ${f.description || ''} |\n`
111
111
  }
112
112
  }
113
113
  contractContext += '\n'
114
114
  }
115
115
  if (options.dagOutputContracts && options.dagOutputContracts.length > 0) {
116
- contractContext += '## Output Contract\nYour output MUST conform to the following contract(s). Include all required fields in your execution report.\n'
116
+ contractContext += '## Output Contract (REQUIRED)\nEnd your execution report with a "## Output Data" section containing a single `json` code block whose content conforms **exactly** to the contract(s) below. HQ validates this JSON on completion; any missing required field or type mismatch fails the node.\n\nIf the skill\'s natural output does not match this shape, add a transform node upstream instead of reshaping inside the skill.\n\n'
117
117
  for (const oc of options.dagOutputContracts) {
118
118
  contractContext += `### ${oc.contract_name}\n${oc.contract.description || ''}\n`
119
119
  contractContext += '| Field | Type | Required | Description |\n|-------|------|----------|-------------|\n'
120
120
  for (const f of oc.contract.fields || []) {
121
121
  const typeDisplay = f.type === 'array' && f.items ? `array<${f.items}>` : f.type
122
- contractContext += `| ${f.key} | ${typeDisplay} | ${f.required ? 'Yes' : 'No'} | ${f.description || ''} |\n`
122
+ contractContext += `| ${f.name} | ${typeDisplay} | ${f.required ? 'Yes' : 'No'} | ${f.description || ''} |\n`
123
123
  }
124
124
  }
125
125
  contractContext += '\n'
@@ -178,17 +178,34 @@ async function executeWorkflowSession(workflow, executionId, skillNames, options
178
178
  throw new Error('No LLM configured. Set a Primary plugin via /api/llm/config or LLM_COMMAND in minion.env')
179
179
  }
180
180
 
181
- // Create tmux session with the LLM command.
181
+ // Create the tmux session as an empty shell first, then configure it,
182
+ // then inject the command via send-keys.
183
+ //
184
+ // Why two steps instead of `tmux new-session -d <cmd>`: if the LLM
185
+ // command exits very quickly (auth failure, missing binary, etc.) the
186
+ // session dies before we can call `set-option remain-on-exit on`, and
187
+ // the subsequent set-option/pipe-pane calls fail with "no such
188
+ // window". DAG transform nodes exposed this race because ephemeral
189
+ // skills start immediately and some failure modes (invalid prompt,
190
+ // missing LLM config) return instantly.
191
+ //
192
+ // Writing the invocation to a script file also insulates the command
193
+ // from shell-escaping edge cases when send-keys types it into the
194
+ // session.
195
+ const execScript = path.join(os.tmpdir(), `minion-workflow-exec-${sessionName}.sh`)
196
+ await fs.writeFile(
197
+ execScript,
198
+ `#!/bin/bash\n${llmCommand}\necho $? > ${exitCodeFile}\n`,
199
+ 'utf-8',
200
+ )
201
+ await execAsync(`chmod +x "${execScript}"`)
202
+
182
203
  // PATH, HOME, DISPLAY, and minion secrets are already set in
183
204
  // process.env at server startup, so child processes inherit them automatically.
184
- const tmuxCommand = [
185
- 'tmux new-session -d',
186
- `-s "${sessionName}"`,
187
- '-x 200 -y 50',
188
- `"${llmCommand}; echo $? > ${exitCodeFile}"`,
189
- ].join(' ')
190
-
191
- await execAsync(tmuxCommand, { cwd: homeDir })
205
+ await execAsync(
206
+ `tmux new-session -d -s "${sessionName}" -x 200 -y 50`,
207
+ { cwd: homeDir },
208
+ )
192
209
 
193
210
  // Keep session alive after command completes (for debugging via terminal mirror)
194
211
  await execAsync(`tmux set-option -t "${sessionName}" remain-on-exit on`)
@@ -203,6 +220,13 @@ async function executeWorkflowSession(workflow, executionId, skillNames, options
203
220
  // Continue execution even if pipe-pane fails
204
221
  }
205
222
 
223
+ // Now that remain-on-exit and pipe-pane are in place, inject the
224
+ // actual command. A fast-failing command will still be captured in the
225
+ // log and the exit code file will be written.
226
+ await execAsync(
227
+ `tmux send-keys -t "${sessionName}" "bash ${execScript}" Enter`,
228
+ )
229
+
206
230
  console.log(`[WorkflowRunner] Started tmux session: ${sessionName}`)
207
231
 
208
232
  // Wait for session to complete (poll for exit code file)
@@ -309,6 +333,7 @@ async function runWorkflow(workflow, options = {}) {
309
333
  skill_name: pipelineSkillNames.join(' → '),
310
334
  workflow_id: workflow.id,
311
335
  workflow_name: workflow.name,
336
+ workspace_id: workflow.workspace_id || '',
312
337
  status: 'running',
313
338
  outcome: null,
314
339
  started_at: startedAt,
@@ -331,6 +356,7 @@ async function runWorkflow(workflow, options = {}) {
331
356
  skill_name: pipelineSkillNames.join(' → '),
332
357
  workflow_id: workflow.id,
333
358
  workflow_name: workflow.name,
359
+ workspace_id: workflow.workspace_id || '',
334
360
  status: result.success ? 'completed' : 'failed',
335
361
  outcome,
336
362
  started_at: startedAt,
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@geekbeer/minion",
3
- "version": "3.34.0",
3
+ "version": "3.40.1",
4
4
  "description": "AI Agent runtime for Minion - manages status and skill deployment on VPS",
5
5
  "main": "linux/server.js",
6
6
  "bin": {
@@ -19,12 +19,14 @@
19
19
  "roles/",
20
20
  "docs/",
21
21
  "settings/",
22
+ "scripts/",
22
23
  ".env.example"
23
24
  ],
24
25
  "scripts": {
25
26
  "start": "node linux/server.js",
26
27
  "start:win": "node win/server.js",
27
- "postinstall": "node postinstall.js"
28
+ "postinstall": "node postinstall.js",
29
+ "db:migration:new": "node scripts/new-migration.js"
28
30
  },
29
31
  "dependencies": {
30
32
  "croner": "^9.0.0",
package/rules/core.md CHANGED
@@ -32,6 +32,9 @@ Minion
32
32
  - ノード/エッジ/contract の追加・更新は**個別API** (`/nodes` `/edges` `/contracts`) を使うこと。`PUT /dag-workflows/:id` による graph 全文PUTは型の取り違えが起きやすく、バリデーションエラーで 400 が返る
33
33
  - `edge.contract` は**単一のContract名(string)**のみ。配列は不可。複数の型構造を束ねたい場合は、それらを内包する複合Contractを1つ定義する
34
34
  - Contract内で `List<別Contract>` を表現するには `type: 'array'` + `items: "別Contract名"` を使う(詳細は `~/.minion/docs/api-reference.md` の「Contracts API」を参照)
35
+ - **Contract はランタイムで強制される型定義**。`node-complete` 報告時に outgoing edge の contract で `output_data` が検証され、違反はノード `failed` 扱い。スキルが contract に沿った `## Output Data` を出せない場合は **transform ノードをスキルと下流の間に挟んで整形**すること。スキル側の SKILL.md を各ワークフロー専用に改修するのは原則 NG(スキルは汎用資産)
36
+ - **transform ノードの I/O 型は edge の contract から自動導出**。incoming edge と outgoing edge にそれぞれ contract を必ず貼ること。`transform_instruction` は contract だけで意図が伝わらない場合の補足ヒント(任意)
37
+ - **fan_out の incoming edge に contract を貼る場合**、`fan_out_source` が指すフィールドが contract 内に `type='array'` として宣言されている必要がある(静的検証で弾かれる)
35
38
  - **Routine**: ミニオンスコープ。ミニオンローカルの定期タスク。
36
39
  - **Workspace**: ミニオンは複数のワークスペースに所属でき、スキルやプロジェクトはワークスペース単位でスコープされる。チャットセッションもワークスペース別に分離される。所属ワークスペースはハートビートで自動同期され、`hq list workspaces` で確認できる。
37
40
  - ミニオンは複数プロジェクトに `pm`、`engineer`、`accountant` として参加できる。
@@ -215,6 +218,7 @@ Routine 実行中は以下もtmuxセッション環境で利用可能:
215
218
  - **Todoは「1往復で完了する粒度」に分解して登録する。** 大きいタスクは複数のTodoに分ける。粒度を小さく保てば、完了マークを圧縮に奪われにくい。
216
219
  - **完了したら即座に done にマークする。** まとめて更新しない。`PUT /api/todos/:id` で `status=done`。
217
220
  - **チャットセッション内で作成するTodoには必ず `session_id` を含める。** プロンプト冒頭の `[現在のチャットセッションID]` の値を使う。紐づいた未完了Todoは次ターン以降に自動で再掲される(圧縮を跨いでも失われない)。
221
+ - **`POST /api/todos` は `workspace_id` が必須(v3.39.0〜)。** プロンプト冒頭の `[現在のワークスペース]` のID値を使う。未所属の場合は空文字 `""` を明示的に渡す。workspace_id無しのリクエストは400エラーになる。
218
222
  - **再掲されたTodoを見たら、着手前に「既に完了していないか」を確認する。** 完了済みなら done に更新、未完なら続行。
219
223
  - 同一Todoが規定回数以上再掲されても未完了のままの場合、再掲は自動停止する。進展しないTodoはブロッカーとして起票するか手動で `cancelled` にすること。
220
224
 
@@ -0,0 +1,53 @@
1
+ #!/usr/bin/env node
2
+ /**
3
+ * Scaffold a new DB migration file with a timestamp prefix.
4
+ *
5
+ * Usage: npm run db:migration:new <snake_case_name>
6
+ */
7
+
8
+ const fs = require('fs')
9
+ const path = require('path')
10
+
11
+ const name = process.argv[2]
12
+ if (!name) {
13
+ console.error('Usage: npm run db:migration:new <snake_case_name>')
14
+ process.exit(1)
15
+ }
16
+
17
+ if (!/^[a-z][a-z0-9_]*$/.test(name)) {
18
+ console.error(`Invalid name "${name}". Use snake_case (lowercase letters, digits, underscores).`)
19
+ process.exit(1)
20
+ }
21
+
22
+ const now = new Date()
23
+ const pad = n => String(n).padStart(2, '0')
24
+ const ts = `${now.getUTCFullYear()}${pad(now.getUTCMonth() + 1)}${pad(now.getUTCDate())}${pad(now.getUTCHours())}${pad(now.getUTCMinutes())}${pad(now.getUTCSeconds())}`
25
+ const version = Number(ts)
26
+ const filename = `${ts}_${name}.js`
27
+ const filepath = path.join(__dirname, '..', 'core', 'db', 'migrations', filename)
28
+
29
+ if (fs.existsSync(filepath)) {
30
+ console.error(`File already exists: ${filepath}`)
31
+ process.exit(1)
32
+ }
33
+
34
+ const template = `/**
35
+ * TODO: describe what this migration does and why.
36
+ */
37
+
38
+ module.exports = {
39
+ version: ${version},
40
+ name: '${name}',
41
+
42
+ up(db, { hasColumn, tableExists }) {
43
+ // Add an idempotent guard so re-runs (e.g., after a partial failure) are safe:
44
+ // if (hasColumn(db, 'some_table', 'some_column')) return
45
+ //
46
+ // Then apply the change:
47
+ // db.exec(\`ALTER TABLE some_table ADD COLUMN some_column TEXT\`)
48
+ },
49
+ }
50
+ `
51
+
52
+ fs.writeFileSync(filepath, template)
53
+ console.log(`Created ${path.relative(process.cwd(), filepath)}`)
@@ -299,16 +299,21 @@ ${indexed}`
299
299
  return { success: true, carry_over: carryOver }
300
300
  })
301
301
 
302
- // POST /api/chat/end-of-day - Generate daily log + extract memories
302
+ // POST /api/chat/end-of-day - Generate daily log + extract memories for one workspace
303
303
  fastify.post('/api/chat/end-of-day', async (request, reply) => {
304
304
  if (!verifyToken(request)) {
305
305
  reply.code(401)
306
306
  return { success: false, error: 'Unauthorized' }
307
307
  }
308
308
 
309
- const { clear_session = false } = request.body || {}
309
+ const { workspace_id, clear_session = false } = request.body || {}
310
+ if (workspace_id !== '' && typeof workspace_id !== 'string') {
311
+ reply.code(400)
312
+ return { success: false, error: 'workspace_id is required' }
313
+ }
310
314
 
311
315
  const result = await runEndOfDay({
316
+ workspaceId: workspace_id,
312
317
  runQuickLlmCall,
313
318
  clearSession: clear_session,
314
319
  })
@@ -381,14 +386,14 @@ async function buildContextPrefix(message, context, sessionId, workspaceId) {
381
386
  '# メモリ詳細(IDを指定)',
382
387
  `curl -H "Authorization: Bearer $API_TOKEN" ${baseUrl}/api/memory/{id}`,
383
388
  '',
384
- '# デイリーログ検索',
385
- `curl -H "Authorization: Bearer $API_TOKEN" "${baseUrl}/api/daily-logs?search=キーワード"`,
389
+ '# デイリーログ検索(workspace_idは「現在のワークスペース」のIDを指定。未所属なら空文字)',
390
+ `curl -H "Authorization: Bearer $API_TOKEN" "${baseUrl}/api/daily-logs?workspace_id=現在のWSのID&search=キーワード"`,
386
391
  '',
387
- '# デイリーログ一覧',
388
- `curl -H "Authorization: Bearer $API_TOKEN" ${baseUrl}/api/daily-logs`,
392
+ '# デイリーログ一覧(現在のワークスペース)',
393
+ `curl -H "Authorization: Bearer $API_TOKEN" "${baseUrl}/api/daily-logs?workspace_id=現在のWSのID"`,
389
394
  '',
390
395
  '# 特定日のデイリーログ取得',
391
- `curl -H "Authorization: Bearer $API_TOKEN" ${baseUrl}/api/daily-logs/YYYY-MM-DD`,
396
+ `curl -H "Authorization: Bearer $API_TOKEN" "${baseUrl}/api/daily-logs/YYYY-MM-DD?workspace_id=現在のWSのID"`,
392
397
  '```',
393
398
  '',
394
399
  '参照すべきタイミング:',
@@ -429,10 +434,10 @@ async function buildContextPrefix(message, context, sessionId, workspaceId) {
429
434
  '',
430
435
  'ToDo APIの使い方:',
431
436
  '```bash',
432
- '# ToDo作成(session_idは後でセッションIDが判明してから設定)',
437
+ '# ToDo作成(workspace_idは必須。現在のワークスペースのIDを指定。未所属の場合は空文字 "")',
433
438
  `curl -X POST http://localhost:${port}/api/todos \\`,
434
439
  ' -H "Authorization: Bearer $API_TOKEN" -H "Content-Type: application/json" \\',
435
- ' -d \'{"title": "ステップの説明", "session_id": "SESSION_ID", "priority": "normal"}\'',
440
+ ' -d \'{"title": "ステップの説明", "workspace_id": "現在のWSのID", "session_id": "SESSION_ID", "priority": "normal"}\'',
436
441
  '',
437
442
  '# ToDo完了',
438
443
  `curl -X PUT http://localhost:${port}/api/todos/{id} \\`,
@@ -502,7 +507,8 @@ async function buildContextPrefix(message, context, sessionId, workspaceId) {
502
507
  ` hq dag remove-edge ${context.dagWorkflowId} <edge-id> # エッジ削除`,
503
508
  ` hq dag validate ${context.dagWorkflowId} # ドラフト検証(公開せず)`,
504
509
  ` hq publish dag-workflow ${context.dagWorkflowId} # 公開`,
505
- `Contract編集時の重要な規則: edge.contract は単一Contract名(string)のみ、配列不可。List<X> は type:"array" + items:"X" で表現。詳細は ~/.minion/docs/api-reference.md の「Contracts API」参照。`,
510
+ `Contract編集時の重要な規則: edge.contract は単一Contract名(string)のみ、配列不可。List<X> は type:"array" + items:"X" で表現。**contract.fields[] の各要素は { "name": "...", "type": "...", ... } の形式で、"key" ではなく "name" を使うこと**(JSON Schema等の慣習に引きずられないように)。詳細は ~/.minion/docs/api-reference.md の「Contracts API」参照。`,
511
+ `Contract はランタイムで強制される型: node-complete 時に outgoing edge の contract で output_data が検証され、違反はノード failed 扱い。transform は contract 同士のブリッジ(I/O 型は edge の contract から自動導出、transform_instruction は optional hint)。スキルが contract に沿った ## Output Data を出せない場合はスキルと下流の間に transform を挟んで整形すること。`,
506
512
  `graph JSON 全文PUTは非推奨: hq put dag-workflow ${context.dagWorkflowId} <body.json>`,
507
513
  `新規作成は: hq create dag-workflow <body.json>`,
508
514
  `プロジェクト内の DAG ワークフロー一覧: hq list dag-workflows ${context.projectId}`,
@@ -67,12 +67,14 @@ async function directiveRoutes(fastify) {
67
67
  const startedAt = new Date().toISOString()
68
68
  const logFile = logManager.getLogPath(effectiveExecutionId)
69
69
  const workflowName = context?.workflow_name || skill_name
70
+ const workspaceId = context?.workspace_id || ''
70
71
 
71
72
  await executionStore.save({
72
73
  id: effectiveExecutionId,
73
74
  skill_name,
74
75
  workflow_id: null,
75
76
  workflow_name: workflowName,
77
+ workspace_id: workspaceId,
76
78
  status: 'running',
77
79
  outcome: null,
78
80
  started_at: startedAt,
@@ -91,6 +93,7 @@ async function directiveRoutes(fastify) {
91
93
  id: effectiveExecutionId,
92
94
  name: workflowName,
93
95
  pipeline_skill_names: [skill_name],
96
+ workspace_id: workspaceId,
94
97
  }, { skipExecutionReport: true })
95
98
 
96
99
  console.log(`[Directive] Execution completed: ${skill_name} (success: ${result.execution_id ? 'yes' : 'no'})`)
@@ -101,6 +104,7 @@ async function directiveRoutes(fastify) {
101
104
  skill_name,
102
105
  workflow_id: null,
103
106
  workflow_name: workflowName,
107
+ workspace_id: workspaceId,
104
108
  status: 'failed',
105
109
  outcome: 'failure',
106
110
  started_at: startedAt,
@@ -208,6 +208,7 @@ async function runRoutine(routine) {
208
208
  skill_name: pipelineSkillNames.join(' -> '),
209
209
  routine_id: routine.id,
210
210
  routine_name: routine.name,
211
+ workspace_id: routine.workspace_id || '',
211
212
  status: 'running',
212
213
  outcome: null,
213
214
  started_at: startedAt,
@@ -225,6 +226,7 @@ async function runRoutine(routine) {
225
226
  skill_name: pipelineSkillNames.join(' -> '),
226
227
  routine_id: routine.id,
227
228
  routine_name: routine.name,
229
+ workspace_id: routine.workspace_id || '',
228
230
  status: result.success ? 'completed' : 'failed',
229
231
  outcome: result.success ? null : 'failure',
230
232
  started_at: startedAt,
@@ -114,19 +114,19 @@ async function executeWorkflowSession(workflow, executionId, skillNames, options
114
114
  contractContext += '| Field | Type | Required | Description |\n|-------|------|----------|-------------|\n'
115
115
  for (const f of ic.contract.fields || []) {
116
116
  const typeDisplay = f.type === 'array' && f.items ? `array<${f.items}>` : f.type
117
- contractContext += `| ${f.key} | ${typeDisplay} | ${f.required ? 'Yes' : 'No'} | ${f.description || ''} |\n`
117
+ contractContext += `| ${f.name} | ${typeDisplay} | ${f.required ? 'Yes' : 'No'} | ${f.description || ''} |\n`
118
118
  }
119
119
  }
120
120
  contractContext += '\n'
121
121
  }
122
122
  if (options.dagOutputContracts && options.dagOutputContracts.length > 0) {
123
- contractContext += '## Output Contract\nYour output MUST conform to the following contract(s). Include all required fields in your execution report.\n'
123
+ contractContext += '## Output Contract (REQUIRED)\nEnd your execution report with a "## Output Data" section containing a single `json` code block whose content conforms **exactly** to the contract(s) below. HQ validates this JSON on completion; any missing required field or type mismatch fails the node.\n\nIf the skill\'s natural output does not match this shape, add a transform node upstream instead of reshaping inside the skill.\n\n'
124
124
  for (const oc of options.dagOutputContracts) {
125
125
  contractContext += `### ${oc.contract_name}\n${oc.contract.description || ''}\n`
126
126
  contractContext += '| Field | Type | Required | Description |\n|-------|------|----------|-------------|\n'
127
127
  for (const f of oc.contract.fields || []) {
128
128
  const typeDisplay = f.type === 'array' && f.items ? `array<${f.items}>` : f.type
129
- contractContext += `| ${f.key} | ${typeDisplay} | ${f.required ? 'Yes' : 'No'} | ${f.description || ''} |\n`
129
+ contractContext += `| ${f.name} | ${typeDisplay} | ${f.required ? 'Yes' : 'No'} | ${f.description || ''} |\n`
130
130
  }
131
131
  }
132
132
  contractContext += '\n'
@@ -303,6 +303,7 @@ async function runWorkflow(workflow, options = {}) {
303
303
  skill_name: pipelineSkillNames.join(' -> '),
304
304
  workflow_id: workflow.id,
305
305
  workflow_name: workflow.name,
306
+ workspace_id: workflow.workspace_id || '',
306
307
  status: 'running',
307
308
  outcome: null,
308
309
  started_at: startedAt,
@@ -322,6 +323,7 @@ async function runWorkflow(workflow, options = {}) {
322
323
  skill_name: pipelineSkillNames.join(' -> '),
323
324
  workflow_id: workflow.id,
324
325
  workflow_name: workflow.name,
326
+ workspace_id: workflow.workspace_id || '',
325
327
  status: result.success ? 'completed' : 'failed',
326
328
  outcome,
327
329
  started_at: startedAt,