@geekbeer/minion 3.34.0 → 3.40.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (41) hide show
  1. package/core/db/helpers.js +18 -0
  2. package/core/db/index.js +146 -0
  3. package/core/db/migrations/000_initial_schema.js +157 -0
  4. package/core/db/migrations/001_fts_trigram.js +78 -0
  5. package/core/db/migrations/002_emails_fts.js +41 -0
  6. package/core/db/migrations/003_memories_project_id.js +17 -0
  7. package/core/db/migrations/004_chat_sessions_workspace.js +18 -0
  8. package/core/db/migrations/005_todos_session_injection.js +19 -0
  9. package/core/db/migrations/006_daily_logs_workspace.js +69 -0
  10. package/core/db/migrations/007_workspace_scoping.js +29 -0
  11. package/core/db/migrations/008_todos_workspace.js +22 -0
  12. package/core/db/migrations/index.js +41 -0
  13. package/core/lib/config-warnings.js +16 -8
  14. package/core/lib/dag-step-poller.js +59 -16
  15. package/core/lib/end-of-day.js +30 -14
  16. package/core/lib/reflection-scheduler.js +23 -9
  17. package/core/lib/thread-watcher.js +3 -0
  18. package/core/routes/daily-logs.js +64 -27
  19. package/core/routes/routines.js +6 -2
  20. package/core/routes/skills.js +4 -0
  21. package/core/routes/todos.js +20 -7
  22. package/core/routes/workflows.js +17 -7
  23. package/core/stores/daily-log-store.js +61 -30
  24. package/core/stores/execution-store.js +40 -18
  25. package/core/stores/routine-store.js +32 -14
  26. package/core/stores/todo-store.js +37 -10
  27. package/core/stores/workflow-store.js +34 -13
  28. package/docs/api-reference.md +80 -36
  29. package/docs/task-guides.md +51 -4
  30. package/linux/routes/chat.js +16 -10
  31. package/linux/routes/directives.js +4 -0
  32. package/linux/routine-runner.js +24 -5
  33. package/linux/workflow-runner.js +38 -12
  34. package/package.json +4 -2
  35. package/rules/core.md +4 -0
  36. package/scripts/new-migration.js +53 -0
  37. package/win/routes/chat.js +16 -10
  38. package/win/routes/directives.js +4 -0
  39. package/win/routine-runner.js +2 -0
  40. package/win/workflow-runner.js +5 -3
  41. package/core/db.js +0 -583
@@ -2,32 +2,45 @@
2
2
  * Workflow Store (SQLite)
3
3
  * Persists workflow configurations to local SQLite database.
4
4
  * Allows minion to continue operating when HQ is offline.
5
+ *
6
+ * Each workflow is scoped by workspace_id (added in v3.38.0). '' means unassigned/legacy.
5
7
  */
6
8
 
7
9
  const crypto = require('crypto')
8
10
  const { getDb } = require('../db')
9
11
 
12
+ function normalizeWorkspaceId(workspaceId) {
13
+ return workspaceId == null ? '' : String(workspaceId)
14
+ }
15
+
10
16
  /**
11
- * Load all workflows
17
+ * Load workflows. If workspaceId is provided, filter to that workspace;
18
+ * otherwise returns all (used by the runner to load cron jobs across all workspaces).
19
+ * @param {string} [workspaceId] - Optional workspace filter
12
20
  * @returns {Array} Array of workflow objects
13
21
  */
14
- function load() {
22
+ function load(workspaceId) {
15
23
  const db = getDb()
16
- const rows = db.prepare('SELECT data FROM workflows ORDER BY name').all()
24
+ const rows = workspaceId !== undefined
25
+ ? db.prepare('SELECT data FROM workflows WHERE workspace_id = ? ORDER BY name').all(normalizeWorkspaceId(workspaceId))
26
+ : db.prepare('SELECT data FROM workflows ORDER BY name').all()
17
27
  return rows.map(r => JSON.parse(r.data))
18
28
  }
19
29
 
20
30
  /**
21
- * Save workflows (replace all)
31
+ * Save workflows (replace all). Each workflow's workspace_id is taken from
32
+ * `w.workspace_id` if present, else ''.
22
33
  * @param {Array} workflows - Array of workflow objects
23
34
  */
24
35
  function save(workflows) {
25
36
  const db = getDb()
26
37
  const tx = db.transaction(() => {
27
38
  db.prepare('DELETE FROM workflows').run()
28
- const insert = db.prepare('INSERT INTO workflows (id, name, data) VALUES (?, ?, ?)')
39
+ const insert = db.prepare('INSERT INTO workflows (id, name, workspace_id, data) VALUES (?, ?, ?, ?)')
29
40
  for (const w of workflows) {
30
- insert.run(w.id, w.name, JSON.stringify(w))
41
+ const wsId = normalizeWorkspaceId(w.workspace_id)
42
+ const stored = { ...w, workspace_id: wsId }
43
+ insert.run(w.id, w.name, wsId, JSON.stringify(stored))
31
44
  }
32
45
  })
33
46
  tx()
@@ -49,13 +62,14 @@ function updateLastRun(workflowId) {
49
62
  }
50
63
 
51
64
  /**
52
- * Find a workflow by name
65
+ * Find a workflow by name (cross-workspace; names are HQ-assigned slugs
66
+ * and are not guaranteed unique across workspaces, so first match wins).
53
67
  * @param {string} name - Workflow name (slug)
54
68
  * @returns {object|null} Workflow object or null
55
69
  */
56
70
  function findByName(name) {
57
71
  const db = getDb()
58
- const row = db.prepare('SELECT data FROM workflows WHERE name = ?').get(name)
72
+ const row = db.prepare('SELECT data FROM workflows WHERE name = ? ORDER BY name LIMIT 1').get(name)
59
73
  return row ? JSON.parse(row.data) : null
60
74
  }
61
75
 
@@ -63,11 +77,12 @@ function findByName(name) {
63
77
  * Upsert a workflow by name.
64
78
  * If exists: updates definition only (preserves schedule/local state).
65
79
  * If new: creates with inactive schedule.
66
- * @param {object} workflowData - { name, pipeline_skill_names, pipeline_steps, content, project_id }
67
- * @returns {Array} Updated workflows array
80
+ * @param {object} workflowData - { name, pipeline_skill_names, pipeline_steps, content, project_id, workspace_id }
81
+ * @returns {Array} All workflows (unfiltered)
68
82
  */
69
83
  function upsertByName(workflowData) {
70
84
  const db = getDb()
85
+ const wsId = normalizeWorkspaceId(workflowData.workspace_id)
71
86
  const existing = db.prepare('SELECT data FROM workflows WHERE name = ?').get(workflowData.name)
72
87
 
73
88
  if (existing) {
@@ -82,7 +97,12 @@ function upsertByName(workflowData) {
82
97
  if (workflowData.pipeline_steps !== undefined) {
83
98
  workflow.pipeline_steps = workflowData.pipeline_steps
84
99
  }
85
- db.prepare('UPDATE workflows SET data = ? WHERE id = ?').run(JSON.stringify(workflow), workflow.id)
100
+ if (workflowData.workspace_id !== undefined) {
101
+ workflow.workspace_id = wsId
102
+ }
103
+ db.prepare('UPDATE workflows SET workspace_id = ?, data = ? WHERE id = ?').run(
104
+ normalizeWorkspaceId(workflow.workspace_id), JSON.stringify(workflow), workflow.id
105
+ )
86
106
  } else {
87
107
  const workflow = {
88
108
  id: crypto.randomUUID(),
@@ -91,12 +111,13 @@ function upsertByName(workflowData) {
91
111
  pipeline_steps: workflowData.pipeline_steps || [],
92
112
  content: workflowData.content || '',
93
113
  project_id: workflowData.project_id || null,
114
+ workspace_id: wsId,
94
115
  cron_expression: '',
95
116
  is_active: false,
96
117
  last_run: null,
97
118
  }
98
- db.prepare('INSERT INTO workflows (id, name, data) VALUES (?, ?, ?)').run(
99
- workflow.id, workflow.name, JSON.stringify(workflow)
119
+ db.prepare('INSERT INTO workflows (id, name, workspace_id, data) VALUES (?, ?, ?, ?)').run(
120
+ workflow.id, workflow.name, wsId, JSON.stringify(workflow)
100
121
  )
101
122
  }
102
123
 
@@ -54,22 +54,40 @@ hq list workspaces
54
54
 
55
55
  ### Workflows
56
56
 
57
+ v3.38.0 以降、workflows はワークスペース単位でスコープされる。各workflowレコードは `workspace_id` を持ち、未指定は `""`(未所属/legacy)として扱われる。
58
+
57
59
  | Method | Endpoint | Description |
58
60
  |--------|----------|-------------|
59
- | GET | `/api/workflows` | List all local workflows with next_run |
60
- | POST | `/api/workflows` | Receive/upsert workflows (from HQ or local) |
61
+ | GET | `/api/workflows` | List local workflows with next_run. Optional `?workspace_id=` filter (omit for cross-workspace view) |
62
+ | POST | `/api/workflows` | Receive/upsert workflows (from HQ). Each incoming workflow should carry `workspace_id` |
61
63
  | POST | `/api/workflows/push/:name` | Push local workflow to HQ |
62
- | POST | `/api/workflows/fetch/:name` | Fetch workflow from HQ and deploy locally (+ pipeline skills) |
64
+ | POST | `/api/workflows/fetch/:name` | Fetch workflow from HQ and deploy locally (+ pipeline skills). HQ response includes `workspace_id` |
63
65
  | GET | `/api/workflows/remote` | List workflows on HQ |
64
66
  | DELETE | `/api/workflows/:id` | Remove a local workflow |
65
67
  | POST | `/api/workflows/trigger` | Manual trigger. Body: `{workflow_id}` |
66
68
 
69
+ ### Routines
70
+
71
+ v3.38.0 以降、routines はワークスペース単位でスコープされる。各routineレコードは `workspace_id` を持ち、未指定は `""`(未所属/legacy)として扱われる。
72
+
73
+ | Method | Endpoint | Description |
74
+ |--------|----------|-------------|
75
+ | GET | `/api/routines` | List local routines with next_run. Optional `?workspace_id=` filter (omit for cross-workspace view) |
76
+ | POST | `/api/routines` | Receive/upsert routines. Each incoming routine should carry `workspace_id` |
77
+ | POST | `/api/routines/sync` | Pull routines from HQ (HQ currently returns empty — routines are minion-local) |
78
+ | PUT | `/api/routines/:id/schedule` | Update cron/is_active |
79
+ | DELETE | `/api/routines/:id` | Remove a routine |
80
+ | POST | `/api/routines/bulk-toggle` | Set is_active for all routines |
81
+ | POST | `/api/routines/trigger` | Manual trigger. Body: `{routine_id}` or `{routine_name}` |
82
+
67
83
  ### Executions
68
84
 
85
+ v3.38.0 以降、executions は親 workflow / routine から `workspace_id` を継承する。
86
+
69
87
  | Method | Endpoint | Description |
70
88
  |--------|----------|-------------|
71
- | GET | `/api/executions` | List execution history (`?limit=50`, `?workflow_id=`) |
72
- | GET | `/api/executions/:id` | Get single execution |
89
+ | GET | `/api/executions` | List execution history (`?limit=50`, `?workflow_id=`, optional `?workspace_id=`) |
90
+ | GET | `/api/executions/:id` | Get single execution (includes `workspace_id`) |
73
91
  | GET | `/api/executions/:id/log` | Get execution log content (`?tail=N`) |
74
92
  | POST | `/api/executions/:id/outcome` | Report outcome (no auth). Body: `{outcome, summary?, details?}` |
75
93
 
@@ -176,22 +194,26 @@ Response (list):
176
194
 
177
195
  ### Daily Logs (Short-term Memory)
178
196
 
179
- Daily conversation summaries stored in SQLite (`$DATA_DIR/minion.db`).
180
- Generated via end-of-day processing or manual creation.
181
- Full-text search supported via FTS5.
197
+ Daily conversation summaries stored in SQLite (`$DATA_DIR/minion.db`), **scoped by workspace**.
198
+ One entry per `(workspace_id, date)` — multiple workspaces can have distinct logs for the same day.
199
+ Generated via end-of-day processing or manual creation. Full-text search supported via FTS5.
200
+
201
+ All endpoints require a `workspace_id` identifying the scope. Pass an empty string `""` to address
202
+ legacy / unassigned logs created before workspace scoping (v3.37.0).
182
203
 
183
204
  | Method | Endpoint | Description |
184
205
  |--------|----------|-------------|
185
- | GET | `/api/daily-logs` | List all logs (date + size, newest first) |
186
- | GET | `/api/daily-logs?search=keyword` | Full-text search on log content (FTS5) |
187
- | POST | `/api/daily-logs` | Create a daily log. Body: `{date, content}` |
188
- | GET | `/api/daily-logs/:date` | Get a specific day's log content |
189
- | PUT | `/api/daily-logs/:date` | Update a daily log. Body: `{content}` |
190
- | DELETE | `/api/daily-logs/:date` | Delete a specific day's log |
206
+ | GET | `/api/daily-logs?workspace_id=<id>` | List logs for a workspace (date + size, newest first) |
207
+ | GET | `/api/daily-logs?workspace_id=<id>&search=keyword` | FTS5 search scoped to a workspace |
208
+ | POST | `/api/daily-logs` | Create a daily log. Body: `{workspace_id, date, content}` |
209
+ | GET | `/api/daily-logs/:date?workspace_id=<id>` | Get a specific day's log for a workspace |
210
+ | PUT | `/api/daily-logs/:date` | Update a daily log. Body: `{workspace_id, content}` |
211
+ | DELETE | `/api/daily-logs/:date?workspace_id=<id>` | Delete a specific day's log |
191
212
 
192
213
  POST body:
193
214
  ```json
194
215
  {
216
+ "workspace_id": "ws_abc123",
195
217
  "date": "2026-03-12",
196
218
  "content": "## 今日やったこと\n- Feature X を実装\n- Bug Y を修正"
197
219
  }
@@ -200,6 +222,7 @@ POST body:
200
222
  PUT body:
201
223
  ```json
202
224
  {
225
+ "workspace_id": "ws_abc123",
203
226
  "content": "## 今日やったこと\n- Feature X を実装(更新版)"
204
227
  }
205
228
  ```
@@ -219,16 +242,26 @@ Response (list):
219
242
 
220
243
  | Method | Endpoint | Description |
221
244
  |--------|----------|-------------|
222
- | POST | `/api/chat/end-of-day` | Generate daily log + extract memories from conversation |
245
+ | POST | `/api/chat/end-of-day` | Generate daily log for one workspace + extract memories from its conversation |
223
246
 
224
- Body: `{ "clear_session": false }` (optional, defaults to false)
247
+ Body:
248
+ ```json
249
+ {
250
+ "workspace_id": "ws_abc123",
251
+ "clear_session": false
252
+ }
253
+ ```
254
+
255
+ `workspace_id` is required. If no conversation exists for that workspace that day, a stub log
256
+ ("本日、このワークスペースでの会話はありませんでした。") is saved so idle days are still recorded.
225
257
 
226
258
  Response:
227
259
  ```json
228
260
  {
229
261
  "success": true,
230
262
  "daily_log": "2026-03-12",
231
- "memory_entries_added": 2
263
+ "memory_entries_added": 2,
264
+ "had_conversation": true
232
265
  }
233
266
  ```
234
267
 
@@ -255,16 +288,22 @@ curl -X PUT /api/config/env \
255
288
  The scheduler starts automatically on server boot if `REFLECTION_TIME` is configured.
256
289
  Changes via the config API take effect immediately (no restart required).
257
290
 
291
+ On each fire, the scheduler iterates all known workspaces (plus the `""` legacy bucket) and runs
292
+ end-of-day processing per workspace. Workspaces without conversation that day get a stub log so
293
+ the idle day is recorded.
294
+
258
295
  ### Todos
259
296
 
260
297
  ミニオンローカルのToDoリスト。SQLiteに永続化され、HQにも同期される。
261
298
 
299
+ v3.39.0 以降、todos はワークスペース単位でスコープされる。`POST /api/todos` 時に `workspace_id` が必須(未所属/legacy なら `""` を明示)。
300
+
262
301
  | Method | Endpoint | Description |
263
302
  |--------|----------|-------------|
264
- | GET | `/api/todos` | List todos. Query: `status`, `priority`, `project_id`, `source_type`, `session_id`, `limit` |
265
- | GET | `/api/todos/summary` | Status counts |
266
- | GET | `/api/todos/:id` | Get single todo |
267
- | POST | `/api/todos` | Create. Body: `{title, description?, priority?, source_type?, source_id?, project_id?, due_at?, session_id?, data?}` |
303
+ | GET | `/api/todos` | List todos. Query: `status`, `priority`, `project_id`, `source_type`, `session_id`, optional `workspace_id`, `limit` |
304
+ | GET | `/api/todos/summary` | Status counts. Optional `?workspace_id=` scopes the counts |
305
+ | GET | `/api/todos/:id` | Get single todo (includes `workspace_id`) |
306
+ | POST | `/api/todos` | Create. Body: `{title, workspace_id, description?, priority?, source_type?, source_id?, project_id?, due_at?, session_id?, data?}` — `workspace_id` is **required** |
268
307
  | PUT | `/api/todos/:id` | Update any field including `status` |
269
308
  | DELETE | `/api/todos/:id` | Delete |
270
309
 
@@ -276,10 +315,10 @@ Changes via the config API take effect immediately (no restart required).
276
315
 
277
316
  **セッション紐づけ例**:
278
317
  ```bash
279
- # 作成時にsession_idを指定すると、このセッションのチャットに自動で再掲される
318
+ # 作成時はworkspace_id必須(未所属/legacyなら"")。session_idを指定すると、このセッションのチャットに自動で再掲される
280
319
  curl -X POST -H "Authorization: Bearer $API_TOKEN" -H "Content-Type: application/json" \
281
320
  http://localhost:8080/api/todos \
282
- -d '{"title": "レポートを保存", "session_id": "'$SESSION_ID'", "priority": "high"}'
321
+ -d '{"title": "レポートを保存", "workspace_id": "'$WORKSPACE_ID'", "session_id": "'$SESSION_ID'", "priority": "high"}'
283
322
 
284
323
  # 完了マーク(即座に更新すること)
285
324
  curl -X PUT -H "Authorization: Bearer $API_TOKEN" -H "Content-Type: application/json" \
@@ -298,9 +337,11 @@ curl -X PUT -H "Authorization: Bearer $API_TOKEN" -H "Content-Type: application/
298
337
 
299
338
  Allowed keys: `LLM_COMMAND`, `REFLECTION_TIME`
300
339
 
301
- ### LLM Plugins (opt-in)
340
+ ### LLM Plugins
302
341
 
303
- プラグイン方式の LLM 設定。`primary` を設定すると有効化される。未設定の場合は従来の `LLM_COMMAND` 経路で動作する。設定は `~/minion/llm/config.json` にファイルとして保存される (env var に依存しないため quote 破損バグの影響を受けない)。
342
+ プラグイン方式の LLM 設定。ワークフロー/ルーティン実行には `primary` の設定が必須。`primary` が未設定の場合、またはその名前が `enabled` に含まれていない場合はダッシュボードにエラーが表示される (heartbeat 経由)。設定は `~/minion/llm/config.json` にファイルとして保存される (env var に依存しないため quote 破損バグの影響を受けない)。
343
+
344
+ > **Note:** 旧 `LLM_COMMAND` 経路は obsolete。近々削除予定なので、新規ミニオンは必ず LLM プラグインを設定すること。
304
345
 
305
346
  | Method | Endpoint | Description |
306
347
  |--------|----------|-------------|
@@ -1110,7 +1151,7 @@ POST `/api/minion/dag-workflows/:id/publish` (body なし):
1110
1151
  | `fan_out` | `fan_out_source`, `template` | `template` は sub-graph `{ nodes, edges }` |
1111
1152
  | `join` | `join_mode`, `aggregation` | |
1112
1153
  | `conditional` | `condition_type`, `branches` or `default_branch` | |
1113
- | `transform` | `transform_instruction`, `assigned_role` | |
1154
+ | `transform` | `assigned_role`, incoming edge 1 本 + outgoing edge 1 本 (両方 `contract` 必須) | `transform_instruction` は optional hint。I/O 型は edge の contract から自動導出される |
1114
1155
 
1115
1156
  **review ノード追加の例:**
1116
1157
  ```json
@@ -1207,7 +1248,7 @@ POST `/api/minion/dag-workflows/:id/publish` (body なし):
1207
1248
  {
1208
1249
  description: string // contract の説明
1209
1250
  fields: [{
1210
- key: string // フィールド名
1251
+ name: string // フィールド名(※ "key" ではない)
1211
1252
  type: "string" | "number" | "boolean" | "url" | "array" | "object" // フィールド型
1212
1253
  description: string // フィールドの説明
1213
1254
  required?: boolean // 必須フラグ (省略時 false)
@@ -1216,6 +1257,8 @@ POST `/api/minion/dag-workflows/:id/publish` (body なし):
1216
1257
  }
1217
1258
  ```
1218
1259
 
1260
+ **⚠️ 重要: 各 field には必ず `name` プロパティを使用すること(`key` ではない)**。他のスキーマ言語(JSON Schema、OpenAPI 等)の慣習に引きずられて `key` と書かないように注意。構造バリデーションで弾かれる(400エラー)。
1261
+
1219
1262
  **注意:**
1220
1263
  - エッジに設定する `contract` は `graph.contracts` に存在する名前でなければならない(存在しない名前を指定すると 400 エラー)
1221
1264
  - **エッジが参照できる Contract は 1 つのみ**。`edge.contract` に配列を渡すことは不可(400エラー)。複数の型構造を束ねたい場合は、それらを束ねた複合Contractを1つ定義してから参照すること
@@ -1232,16 +1275,16 @@ POST `/api/minion/dag-workflows/:id/publish` (body なし):
1232
1275
  "Article": {
1233
1276
  "description": "個別の記事",
1234
1277
  "fields": [
1235
- { "key": "title", "type": "string", "required": true, "description": "タイトル" },
1236
- { "key": "url", "type": "url", "description": "記事URL" }
1278
+ { "name": "title", "type": "string", "required": true, "description": "タイトル" },
1279
+ { "name": "url", "type": "url", "description": "記事URL" }
1237
1280
  ]
1238
1281
  },
1239
1282
  "NewsCollection": {
1240
1283
  "description": "収集されたニュース全体",
1241
1284
  "fields": [
1242
- { "key": "articles", "type": "array", "items": "Article", "required": true, "description": "記事リスト" },
1243
- { "key": "collected_at", "type": "string", "required": true, "description": "収集日時" },
1244
- { "key": "count", "type": "number", "required": true, "description": "件数" }
1285
+ { "name": "articles", "type": "array", "items": "Article", "required": true, "description": "記事リスト" },
1286
+ { "name": "collected_at", "type": "string", "required": true, "description": "収集日時" },
1287
+ { "name": "count", "type": "number", "required": true, "description": "件数" }
1245
1288
  ]
1246
1289
  }
1247
1290
  },
@@ -1265,8 +1308,8 @@ POST `/api/minion/dag-workflows/:id/publish` (body なし):
1265
1308
  "prototype": {
1266
1309
  "description": "プロトタイプ成果物",
1267
1310
  "fields": [
1268
- { "key": "git_url", "type": "url", "description": "リポジトリURL", "required": true },
1269
- { "key": "preview_url", "type": "url", "description": "プレビューURL" }
1311
+ { "name": "git_url", "type": "url", "description": "リポジトリURL", "required": true },
1312
+ { "name": "preview_url", "type": "url", "description": "プレビューURL" }
1270
1313
  ]
1271
1314
  }
1272
1315
  }
@@ -1281,7 +1324,7 @@ POST `/api/minion/dag-workflows/:id/publish` (body なし):
1281
1324
  "contract": {
1282
1325
  "description": "プロトタイプ成果物",
1283
1326
  "fields": [
1284
- { "key": "git_url", "type": "url", "description": "リポジトリURL", "required": true }
1327
+ { "name": "git_url", "type": "url", "description": "リポジトリURL", "required": true }
1285
1328
  ]
1286
1329
  }
1287
1330
  }
@@ -1528,6 +1571,7 @@ Body:
1528
1571
  - `status: completed` で `requires_review` なノードはサーバ側で `review_status=review_pending` になりカスケードは停止(レビュー承認まで下流は生成されない)
1529
1572
  - `status: failed` でもカスケードは走る(fan-out join が `on_failure=ignore|collect` で集約できるため)
1530
1573
  - `output_data` は下流ノードの `input_data` に伝播する。**スキル実行時はスキル本文の「## Output Data」セクションの JSON コードブロックを抽出して `output_data` に載せる規約**(ミニオンの `dag-node-executor` がこの抽出を行う)
1574
+ - **Contract runtime validation**: 報告時に outgoing edge の `contract` で `output_data` が検証される。違反があれば HQ はノードを `failed` に書き換え、`contract_violations` カラムに構造化済み違反を保存し、`output_summary` に詳細を追記する。スキル・transform 共通。contract が貼られていない edge の先については検証スキップ
1531
1575
 
1532
1576
  Response:
1533
1577
  ```json
@@ -1589,7 +1633,7 @@ DAG ワークフローの graph は以下の構造で保存される(`dag_work
1589
1633
  | `start` | エントリポイント | ❌ (内部) |
1590
1634
  | `end` | 終端 | ❌ (内部) |
1591
1635
  | `skill` | スキル実行。`skill_version_id` と `assigned_role` が必須 | ✅ |
1592
- | `transform` | LLM によるデータ変換。`transform_instruction` が必須 | ✅ |
1636
+ | `transform` | contract 同士のブリッジ。I/O 型は incoming/outgoing edge の `contract` から自動導出、出力は HQ が contract validate。`transform_instruction` optional hint | ✅ |
1593
1637
  | `review` | レビューゲート。`approved` / `revision_requested` で分岐 | ❌ (内部) |
1594
1638
  | `fan_out` | 配列入力をテンプレートsub-graphに展開して並列実行。子が全て settle すると自ノードが completed に遷移 | ❌ (内部) |
1595
1639
  | `join` | N本の上流エッジを待ち合わせる汎用バリア。fan_out とは独立 | ❌ (内部) |
@@ -455,17 +455,64 @@ fan_out_source = ".items"
455
455
 
456
456
  ミニオンから見ると、fan-out 内の skill/transform ノードも通常どおり `pending-nodes` に返ってくる(`scope_path` が空でない点だけが違い)。
457
457
 
458
- ### Transform ノード
458
+ ### Transform ノード(contract 駆動)
459
459
 
460
- Transform ノードは LLM を使って input_data output_data に変換する軽量ノード。`transform_instruction` に自然言語で変換指示を書く。ミニオン側では `transform_instruction` を本文とする一時的なスキルを組み立てて実行する。
460
+ Transform ノードは **contract 同士のブリッジ** となる軽量ノード。入出力の shape は **incoming/outgoing edge に貼った contract から自動導出** され、LLM はその Output Contract に適合する JSON を生成する。HQ `node-complete` 報告時に output を contract で検証し、違反ならノードを failed にする。
461
461
 
462
+ **静的バリデーション要件:**
463
+
464
+ - incoming edge がちょうど 1 本で、**必ず contract を持つ**
465
+ - outgoing edge がちょうど 1 本で、**必ず contract を持つ**
466
+ - `transform_instruction` は **optional**(contract だけで意図が明確なら空欄で OK)
467
+ - `assigned_role` は必須
468
+
469
+ **典型用途: 「スキルの Markdown レポート出力 → 下流 contract の shape に整形」**
470
+
471
+ ```
472
+ incoming edge contract: compe-search-result (items: array<compe-item>, total_count, search_criteria)
473
+ outgoing edge contract: selected-items (items: array<compe-item>)
474
+
475
+ transform_instruction (optional): "上位5件のみを items に残してください"
476
+ input_data (upstream skill output): { "_raw": "# 検索レポート\n..." }
477
+ ↓ (LLM は Output Contract を見てプロンプト通りに整形)
478
+ output_data: { "items": [ {...}, {...}, {...}, {...}, {...} ] }
479
+ ↓ HQ が contract で検証、OK なら cascade 続行
462
480
  ```
463
- transform_instruction: "items 配列から title が 'Item B' のエントリを除外し、残りを返してください"
481
+
482
+ **典型用途: 配列フィルタ**
483
+
484
+ ```
485
+ transform_instruction: "items 配列から title が 'Item B' のエントリを除外"
464
486
  input_data: { "items": [{ "title": "Item A" }, { "title": "Item B" }, { "title": "Item C" }] }
465
- (LLM)
487
+
466
488
  output_data: { "items": [{ "title": "Item A" }, { "title": "Item C" }] }
467
489
  ```
468
490
 
491
+ ### DAG 構築時のノード選定フロー
492
+
493
+ スキルは汎用的で再利用可能な資産であり、ワークフロー固有の contract に合わせて SKILL.md を改修することは原則行わない。代わりに以下のフローで判断する:
494
+
495
+ 1. スキルノードの outgoing edge に contract を貼る
496
+ 2. そのスキルが contract に沿った `## Output Data` JSON ブロックを出力できるか確認
497
+ - **Yes**: そのまま接続。runtime validation が成功すれば cascade 続行
498
+ - **No**: スキルと下流ノードの間に **transform ノードを挟む**。transform の outgoing edge に下流向け contract を貼り、incoming edge はスキルからの「実質 `_raw` だけ」の契約にする(静的バリデーション上、incoming edge にも必ず contract が必要な点に注意 — contract なしの中間エッジは validator に弾かれる)
499
+ 3. fan_out ノードの前では特に注意: incoming edge contract の `fan_out_source` が指すフィールドが `type='array'` で宣言されている必要がある(そうでなければ validator がエラーを出す)
500
+
501
+ ### 契約違反時の挙動
502
+
503
+ `node-complete` で outgoing edge の contract に `output_data` が適合しない場合:
504
+
505
+ - ノードは `status='failed'`, `outcome='failure'` として記録される
506
+ - `contract_violations` カラムに違反詳細(path / expected / actual / message)が JSON で保存される
507
+ - `output_summary` の末尾にも人間可読な違反メッセージが追記される
508
+ - 下流の join / fan-out は `on_failure` policy に従って進む(`ignore` / `collect` / `fail_all`)
509
+
510
+ **よくある違反:**
511
+
512
+ - skill が `## Output Data` セクションを出力しなかった → `{ _raw: "..." }` フォールバックとなり、required field がすべて未定義で failed
513
+ - skill 出力の型が contract と違う(`string` が required だが `number` が返った等)
514
+ - transform の出力 JSON が broken(JSON.parse 失敗→`_raw` → 同じく required 欠落)
515
+
469
516
  ### Review ノードとリビジョン
470
517
 
471
518
  Review ノードはレビューゲート。`review_status=review_pending` で下流カスケードが停止する。レビュアーが:
@@ -229,16 +229,21 @@ ${indexed}`
229
229
  return { success: true, carry_over: carryOver }
230
230
  })
231
231
 
232
- // POST /api/chat/end-of-day - Generate daily log + extract memories
232
+ // POST /api/chat/end-of-day - Generate daily log + extract memories for one workspace
233
233
  fastify.post('/api/chat/end-of-day', async (request, reply) => {
234
234
  if (!verifyToken(request)) {
235
235
  reply.code(401)
236
236
  return { success: false, error: 'Unauthorized' }
237
237
  }
238
238
 
239
- const { clear_session = false } = request.body || {}
239
+ const { workspace_id, clear_session = false } = request.body || {}
240
+ if (workspace_id !== '' && typeof workspace_id !== 'string') {
241
+ reply.code(400)
242
+ return { success: false, error: 'workspace_id is required' }
243
+ }
240
244
 
241
245
  const result = await runEndOfDay({
246
+ workspaceId: workspace_id,
242
247
  runQuickLlmCall,
243
248
  clearSession: clear_session,
244
249
  })
@@ -318,14 +323,14 @@ async function buildContextPrefix(message, context, sessionId, workspaceId) {
318
323
  '# メモリ詳細(IDを指定)',
319
324
  `curl -H "Authorization: Bearer $API_TOKEN" ${baseUrl}/api/memory/{id}`,
320
325
  '',
321
- '# デイリーログ検索',
322
- `curl -H "Authorization: Bearer $API_TOKEN" "${baseUrl}/api/daily-logs?search=キーワード"`,
326
+ '# デイリーログ検索(workspace_idは「現在のワークスペース」のIDを指定。未所属なら空文字)',
327
+ `curl -H "Authorization: Bearer $API_TOKEN" "${baseUrl}/api/daily-logs?workspace_id=現在のWSのID&search=キーワード"`,
323
328
  '',
324
- '# デイリーログ一覧',
325
- `curl -H "Authorization: Bearer $API_TOKEN" ${baseUrl}/api/daily-logs`,
329
+ '# デイリーログ一覧(現在のワークスペース)',
330
+ `curl -H "Authorization: Bearer $API_TOKEN" "${baseUrl}/api/daily-logs?workspace_id=現在のWSのID"`,
326
331
  '',
327
332
  '# 特定日のデイリーログ取得',
328
- `curl -H "Authorization: Bearer $API_TOKEN" ${baseUrl}/api/daily-logs/YYYY-MM-DD`,
333
+ `curl -H "Authorization: Bearer $API_TOKEN" "${baseUrl}/api/daily-logs/YYYY-MM-DD?workspace_id=現在のWSのID"`,
329
334
  '```',
330
335
  '',
331
336
  '参照すべきタイミング:',
@@ -366,10 +371,10 @@ async function buildContextPrefix(message, context, sessionId, workspaceId) {
366
371
  '',
367
372
  'ToDo APIの使い方:',
368
373
  '```bash',
369
- '# ToDo作成(session_idは後でセッションIDが判明してから設定)',
374
+ '# ToDo作成(workspace_idは必須。現在のワークスペースのIDを指定。未所属の場合は空文字 "")',
370
375
  `curl -X POST http://localhost:${port}/api/todos \\`,
371
376
  ' -H "Authorization: Bearer $API_TOKEN" -H "Content-Type: application/json" \\',
372
- ' -d \'{"title": "ステップの説明", "session_id": "SESSION_ID", "priority": "normal"}\'',
377
+ ' -d \'{"title": "ステップの説明", "workspace_id": "現在のWSのID", "session_id": "SESSION_ID", "priority": "normal"}\'',
373
378
  '',
374
379
  '# ToDo完了',
375
380
  `curl -X PUT http://localhost:${port}/api/todos/{id} \\`,
@@ -441,7 +446,8 @@ async function buildContextPrefix(message, context, sessionId, workspaceId) {
441
446
  ` hq dag remove-edge ${context.dagWorkflowId} <edge-id> # エッジ削除`,
442
447
  ` hq dag validate ${context.dagWorkflowId} # ドラフト検証(公開せず)`,
443
448
  ` hq publish dag-workflow ${context.dagWorkflowId} # 公開`,
444
- `Contract編集時の重要な規則: edge.contract は単一Contract名(string)のみ、配列不可。List<X> は type:"array" + items:"X" で表現。詳細は ~/.minion/docs/api-reference.md の「Contracts API」参照。`,
449
+ `Contract編集時の重要な規則: edge.contract は単一Contract名(string)のみ、配列不可。List<X> は type:"array" + items:"X" で表現。**contract.fields[] の各要素は { "name": "...", "type": "...", ... } の形式で、"key" ではなく "name" を使うこと**(JSON Schema等の慣習に引きずられないように)。詳細は ~/.minion/docs/api-reference.md の「Contracts API」参照。`,
450
+ `Contract はランタイムで強制される型: node-complete 時に outgoing edge の contract で output_data が検証され、違反はノード failed 扱い。transform は contract 同士のブリッジ(I/O 型は edge の contract から自動導出、transform_instruction は optional hint)。スキルが contract に沿った ## Output Data を出せない場合はスキルと下流の間に transform を挟んで整形すること。`,
445
451
  `graph JSON 全文PUTは非推奨: hq put dag-workflow ${context.dagWorkflowId} <body.json>`,
446
452
  `新規作成は: hq create dag-workflow <body.json>`,
447
453
  `プロジェクト内の DAG ワークフロー一覧: hq list dag-workflows ${context.projectId}`,
@@ -87,6 +87,7 @@ async function directiveRoutes(fastify) {
87
87
  const startedAt = new Date().toISOString()
88
88
  const logFile = logManager.getLogPath(effectiveExecutionId)
89
89
  const workflowName = context?.workflow_name || skill_name
90
+ const workspaceId = context?.workspace_id || ''
90
91
 
91
92
  // Save initial execution record
92
93
  await executionStore.save({
@@ -94,6 +95,7 @@ async function directiveRoutes(fastify) {
94
95
  skill_name,
95
96
  workflow_id: null,
96
97
  workflow_name: workflowName,
98
+ workspace_id: workspaceId,
97
99
  status: 'running',
98
100
  outcome: null,
99
101
  started_at: startedAt,
@@ -117,6 +119,7 @@ async function directiveRoutes(fastify) {
117
119
  id: effectiveExecutionId,
118
120
  name: workflowName,
119
121
  pipeline_skill_names: [skill_name],
122
+ workspace_id: workspaceId,
120
123
  }, { skipExecutionReport: true })
121
124
 
122
125
  console.log(`[Directive] Execution completed: ${skill_name} (success: ${result.execution_id ? 'yes' : 'no'})`)
@@ -127,6 +130,7 @@ async function directiveRoutes(fastify) {
127
130
  skill_name,
128
131
  workflow_id: null,
129
132
  workflow_name: workflowName,
133
+ workspace_id: workspaceId,
130
134
  status: 'failed',
131
135
  outcome: 'failure',
132
136
  started_at: startedAt,
@@ -128,10 +128,22 @@ async function executeRoutineSession(routine, executionId, skillNames) {
128
128
  throw new Error('No LLM configured. Set a Primary plugin via /api/llm/config or LLM_COMMAND in minion.env')
129
129
  }
130
130
 
131
- // Create tmux session with the LLM command.
132
- // PATH, HOME, DISPLAY, and minion secrets are already set in
133
- // process.env at server startup, so child processes inherit them automatically.
134
- // Per-execution identifiers are passed via -e flags for the session environment.
131
+ // Create the tmux session as an empty shell first, then configure
132
+ // remain-on-exit, then inject the command via send-keys. This avoids
133
+ // the race where a fast-failing LLM command tears down the session
134
+ // before set-option can run (see workflow-runner.js for the full
135
+ // explanation — the same fix applies here).
136
+ //
137
+ // Per-execution identifiers are passed via -e flags so the session
138
+ // shell inherits them; send-keys runs inside that shell.
139
+ const execScript = path.join(os.tmpdir(), `minion-routine-exec-${sessionName}.sh`)
140
+ await fs.writeFile(
141
+ execScript,
142
+ `#!/bin/bash\n${llmCommand}\necho $? > ${exitCodeFile}\n`,
143
+ 'utf-8',
144
+ )
145
+ await execAsync(`chmod +x "${execScript}"`)
146
+
135
147
  const tmuxCommand = [
136
148
  'tmux new-session -d',
137
149
  `-s "${sessionName}"`,
@@ -139,7 +151,6 @@ async function executeRoutineSession(routine, executionId, skillNames) {
139
151
  `-e "MINION_EXECUTION_ID=${executionId}"`,
140
152
  `-e "MINION_ROUTINE_ID=${routine.id}"`,
141
153
  `-e "MINION_ROUTINE_NAME=${routine.name.replace(/"/g, '\\"')}"`,
142
- `"${llmCommand}; echo $? > ${exitCodeFile}"`,
143
154
  ].join(' ')
144
155
 
145
156
  await execAsync(tmuxCommand, { cwd: homeDir })
@@ -155,6 +166,12 @@ async function executeRoutineSession(routine, executionId, skillNames) {
155
166
  console.error(`[RoutineRunner] Failed to set up pipe-pane: ${err.message}`)
156
167
  }
157
168
 
169
+ // Now that remain-on-exit and pipe-pane are in place, inject the
170
+ // actual command.
171
+ await execAsync(
172
+ `tmux send-keys -t "${sessionName}" "bash ${execScript}" Enter`,
173
+ )
174
+
158
175
  console.log(`[RoutineRunner] Started tmux session: ${sessionName}`)
159
176
 
160
177
  // Wait for session to complete (poll for exit code file)
@@ -250,6 +267,7 @@ async function runRoutine(routine) {
250
267
  skill_name: pipelineSkillNames.join(' → '),
251
268
  routine_id: routine.id,
252
269
  routine_name: routine.name,
270
+ workspace_id: routine.workspace_id || '',
253
271
  status: 'running',
254
272
  outcome: null,
255
273
  started_at: startedAt,
@@ -270,6 +288,7 @@ async function runRoutine(routine) {
270
288
  skill_name: pipelineSkillNames.join(' → '),
271
289
  routine_id: routine.id,
272
290
  routine_name: routine.name,
291
+ workspace_id: routine.workspace_id || '',
273
292
  status: result.success ? 'completed' : 'failed',
274
293
  outcome: result.success ? null : 'failure',
275
294
  started_at: startedAt,