foundry-mcp 0.8.22__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of foundry-mcp might be problematic. Click here for more details.
- foundry_mcp/__init__.py +13 -0
- foundry_mcp/cli/__init__.py +67 -0
- foundry_mcp/cli/__main__.py +9 -0
- foundry_mcp/cli/agent.py +96 -0
- foundry_mcp/cli/commands/__init__.py +37 -0
- foundry_mcp/cli/commands/cache.py +137 -0
- foundry_mcp/cli/commands/dashboard.py +148 -0
- foundry_mcp/cli/commands/dev.py +446 -0
- foundry_mcp/cli/commands/journal.py +377 -0
- foundry_mcp/cli/commands/lifecycle.py +274 -0
- foundry_mcp/cli/commands/modify.py +824 -0
- foundry_mcp/cli/commands/plan.py +640 -0
- foundry_mcp/cli/commands/pr.py +393 -0
- foundry_mcp/cli/commands/review.py +667 -0
- foundry_mcp/cli/commands/session.py +472 -0
- foundry_mcp/cli/commands/specs.py +686 -0
- foundry_mcp/cli/commands/tasks.py +807 -0
- foundry_mcp/cli/commands/testing.py +676 -0
- foundry_mcp/cli/commands/validate.py +982 -0
- foundry_mcp/cli/config.py +98 -0
- foundry_mcp/cli/context.py +298 -0
- foundry_mcp/cli/logging.py +212 -0
- foundry_mcp/cli/main.py +44 -0
- foundry_mcp/cli/output.py +122 -0
- foundry_mcp/cli/registry.py +110 -0
- foundry_mcp/cli/resilience.py +178 -0
- foundry_mcp/cli/transcript.py +217 -0
- foundry_mcp/config.py +1454 -0
- foundry_mcp/core/__init__.py +144 -0
- foundry_mcp/core/ai_consultation.py +1773 -0
- foundry_mcp/core/batch_operations.py +1202 -0
- foundry_mcp/core/cache.py +195 -0
- foundry_mcp/core/capabilities.py +446 -0
- foundry_mcp/core/concurrency.py +898 -0
- foundry_mcp/core/context.py +540 -0
- foundry_mcp/core/discovery.py +1603 -0
- foundry_mcp/core/error_collection.py +728 -0
- foundry_mcp/core/error_store.py +592 -0
- foundry_mcp/core/health.py +749 -0
- foundry_mcp/core/intake.py +933 -0
- foundry_mcp/core/journal.py +700 -0
- foundry_mcp/core/lifecycle.py +412 -0
- foundry_mcp/core/llm_config.py +1376 -0
- foundry_mcp/core/llm_patterns.py +510 -0
- foundry_mcp/core/llm_provider.py +1569 -0
- foundry_mcp/core/logging_config.py +374 -0
- foundry_mcp/core/metrics_persistence.py +584 -0
- foundry_mcp/core/metrics_registry.py +327 -0
- foundry_mcp/core/metrics_store.py +641 -0
- foundry_mcp/core/modifications.py +224 -0
- foundry_mcp/core/naming.py +146 -0
- foundry_mcp/core/observability.py +1216 -0
- foundry_mcp/core/otel.py +452 -0
- foundry_mcp/core/otel_stubs.py +264 -0
- foundry_mcp/core/pagination.py +255 -0
- foundry_mcp/core/progress.py +387 -0
- foundry_mcp/core/prometheus.py +564 -0
- foundry_mcp/core/prompts/__init__.py +464 -0
- foundry_mcp/core/prompts/fidelity_review.py +691 -0
- foundry_mcp/core/prompts/markdown_plan_review.py +515 -0
- foundry_mcp/core/prompts/plan_review.py +627 -0
- foundry_mcp/core/providers/__init__.py +237 -0
- foundry_mcp/core/providers/base.py +515 -0
- foundry_mcp/core/providers/claude.py +472 -0
- foundry_mcp/core/providers/codex.py +637 -0
- foundry_mcp/core/providers/cursor_agent.py +630 -0
- foundry_mcp/core/providers/detectors.py +515 -0
- foundry_mcp/core/providers/gemini.py +426 -0
- foundry_mcp/core/providers/opencode.py +718 -0
- foundry_mcp/core/providers/opencode_wrapper.js +308 -0
- foundry_mcp/core/providers/package-lock.json +24 -0
- foundry_mcp/core/providers/package.json +25 -0
- foundry_mcp/core/providers/registry.py +607 -0
- foundry_mcp/core/providers/test_provider.py +171 -0
- foundry_mcp/core/providers/validation.py +857 -0
- foundry_mcp/core/rate_limit.py +427 -0
- foundry_mcp/core/research/__init__.py +68 -0
- foundry_mcp/core/research/memory.py +528 -0
- foundry_mcp/core/research/models.py +1234 -0
- foundry_mcp/core/research/providers/__init__.py +40 -0
- foundry_mcp/core/research/providers/base.py +242 -0
- foundry_mcp/core/research/providers/google.py +507 -0
- foundry_mcp/core/research/providers/perplexity.py +442 -0
- foundry_mcp/core/research/providers/semantic_scholar.py +544 -0
- foundry_mcp/core/research/providers/tavily.py +383 -0
- foundry_mcp/core/research/workflows/__init__.py +25 -0
- foundry_mcp/core/research/workflows/base.py +298 -0
- foundry_mcp/core/research/workflows/chat.py +271 -0
- foundry_mcp/core/research/workflows/consensus.py +539 -0
- foundry_mcp/core/research/workflows/deep_research.py +4142 -0
- foundry_mcp/core/research/workflows/ideate.py +682 -0
- foundry_mcp/core/research/workflows/thinkdeep.py +405 -0
- foundry_mcp/core/resilience.py +600 -0
- foundry_mcp/core/responses.py +1624 -0
- foundry_mcp/core/review.py +366 -0
- foundry_mcp/core/security.py +438 -0
- foundry_mcp/core/spec.py +4119 -0
- foundry_mcp/core/task.py +2463 -0
- foundry_mcp/core/testing.py +839 -0
- foundry_mcp/core/validation.py +2357 -0
- foundry_mcp/dashboard/__init__.py +32 -0
- foundry_mcp/dashboard/app.py +119 -0
- foundry_mcp/dashboard/components/__init__.py +17 -0
- foundry_mcp/dashboard/components/cards.py +88 -0
- foundry_mcp/dashboard/components/charts.py +177 -0
- foundry_mcp/dashboard/components/filters.py +136 -0
- foundry_mcp/dashboard/components/tables.py +195 -0
- foundry_mcp/dashboard/data/__init__.py +11 -0
- foundry_mcp/dashboard/data/stores.py +433 -0
- foundry_mcp/dashboard/launcher.py +300 -0
- foundry_mcp/dashboard/views/__init__.py +12 -0
- foundry_mcp/dashboard/views/errors.py +217 -0
- foundry_mcp/dashboard/views/metrics.py +164 -0
- foundry_mcp/dashboard/views/overview.py +96 -0
- foundry_mcp/dashboard/views/providers.py +83 -0
- foundry_mcp/dashboard/views/sdd_workflow.py +255 -0
- foundry_mcp/dashboard/views/tool_usage.py +139 -0
- foundry_mcp/prompts/__init__.py +9 -0
- foundry_mcp/prompts/workflows.py +525 -0
- foundry_mcp/resources/__init__.py +9 -0
- foundry_mcp/resources/specs.py +591 -0
- foundry_mcp/schemas/__init__.py +38 -0
- foundry_mcp/schemas/intake-schema.json +89 -0
- foundry_mcp/schemas/sdd-spec-schema.json +414 -0
- foundry_mcp/server.py +150 -0
- foundry_mcp/tools/__init__.py +10 -0
- foundry_mcp/tools/unified/__init__.py +92 -0
- foundry_mcp/tools/unified/authoring.py +3620 -0
- foundry_mcp/tools/unified/context_helpers.py +98 -0
- foundry_mcp/tools/unified/documentation_helpers.py +268 -0
- foundry_mcp/tools/unified/environment.py +1341 -0
- foundry_mcp/tools/unified/error.py +479 -0
- foundry_mcp/tools/unified/health.py +225 -0
- foundry_mcp/tools/unified/journal.py +841 -0
- foundry_mcp/tools/unified/lifecycle.py +640 -0
- foundry_mcp/tools/unified/metrics.py +777 -0
- foundry_mcp/tools/unified/plan.py +876 -0
- foundry_mcp/tools/unified/pr.py +294 -0
- foundry_mcp/tools/unified/provider.py +589 -0
- foundry_mcp/tools/unified/research.py +1283 -0
- foundry_mcp/tools/unified/review.py +1042 -0
- foundry_mcp/tools/unified/review_helpers.py +314 -0
- foundry_mcp/tools/unified/router.py +102 -0
- foundry_mcp/tools/unified/server.py +565 -0
- foundry_mcp/tools/unified/spec.py +1283 -0
- foundry_mcp/tools/unified/task.py +3846 -0
- foundry_mcp/tools/unified/test.py +431 -0
- foundry_mcp/tools/unified/verification.py +520 -0
- foundry_mcp-0.8.22.dist-info/METADATA +344 -0
- foundry_mcp-0.8.22.dist-info/RECORD +153 -0
- foundry_mcp-0.8.22.dist-info/WHEEL +4 -0
- foundry_mcp-0.8.22.dist-info/entry_points.txt +3 -0
- foundry_mcp-0.8.22.dist-info/licenses/LICENSE +21 -0
|
@@ -0,0 +1,841 @@
|
|
|
1
|
+
"""Unified journal tool family with action routing and validation."""
|
|
2
|
+
|
|
3
|
+
from __future__ import annotations
|
|
4
|
+
|
|
5
|
+
import logging
|
|
6
|
+
from dataclasses import asdict, dataclass
|
|
7
|
+
from pathlib import Path
|
|
8
|
+
from typing import Any, Dict, Mapping, Optional, Tuple, cast
|
|
9
|
+
|
|
10
|
+
from mcp.server.fastmcp import FastMCP
|
|
11
|
+
|
|
12
|
+
from foundry_mcp.config import ServerConfig
|
|
13
|
+
from foundry_mcp.core.journal import (
|
|
14
|
+
add_journal_entry,
|
|
15
|
+
find_unjournaled_tasks,
|
|
16
|
+
get_journal_entries,
|
|
17
|
+
)
|
|
18
|
+
from foundry_mcp.core.naming import canonical_tool
|
|
19
|
+
from foundry_mcp.core.pagination import (
|
|
20
|
+
CursorError,
|
|
21
|
+
decode_cursor,
|
|
22
|
+
encode_cursor,
|
|
23
|
+
normalize_page_size,
|
|
24
|
+
)
|
|
25
|
+
from foundry_mcp.core.responses import (
|
|
26
|
+
ErrorCode,
|
|
27
|
+
ErrorType,
|
|
28
|
+
error_response,
|
|
29
|
+
success_response,
|
|
30
|
+
)
|
|
31
|
+
from foundry_mcp.core.spec import find_specs_directory, load_spec, save_spec
|
|
32
|
+
from foundry_mcp.tools.unified.router import (
|
|
33
|
+
ActionDefinition,
|
|
34
|
+
ActionRouter,
|
|
35
|
+
ActionRouterError,
|
|
36
|
+
)
|
|
37
|
+
|
|
38
|
+
# Module-level logger; configuration is inherited from the application's
# logging setup (see foundry_mcp.core.logging_config).
logger = logging.getLogger(__name__)

# Closed set of journal entry categories accepted for 'entry_type'.
# Validation falls back to "note" when no entry_type is supplied.
_ALLOWED_ENTRY_TYPES = (
    "status_change",
    "deviation",
    "blocker",
    "decision",
    "note",
)
|
|
47
|
+
|
|
48
|
+
|
|
49
|
+
@dataclass
class JournalAddInput:
    """Validated payload for the journal 'add' action."""

    spec_id: str  # target specification identifier (required)
    title: str  # entry headline (required)
    content: str  # entry body text (required)
    entry_type: str  # one of _ALLOWED_ENTRY_TYPES; validator defaults to "note"
    task_id: Optional[str]  # associated task, if any
    workspace: Optional[str]  # workspace path override for specs-dir resolution
|
|
57
|
+
|
|
58
|
+
|
|
59
|
+
@dataclass
class JournalListInput:
    """Validated payload for the journal 'list' action."""

    spec_id: str  # target specification identifier (required)
    task_id: Optional[str]  # restrict results to one task when provided
    entry_type: Optional[str]  # restrict results to one category when provided
    cursor: Optional[str]  # opaque pagination cursor from a previous response
    limit: Optional[int]  # requested page size; normalized downstream
    workspace: Optional[str]  # workspace path override for specs-dir resolution
|
|
67
|
+
|
|
68
|
+
|
|
69
|
+
@dataclass
class JournalListUnjournaledInput:
    """Validated payload for the journal 'list-unjournaled' action."""

    spec_id: str  # target specification identifier (required)
    cursor: Optional[str]  # opaque pagination cursor from a previous response
    limit: Optional[int]  # requested page size; normalized downstream
    workspace: Optional[str]  # workspace path override for specs-dir resolution
|
|
75
|
+
|
|
76
|
+
|
|
77
|
+
# Human-readable summaries for each action, surfaced through the
# ActionRouter registration at the bottom of this module.
_ACTION_SUMMARY = {
    "add": "Add a journal entry to a specification",
    "list": "List journal entries with pagination",
    "list-unjournaled": "List completed tasks missing journal entries",
}
|
|
82
|
+
|
|
83
|
+
|
|
84
|
+
def _validation_error(
    field: str,
    action: str,
    message: str,
    *,
    code: ErrorCode = ErrorCode.INVALID_FORMAT,
    remediation: Optional[str] = None,
) -> dict:
    """Build a serialized validation-error envelope for a journal action.

    The offending field and fully-qualified action name are echoed into the
    response details so clients can pinpoint the bad input.
    """
    qualified = f"journal.{action}"
    response = error_response(
        f"Invalid field '{field}' for {qualified}: {message}",
        error_code=code,
        error_type=ErrorType.VALIDATION,
        remediation=remediation,
        details={"field": field, "action": qualified},
    )
    return asdict(response)
|
|
101
|
+
|
|
102
|
+
|
|
103
|
+
def _missing_field(field: str, action: str) -> dict:
    """Serialized error payload for a required field that was omitted."""
    hint = f"Provide '{field}' when calling journal.{action}"
    return _validation_error(
        field,
        action,
        "Value is required",
        code=ErrorCode.MISSING_REQUIRED,
        remediation=hint,
    )
|
|
111
|
+
|
|
112
|
+
|
|
113
|
+
def _resolve_specs_dir(
    config: ServerConfig, workspace: Optional[str]
) -> Tuple[Optional[Path], Optional[dict]]:
    """Locate the specs directory, preferring an explicit workspace.

    Returns ``(specs_dir, None)`` on success or ``(None, error_payload)``
    when resolution raises or yields nothing.
    """
    try:
        if workspace:
            specs_dir: Optional[Path] = find_specs_directory(workspace)
        else:
            # Fall back to configured directory, then auto-discovery.
            specs_dir = config.specs_dir or find_specs_directory()
    except Exception as exc:  # pragma: no cover - defensive guard
        logger.exception("Failed to resolve specs directory")
        failure = error_response(
            f"Failed to resolve specs directory: {exc}",
            error_code=ErrorCode.INTERNAL_ERROR,
            error_type=ErrorType.INTERNAL,
            remediation="Verify specs_dir configuration or pass a workspace path",
        )
        return None, asdict(failure)

    if specs_dir:
        return specs_dir, None

    missing = error_response(
        "No specs directory found",
        error_code=ErrorCode.NOT_FOUND,
        error_type=ErrorType.NOT_FOUND,
        remediation="Set SDD_SPECS_DIR or provide a workspace path",
    )
    return None, asdict(missing)
|
|
144
|
+
|
|
145
|
+
|
|
146
|
+
def _load_spec_data(
    *, spec_id: str, specs_dir: Path, action: str
) -> Tuple[Optional[Dict[str, Any]], Optional[dict]]:
    """Load a spec document from disk.

    Returns ``(spec_data, None)`` on success, or ``(None, error_payload)``
    when loading raises or the spec does not exist.
    """
    try:
        loaded = load_spec(spec_id, specs_dir)
    except Exception as exc:  # pragma: no cover - defensive guard
        logger.exception("Failed to load spec %s", spec_id)
        failure = error_response(
            f"Failed to load spec '{spec_id}': {exc}",
            error_code=ErrorCode.INTERNAL_ERROR,
            error_type=ErrorType.INTERNAL,
            remediation="Verify the spec file is accessible",
        )
        return None, asdict(failure)

    if not loaded:
        missing = error_response(
            f"Specification '{spec_id}' not found",
            error_code=ErrorCode.SPEC_NOT_FOUND,
            error_type=ErrorType.NOT_FOUND,
            remediation='Run spec(action="list") to verify the spec ID',
            details={"spec_id": spec_id, "action": f"journal.{action}"},
        )
        return None, asdict(missing)

    return cast(Dict[str, Any], loaded), None
|
|
174
|
+
|
|
175
|
+
|
|
176
|
+
def _persist_spec(
    *, spec_id: str, spec_data: Dict[str, Any], specs_dir: Path
) -> Optional[dict]:
    """Write spec_data back to disk.

    Returns ``None`` on success, or a serialized error payload when the save
    either raises or reports failure.
    """
    try:
        saved = save_spec(spec_id, spec_data, specs_dir)
    except Exception as exc:  # pragma: no cover - defensive guard
        logger.exception("Failed to persist spec %s", spec_id)
        return asdict(
            error_response(
                f"Failed to save spec '{spec_id}': {exc}",
                error_code=ErrorCode.INTERNAL_ERROR,
                error_type=ErrorType.INTERNAL,
                remediation="Check filesystem permissions and retry",
            )
        )

    if not saved:
        return asdict(
            error_response(
                f"Failed to save spec '{spec_id}'",
                error_code=ErrorCode.INTERNAL_ERROR,
                error_type=ErrorType.INTERNAL,
                remediation="Check filesystem permissions and retry",
            )
        )

    return None
|
|
201
|
+
|
|
202
|
+
|
|
203
|
+
def _validate_string(
    value: Any,
    *,
    field: str,
    action: str,
    required: bool = False,
    allow_empty: bool = False,
) -> Tuple[Optional[str], Optional[dict]]:
    """Validate an optional string parameter.

    Returns ``(value, None)`` when acceptable, ``(None, None)`` when absent
    and not required, or ``(None, error_payload)`` on failure.
    """
    if value is None:
        # Absence is only an error for required fields.
        return (None, _missing_field(field, action)) if required else (None, None)

    if not isinstance(value, str):
        return None, _validation_error(field, action, "Expected a string value")

    if not value.strip() and not allow_empty:
        return None, _validation_error(
            field, action, "Value must be a non-empty string"
        )

    return value, None
|
|
225
|
+
|
|
226
|
+
|
|
227
|
+
def _validate_entry_type(value: Any, *, action: str) -> Tuple[str, Optional[dict]]:
    """Normalize and validate an entry type, defaulting missing values to 'note'.

    Returns ``(normalized_type, None)`` or ``("", error_payload)``.
    """
    raw, failure = _validate_string(
        value, field="entry_type", action=action, required=False
    )
    if failure:
        return "", failure

    candidate = (raw or "note").strip()
    if candidate in _ALLOWED_ENTRY_TYPES:
        return candidate, None

    allowed = ", ".join(_ALLOWED_ENTRY_TYPES)
    return "", _validation_error(
        "entry_type",
        action,
        f"Must be one of: {allowed}",
        remediation=f"Provide one of: {allowed}",
    )
|
|
245
|
+
|
|
246
|
+
|
|
247
|
+
def _validate_limit(
    value: Any,
    *,
    field: str,
    action: str,
) -> Tuple[Optional[int], Optional[dict]]:
    """Validate an optional positive-integer page limit.

    ``bool`` is rejected explicitly because it subclasses ``int``.
    """
    if value is None:
        return None, None

    if not isinstance(value, int) or isinstance(value, bool):
        return None, _validation_error(field, action, "Expected an integer value")

    if value < 1:
        return None, _validation_error(
            field,
            action,
            "Value must be greater than zero",
            remediation="Provide a positive integer",
        )

    return value, None
|
|
268
|
+
|
|
269
|
+
|
|
270
|
+
def _validate_cursor(
    value: Any,
    *,
    field: str,
    action: str,
) -> Tuple[Optional[str], Optional[dict]]:
    """Validate an optional pagination cursor (must be a non-empty string)."""
    if value is None:
        return None, None

    is_usable = isinstance(value, str) and bool(value.strip())
    if is_usable:
        return value, None

    return None, _validation_error(
        field,
        action,
        "Cursor must be a non-empty string",
    )
|
|
287
|
+
|
|
288
|
+
|
|
289
|
+
def _validate_add_payload(
    payload: Mapping[str, Any],
) -> Tuple[Optional[JournalAddInput], Optional[dict]]:
    """Validate the 'add' action payload into a :class:`JournalAddInput`.

    Returns ``(input, None)`` on success or ``(None, error_payload)`` on the
    first validation failure.
    """
    action = "add"

    def required_str(name: str) -> Tuple[Optional[str], Optional[dict]]:
        # Shared pattern for the three mandatory string fields.
        text, err = _validate_string(
            payload.get(name), field=name, action=action, required=True
        )
        if err:
            return None, err
        if text is None:
            return None, _missing_field(name, action)
        return cast(str, text), None

    spec_id, failure = required_str("spec_id")
    if failure:
        return None, failure

    title, failure = required_str("title")
    if failure:
        return None, failure

    content, failure = required_str("content")
    if failure:
        return None, failure

    entry_type, failure = _validate_entry_type(payload.get("entry_type"), action=action)
    if failure:
        return None, failure

    task_id, failure = _validate_string(
        payload.get("task_id"),
        field="task_id",
        action=action,
        required=False,
        allow_empty=False,
    )
    if failure:
        return None, failure

    workspace, failure = _validate_string(
        payload.get("workspace"),
        field="workspace",
        action=action,
        required=False,
        allow_empty=False,
    )
    if failure:
        return None, failure

    parsed = JournalAddInput(
        spec_id=cast(str, spec_id),
        title=cast(str, title),
        content=cast(str, content),
        entry_type=entry_type,
        task_id=task_id,
        workspace=workspace,
    )
    return parsed, None
|
|
355
|
+
|
|
356
|
+
|
|
357
|
+
def _validate_list_payload(
    payload: Mapping[str, Any],
) -> Tuple[Optional[JournalListInput], Optional[dict]]:
    """Validate the 'list' action payload into a :class:`JournalListInput`.

    Returns ``(input, None)`` on success or ``(None, error_payload)`` on the
    first validation failure.
    """
    action = "list"

    spec_id, failure = _validate_string(
        payload.get("spec_id"), field="spec_id", action=action, required=True
    )
    if failure:
        return None, failure
    if spec_id is None:
        return None, _missing_field("spec_id", action)

    task_id, failure = _validate_string(
        payload.get("task_id"), field="task_id", action=action, required=False
    )
    if failure:
        return None, failure

    # Only normalize entry_type when the caller supplied one; None means
    # "no category filter" and must not be coerced to the 'note' default.
    entry_type: Optional[str] = None
    raw_entry_type = payload.get("entry_type")
    if raw_entry_type is not None:
        entry_type, failure = _validate_entry_type(raw_entry_type, action=action)
        if failure:
            return None, failure

    cursor, failure = _validate_cursor(
        payload.get("cursor"), field="cursor", action=action
    )
    if failure:
        return None, failure

    limit, failure = _validate_limit(payload.get("limit"), field="limit", action=action)
    if failure:
        return None, failure

    workspace, failure = _validate_string(
        payload.get("workspace"), field="workspace", action=action, required=False
    )
    if failure:
        return None, failure

    parsed = JournalListInput(
        spec_id=spec_id,
        task_id=task_id,
        entry_type=entry_type,
        cursor=cursor,
        limit=limit,
        workspace=workspace,
    )
    return parsed, None
|
|
409
|
+
|
|
410
|
+
|
|
411
|
+
def _validate_list_unjournaled_payload(
    payload: Mapping[str, Any],
) -> Tuple[Optional[JournalListUnjournaledInput], Optional[dict]]:
    """Validate the 'list-unjournaled' payload into its input dataclass.

    Returns ``(input, None)`` on success or ``(None, error_payload)`` on the
    first validation failure.
    """
    action = "list-unjournaled"

    spec_id, failure = _validate_string(
        payload.get("spec_id"), field="spec_id", action=action, required=True
    )
    if failure:
        return None, failure
    if spec_id is None:
        return None, _missing_field("spec_id", action)

    cursor, failure = _validate_cursor(
        payload.get("cursor"), field="cursor", action=action
    )
    if failure:
        return None, failure

    limit, failure = _validate_limit(payload.get("limit"), field="limit", action=action)
    if failure:
        return None, failure

    workspace, failure = _validate_string(
        payload.get("workspace"), field="workspace", action=action, required=False
    )
    if failure:
        return None, failure

    parsed = JournalListUnjournaledInput(
        spec_id=cast(str, spec_id),
        cursor=cursor,
        limit=limit,
        workspace=workspace,
    )
    return parsed, None
|
|
449
|
+
|
|
450
|
+
|
|
451
|
+
def _serialize_entry(entry: Any) -> Dict[str, Any]:
    """Convert a journal entry object into a plain JSON-serializable dict.

    Attributes are read defensively with ``getattr`` so partially-populated
    entries serialize as ``None`` rather than raising.
    """
    attributes = ("timestamp", "entry_type", "title", "content", "author", "task_id")
    return {name: getattr(entry, name, None) for name in attributes}
|
|
460
|
+
|
|
461
|
+
|
|
462
|
+
def perform_journal_add(
    *,
    config: ServerConfig,
    spec_id: str,
    title: str,
    content: str,
    entry_type: str,
    task_id: Optional[str],
    workspace: Optional[str],
) -> dict:
    """Append a journal entry to a spec and persist the updated document.

    Returns a serialized success envelope summarizing the new entry, or a
    serialized error envelope at the first failing step.
    """
    specs_dir, failure = _resolve_specs_dir(config, workspace)
    if failure:
        return failure
    assert specs_dir is not None

    spec_data, failure = _load_spec_data(
        spec_id=spec_id, specs_dir=specs_dir, action="add"
    )
    if failure:
        return failure
    assert spec_data is not None

    try:
        entry = add_journal_entry(
            spec_data,
            title=title,
            content=content,
            entry_type=entry_type,
            task_id=task_id,
            author="foundry-mcp",
        )
    except Exception as exc:  # pragma: no cover - defensive guard
        logger.exception("Error adding journal entry for %s", spec_id)
        return asdict(
            error_response(
                f"Failed to add journal entry: {exc}",
                error_code=ErrorCode.INTERNAL_ERROR,
                error_type=ErrorType.INTERNAL,
                remediation="Check spec contents and retry",
            )
        )

    # add_journal_entry mutates spec_data in place; write it back.
    failure = _persist_spec(spec_id=spec_id, spec_data=spec_data, specs_dir=specs_dir)
    if failure:
        return failure

    summary = {
        "timestamp": entry.timestamp,
        "entry_type": entry.entry_type,
        "title": entry.title,
        "task_id": entry.task_id,
    }
    return asdict(success_response(data={"spec_id": spec_id, "entry": summary}))
|
|
519
|
+
|
|
520
|
+
|
|
521
|
+
def perform_journal_list(
    *,
    config: ServerConfig,
    spec_id: str,
    task_id: Optional[str],
    entry_type: Optional[str],
    cursor: Optional[str],
    limit: Optional[int],
    workspace: Optional[str],
) -> dict:
    """Return journal entries for a spec, newest first, with cursor pagination.

    The cursor encodes the timestamp of the last entry on the previous page;
    listing resumes immediately after that entry.
    """
    specs_dir, failure = _resolve_specs_dir(config, workspace)
    if failure:
        return failure
    assert specs_dir is not None

    spec_data, failure = _load_spec_data(
        spec_id=spec_id, specs_dir=specs_dir, action="list"
    )
    if failure:
        return failure
    assert spec_data is not None

    page_size = normalize_page_size(limit)

    start_after_ts = None
    if cursor:
        try:
            start_after_ts = decode_cursor(cursor).get("last_ts")
        except CursorError as exc:
            return asdict(
                error_response(
                    f"Invalid cursor: {exc}",
                    error_code=ErrorCode.INVALID_FORMAT,
                    error_type=ErrorType.VALIDATION,
                    remediation="Use the cursor returned by the previous response",
                )
            )

    try:
        # Fetch everything matching the filters; pagination happens locally.
        entries = get_journal_entries(
            spec_data,
            task_id=task_id,
            entry_type=entry_type,
            limit=None,
        )
    except Exception as exc:  # pragma: no cover - defensive guard
        logger.exception("Error retrieving journal entries for %s", spec_id)
        return asdict(
            error_response(
                f"Failed to fetch journal entries: {exc}",
                error_code=ErrorCode.INTERNAL_ERROR,
                error_type=ErrorType.INTERNAL,
            )
        )

    # Newest entries first.
    entries.sort(key=lambda item: getattr(item, "timestamp", ""), reverse=True)

    # Resume after the cursor position; an unmatched timestamp restarts at 0.
    if start_after_ts:
        resume_at = 0
        for position, item in enumerate(entries):
            if getattr(item, "timestamp", None) == start_after_ts:
                resume_at = position + 1
                break
        entries = entries[resume_at:]

    # Take one extra entry to detect whether another page exists.
    window = entries[: page_size + 1]
    has_more = len(window) > page_size
    page_entries = window[:page_size] if has_more else window

    next_cursor = None
    warnings = None
    if has_more:
        if page_entries:
            last_ts = getattr(page_entries[-1], "timestamp", None)
            next_cursor = encode_cursor({"last_ts": last_ts})
        warnings = [
            f"Results truncated after {page_size} entries. Use the returned cursor to continue."
        ]

    return asdict(
        success_response(
            data={
                "spec_id": spec_id,
                "count": len(page_entries),
                "entries": [_serialize_entry(item) for item in page_entries],
            },
            pagination={
                "cursor": next_cursor,
                "has_more": has_more,
                "page_size": page_size,
            },
            warnings=warnings,
        )
    )
|
|
622
|
+
|
|
623
|
+
|
|
624
|
+
def perform_journal_list_unjournaled(
    *,
    config: ServerConfig,
    spec_id: str,
    cursor: Optional[str],
    limit: Optional[int],
    workspace: Optional[str],
) -> dict:
    """Return completed tasks lacking journal entries, paginated by task id.

    The cursor encodes the task id of the last item on the previous page;
    listing resumes immediately after that task.
    """
    specs_dir, failure = _resolve_specs_dir(config, workspace)
    if failure:
        return failure
    assert specs_dir is not None

    spec_data, failure = _load_spec_data(
        spec_id=spec_id, specs_dir=specs_dir, action="list-unjournaled"
    )
    if failure:
        return failure
    assert spec_data is not None

    page_size = normalize_page_size(limit)

    start_after_id = None
    if cursor:
        try:
            start_after_id = decode_cursor(cursor).get("last_id")
        except CursorError as exc:
            return asdict(
                error_response(
                    f"Invalid cursor: {exc}",
                    error_code=ErrorCode.INVALID_FORMAT,
                    error_type=ErrorType.VALIDATION,
                    remediation="Use the cursor returned by the previous response",
                )
            )

    try:
        tasks = find_unjournaled_tasks(spec_data)
    except Exception as exc:  # pragma: no cover - defensive guard
        logger.exception("Error listing unjournaled tasks for %s", spec_id)
        return asdict(
            error_response(
                f"Failed to list unjournaled tasks: {exc}",
                error_code=ErrorCode.INTERNAL_ERROR,
                error_type=ErrorType.INTERNAL,
            )
        )

    # Stable ordering so cursors remain meaningful between calls.
    tasks.sort(key=lambda item: item.get("task_id", ""))

    # Resume after the cursor position; an unmatched id restarts at 0.
    if start_after_id:
        resume_at = 0
        for position, item in enumerate(tasks):
            if item.get("task_id") == start_after_id:
                resume_at = position + 1
                break
        tasks = tasks[resume_at:]

    # Take one extra row to detect whether another page exists.
    window = tasks[: page_size + 1]
    has_more = len(window) > page_size
    page_tasks = window[:page_size] if has_more else window

    next_cursor = None
    warnings = None
    if has_more:
        if page_tasks:
            next_cursor = encode_cursor({"last_id": page_tasks[-1].get("task_id")})
        warnings = [
            f"Results truncated after {page_size} tasks. Use the returned cursor to continue."
        ]

    return asdict(
        success_response(
            data={
                "spec_id": spec_id,
                "count": len(page_tasks),
                "unjournaled_tasks": page_tasks,
            },
            pagination={
                "cursor": next_cursor,
                "has_more": has_more,
                "page_size": page_size,
            },
            warnings=warnings,
        )
    )
|
|
716
|
+
|
|
717
|
+
|
|
718
|
+
def _handle_journal_add(*, config: ServerConfig, **payload: Any) -> dict:
    """Router handler: validate the payload, then execute the 'add' action."""
    parsed, failure = _validate_add_payload(payload)
    if failure:
        return failure

    assert parsed is not None
    return perform_journal_add(
        config=config,
        spec_id=parsed.spec_id,
        title=parsed.title,
        content=parsed.content,
        entry_type=parsed.entry_type,
        task_id=parsed.task_id,
        workspace=parsed.workspace,
    )
|
|
733
|
+
|
|
734
|
+
|
|
735
|
+
def _handle_journal_list(*, config: ServerConfig, **payload: Any) -> dict:
    """Router handler: validate the payload, then execute the 'list' action."""
    parsed, failure = _validate_list_payload(payload)
    if failure:
        return failure

    assert parsed is not None
    return perform_journal_list(
        config=config,
        spec_id=parsed.spec_id,
        task_id=parsed.task_id,
        entry_type=parsed.entry_type,
        cursor=parsed.cursor,
        limit=parsed.limit,
        workspace=parsed.workspace,
    )
|
|
750
|
+
|
|
751
|
+
|
|
752
|
+
def _handle_journal_list_unjournaled(*, config: ServerConfig, **payload: Any) -> dict:
    """Router handler: validate the payload, then run 'list-unjournaled'."""
    parsed, failure = _validate_list_unjournaled_payload(payload)
    if failure:
        return failure

    assert parsed is not None
    return perform_journal_list_unjournaled(
        config=config,
        spec_id=parsed.spec_id,
        cursor=parsed.cursor,
        limit=parsed.limit,
        workspace=parsed.workspace,
    )
|
|
765
|
+
|
|
766
|
+
|
|
767
|
+
# Router binding each journal action name to its validated handler.
# Unknown actions raise ActionRouterError, translated into an error
# envelope by _dispatch_journal_action below.
_JOURNAL_ROUTER = ActionRouter(
    tool_name="journal",
    actions=[
        ActionDefinition(
            name="add",
            handler=_handle_journal_add,
            summary=_ACTION_SUMMARY["add"],
        ),
        ActionDefinition(
            name="list",
            handler=_handle_journal_list,
            summary=_ACTION_SUMMARY["list"],
        ),
        ActionDefinition(
            name="list-unjournaled",
            handler=_handle_journal_list_unjournaled,
            summary=_ACTION_SUMMARY["list-unjournaled"],
        ),
    ],
)
|
|
787
|
+
|
|
788
|
+
|
|
789
|
+
def _dispatch_journal_action(
    *, action: str, payload: Dict[str, Any], config: ServerConfig
) -> dict:
    """Dispatch through the router, mapping unknown actions to error envelopes."""
    try:
        return _JOURNAL_ROUTER.dispatch(action=action, config=config, **payload)
    except ActionRouterError as exc:
        supported = ", ".join(exc.allowed_actions)
        failure = error_response(
            f"Unsupported journal action '{action}'. Allowed actions: {supported}",
            error_code=ErrorCode.VALIDATION_ERROR,
            error_type=ErrorType.VALIDATION,
            remediation=f"Use one of: {supported}",
        )
        return asdict(failure)
|
|
804
|
+
|
|
805
|
+
|
|
806
|
+
def register_unified_journal_tool(mcp: FastMCP, config: ServerConfig) -> None:
    """Register the consolidated journal tool."""

    @canonical_tool(
        mcp,
        canonical_name="journal",
    )
    def journal(
        action: str,
        spec_id: str,
        title: Optional[str] = None,
        content: Optional[str] = None,
        entry_type: Optional[str] = None,
        task_id: Optional[str] = None,
        workspace: Optional[str] = None,
        cursor: Optional[str] = None,
        limit: Optional[int] = None,
    ) -> dict:
        # Forward every argument except 'action'; each per-action validator
        # decides which keys it actually consumes.
        payload: Dict[str, Any] = dict(
            spec_id=spec_id,
            title=title,
            content=content,
            entry_type=entry_type,
            task_id=task_id,
            workspace=workspace,
            cursor=cursor,
            limit=limit,
        )
        return _dispatch_journal_action(action=action, payload=payload, config=config)

    logger.debug("Registered unified journal tool")
|
|
837
|
+
|
|
838
|
+
|
|
839
|
+
# Public API: only the registration entry point is exported; the perform_*
# functions remain importable but are not part of the declared surface.
__all__ = [
    "register_unified_journal_tool",
]
|