fenix-mcp 0.6.0__tar.gz → 0.7.0__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {fenix_mcp-0.6.0 → fenix_mcp-0.7.0}/PKG-INFO +1 -1
- {fenix_mcp-0.6.0 → fenix_mcp-0.7.0}/fenix_mcp/__init__.py +1 -1
- {fenix_mcp-0.6.0 → fenix_mcp-0.7.0}/fenix_mcp/application/tool_base.py +20 -1
- {fenix_mcp-0.6.0 → fenix_mcp-0.7.0}/fenix_mcp/application/tools/initialize.py +23 -2
- {fenix_mcp-0.6.0 → fenix_mcp-0.7.0}/fenix_mcp/application/tools/knowledge.py +56 -49
- {fenix_mcp-0.6.0 → fenix_mcp-0.7.0}/fenix_mcp.egg-info/PKG-INFO +1 -1
- {fenix_mcp-0.6.0 → fenix_mcp-0.7.0}/README.md +0 -0
- {fenix_mcp-0.6.0 → fenix_mcp-0.7.0}/fenix_mcp/application/presenters.py +0 -0
- {fenix_mcp-0.6.0 → fenix_mcp-0.7.0}/fenix_mcp/application/tool_registry.py +0 -0
- {fenix_mcp-0.6.0 → fenix_mcp-0.7.0}/fenix_mcp/application/tools/__init__.py +0 -0
- {fenix_mcp-0.6.0 → fenix_mcp-0.7.0}/fenix_mcp/application/tools/health.py +0 -0
- {fenix_mcp-0.6.0 → fenix_mcp-0.7.0}/fenix_mcp/application/tools/intelligence.py +0 -0
- {fenix_mcp-0.6.0 → fenix_mcp-0.7.0}/fenix_mcp/application/tools/productivity.py +0 -0
- {fenix_mcp-0.6.0 → fenix_mcp-0.7.0}/fenix_mcp/application/tools/user_config.py +0 -0
- {fenix_mcp-0.6.0 → fenix_mcp-0.7.0}/fenix_mcp/domain/initialization.py +0 -0
- {fenix_mcp-0.6.0 → fenix_mcp-0.7.0}/fenix_mcp/domain/intelligence.py +0 -0
- {fenix_mcp-0.6.0 → fenix_mcp-0.7.0}/fenix_mcp/domain/knowledge.py +0 -0
- {fenix_mcp-0.6.0 → fenix_mcp-0.7.0}/fenix_mcp/domain/productivity.py +0 -0
- {fenix_mcp-0.6.0 → fenix_mcp-0.7.0}/fenix_mcp/domain/user_config.py +0 -0
- {fenix_mcp-0.6.0 → fenix_mcp-0.7.0}/fenix_mcp/infrastructure/config.py +0 -0
- {fenix_mcp-0.6.0 → fenix_mcp-0.7.0}/fenix_mcp/infrastructure/context.py +0 -0
- {fenix_mcp-0.6.0 → fenix_mcp-0.7.0}/fenix_mcp/infrastructure/fenix_api/client.py +0 -0
- {fenix_mcp-0.6.0 → fenix_mcp-0.7.0}/fenix_mcp/infrastructure/http_client.py +0 -0
- {fenix_mcp-0.6.0 → fenix_mcp-0.7.0}/fenix_mcp/infrastructure/logging.py +0 -0
- {fenix_mcp-0.6.0 → fenix_mcp-0.7.0}/fenix_mcp/interface/mcp_server.py +0 -0
- {fenix_mcp-0.6.0 → fenix_mcp-0.7.0}/fenix_mcp/interface/transports.py +0 -0
- {fenix_mcp-0.6.0 → fenix_mcp-0.7.0}/fenix_mcp/main.py +0 -0
- {fenix_mcp-0.6.0 → fenix_mcp-0.7.0}/fenix_mcp.egg-info/SOURCES.txt +0 -0
- {fenix_mcp-0.6.0 → fenix_mcp-0.7.0}/fenix_mcp.egg-info/dependency_links.txt +0 -0
- {fenix_mcp-0.6.0 → fenix_mcp-0.7.0}/fenix_mcp.egg-info/entry_points.txt +0 -0
- {fenix_mcp-0.6.0 → fenix_mcp-0.7.0}/fenix_mcp.egg-info/requires.txt +0 -0
- {fenix_mcp-0.6.0 → fenix_mcp-0.7.0}/fenix_mcp.egg-info/top_level.txt +0 -0
- {fenix_mcp-0.6.0 → fenix_mcp-0.7.0}/pyproject.toml +0 -0
- {fenix_mcp-0.6.0 → fenix_mcp-0.7.0}/setup.cfg +0 -0
fenix_mcp/application/tool_base.py

@@ -4,12 +4,31 @@
 from __future__ import annotations

 from abc import ABC, abstractmethod
-from typing import Annotated, Any, Dict, Type
+from typing import Annotated, Any, Dict, List, Optional, Type, TypeVar

 from pydantic import BaseModel, ConfigDict, Field, StringConstraints

 from fenix_mcp.infrastructure.context import AppContext

+T = TypeVar("T")
+
+
+def sanitize_null(value: Optional[T]) -> Optional[T]:
+    """Convert string 'null' to None. Handles AI agents passing 'null' as string."""
+    if value == "null" or value == "None" or value == "":
+        return None
+    return value
+
+
+def sanitize_null_list(value: Optional[List[T]]) -> Optional[List[T]]:
+    """Sanitize list values, converting 'null' string to None."""
+    if value == "null" or value == "None" or value == "":  # type: ignore
+        return None
+    if isinstance(value, list):
+        return [v for v in value if v != "null" and v != "None" and v != ""]
+    return value
+
+
 # =============================================================================
 # Type aliases for common field types - generates proper JSON schema
 # =============================================================================
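The two new helpers address AI agents that pass the literal strings `"null"`, `"None"`, or `""` for optional fields instead of omitting them. Below is a quick sketch of their behavior, assuming fenix-mcp 0.7.0 is installed; the sample values in the assertions are illustrative, not taken from the package:

```python
from fenix_mcp.application.tool_base import sanitize_null, sanitize_null_list

# Scalars: placeholder strings collapse to None, real values pass through unchanged.
assert sanitize_null("null") is None
assert sanitize_null("") is None
assert sanitize_null("team-123") == "team-123"
assert sanitize_null(None) is None

# Lists: a whole-value "null" becomes None; placeholder items inside a real list
# are dropped rather than kept as junk entries.
assert sanitize_null_list("null") is None
assert sanitize_null_list(["docs", "None", "api"]) == ["docs", "api"]
assert sanitize_null_list(None) is None
```

Note that a genuinely absent value (`None`) passes through both helpers untouched, so callers can treat sanitized and omitted fields the same way.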
fenix_mcp/application/tools/initialize.py

@@ -3,8 +3,8 @@

 from __future__ import annotations

-from enum import Enum
 import json
+from enum import Enum
 from typing import List, Optional

 from pydantic import Field
@@ -93,7 +93,28 @@ class InitializeTool(Tool):
         if data.recent_memories:
             payload_dict["recent_memories"] = data.recent_memories

-
+        # Extract key IDs for easy reference
+        profile = data.profile or {}
+        user_info = profile.get("user") or {}
+        tenant_info = profile.get("tenant") or {}
+        team_info = profile.get("team") or {}
+
+        context_lines = ["📋 **Contexto do Usuário**"]
+        if user_info.get("id"):
+            context_lines.append(f"- **user_id**: `{user_info['id']}`")
+        if user_info.get("name"):
+            context_lines.append(f"- **user_name**: {user_info['name']}")
+        if tenant_info.get("id"):
+            context_lines.append(f"- **tenant_id**: `{tenant_info['id']}`")
+        if tenant_info.get("name"):
+            context_lines.append(f"- **tenant_name**: {tenant_info['name']}")
+        if team_info.get("id"):
+            context_lines.append(f"- **team_id**: `{team_info['id']}`")
+        if team_info.get("name"):
+            context_lines.append(f"- **team_name**: {team_info['name']}")
+
+        message_lines = context_lines + [
+            "",
             "📦 **Dados de inicialização completos**",
             "```json",
             json.dumps(payload_dict, ensure_ascii=False, indent=2),
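The new block in `InitializeTool` builds a user-context header (user, tenant, and team IDs and names read from `data.profile`) and prepends it to the initialization message. A minimal sketch of how that header is assembled, reusing the logic from the diff above with a hypothetical profile dict (the IDs and names are made up for illustration):

```python
# Hypothetical profile shaped like the structure the tool reads from data.profile.
profile = {
    "user": {"id": "u-123", "name": "Ana"},
    "tenant": {"id": "t-456", "name": "Acme"},
    "team": {"id": "team-789", "name": "Platform"},
}

user_info = profile.get("user") or {}
tenant_info = profile.get("tenant") or {}
team_info = profile.get("team") or {}

context_lines = ["📋 **Contexto do Usuário**"]
if user_info.get("id"):
    context_lines.append(f"- **user_id**: `{user_info['id']}`")
if user_info.get("name"):
    context_lines.append(f"- **user_name**: {user_info['name']}")
if tenant_info.get("id"):
    context_lines.append(f"- **tenant_id**: `{tenant_info['id']}`")
if tenant_info.get("name"):
    context_lines.append(f"- **tenant_name**: {tenant_info['name']}")
if team_info.get("id"):
    context_lines.append(f"- **team_id**: `{team_info['id']}`")
if team_info.get("name"):
    context_lines.append(f"- **team_name**: {team_info['name']}")

print("\n".join(context_lines))
# 📋 **Contexto do Usuário**
# - **user_id**: `u-123`
# - **user_name**: Ana
# - **tenant_id**: `t-456`
# - **tenant_name**: Acme
# - **team_id**: `team-789`
# - **team_name**: Platform
```

Missing or empty entries are simply skipped, so the header only lists identifiers that were actually resolved during initialization.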
fenix_mcp/application/tools/knowledge.py

@@ -22,6 +22,8 @@ from fenix_mcp.application.tool_base import (
     ToolRequest,
     UUIDStr,
     VersionStr,
+    sanitize_null,
+    sanitize_null_list,
 )
 from fenix_mcp.domain.knowledge import KnowledgeService, _format_date
 from fenix_mcp.infrastructure.context import AppContext
@@ -420,20 +422,23 @@ class KnowledgeTool(Tool):
             return text(f"🗑️ Work item {payload.id} removido.")

         if action is KnowledgeAction.WORK_BACKLOG:
-
+            team_id = sanitize_null(payload.team_id)
+            if not team_id:
                 return text("❌ Informe team_id para consultar o backlog.")
-            items = await self._service.work_backlog(team_id=
+            items = await self._service.work_backlog(team_id=team_id)
             if not items:
                 return text("📋 Backlog vazio para o time informado.")
             body = "\n\n".join(_format_work(item) for item in items)
             return text(f"📋 **Backlog ({len(items)}):**\n\n{body}")

         if action is KnowledgeAction.WORK_SEARCH:
-
+            query = sanitize_null(payload.query)
+            team_id = sanitize_null(payload.team_id)
+            if not query or not team_id:
                 return text("❌ Informe query e team_id para buscar work items.")
             items = await self._service.work_search(
-                query=
-                team_id=
+                query=query,
+                team_id=team_id,
                 limit=payload.limit,
             )
             if not items:
@@ -442,9 +447,10 @@ class KnowledgeTool(Tool):
             return text(f"🔍 **Resultados ({len(items)}):**\n\n{body}")

         if action is KnowledgeAction.WORK_ANALYTICS:
-
+            team_id = sanitize_null(payload.team_id)
+            if not team_id:
                 return text("❌ Informe team_id para obter analytics.")
-            analytics = await self._service.work_analytics(team_id=
+            analytics = await self._service.work_analytics(team_id=team_id)
             lines = ["📊 **Analytics de Work Items**"]
             for key, value in analytics.items():
                 lines.append(f"- {key}: {value}")
@@ -826,20 +832,20 @@ class KnowledgeTool(Tool):
             doc = await self._service.doc_create(
                 {
                     "title": payload.doc_title,
-                    "description": payload.doc_description,
-                    "content": payload.doc_content,
-                    "status": payload.doc_status,
-                    "doc_type": payload.doc_type,
-                    "language": payload.doc_language,
-                    "parent_id": payload.doc_parent_id,
-                    "team_id": payload.doc_team_id or payload.team_id,
-                    "owner_user_id": payload.doc_owner_id,
-                    "reviewer_user_id": payload.doc_reviewer_id,
-                    "version": payload.doc_version,
-                    "category": payload.doc_category,
-                    "tags": payload.doc_tags,
-                    "emoji": payload.doc_emoji or payload.doc_emote,
-                    "keywords": payload.doc_keywords,
+                    "description": sanitize_null(payload.doc_description),
+                    "content": sanitize_null(payload.doc_content),
+                    "status": sanitize_null(payload.doc_status),
+                    "doc_type": sanitize_null(payload.doc_type),
+                    "language": sanitize_null(payload.doc_language),
+                    "parent_id": sanitize_null(payload.doc_parent_id),
+                    "team_id": sanitize_null(payload.doc_team_id or payload.team_id),
+                    "owner_user_id": sanitize_null(payload.doc_owner_id),
+                    "reviewer_user_id": sanitize_null(payload.doc_reviewer_id),
+                    "version": sanitize_null(payload.doc_version),
+                    "category": sanitize_null(payload.doc_category),
+                    "tags": sanitize_null_list(payload.doc_tags),
+                    "emoji": sanitize_null(payload.doc_emoji or payload.doc_emote),
+                    "keywords": sanitize_null_list(payload.doc_keywords),
                     "is_public": payload.doc_is_public,
                 }
             )
@@ -876,21 +882,21 @@ class KnowledgeTool(Tool):
             doc = await self._service.doc_update(
                 payload.id,
                 {
-                    "title": payload.doc_title,
-                    "description": payload.doc_description,
-                    "content": payload.doc_content,
-                    "status": payload.doc_status,
-                    "doc_type": payload.doc_type,
-                    "language": payload.doc_language,
-                    "parent_id": payload.doc_parent_id,
-                    "team_id": payload.doc_team_id or payload.team_id,
-                    "owner_user_id": payload.doc_owner_id,
-                    "reviewer_user_id": payload.doc_reviewer_id,
-                    "version": payload.doc_version,
-                    "category": payload.doc_category,
-                    "tags": payload.doc_tags,
-                    "emoji": payload.doc_emoji or payload.doc_emote,
-                    "keywords": payload.doc_keywords,
+                    "title": sanitize_null(payload.doc_title),
+                    "description": sanitize_null(payload.doc_description),
+                    "content": sanitize_null(payload.doc_content),
+                    "status": sanitize_null(payload.doc_status),
+                    "doc_type": sanitize_null(payload.doc_type),
+                    "language": sanitize_null(payload.doc_language),
+                    "parent_id": sanitize_null(payload.doc_parent_id),
+                    "team_id": sanitize_null(payload.doc_team_id or payload.team_id),
+                    "owner_user_id": sanitize_null(payload.doc_owner_id),
+                    "reviewer_user_id": sanitize_null(payload.doc_reviewer_id),
+                    "version": sanitize_null(payload.doc_version),
+                    "category": sanitize_null(payload.doc_category),
+                    "tags": sanitize_null_list(payload.doc_tags),
+                    "emoji": sanitize_null(payload.doc_emoji or payload.doc_emote),
+                    "keywords": sanitize_null_list(payload.doc_keywords),
                     "is_public": payload.doc_is_public,
                 },
             )
@@ -903,11 +909,13 @@ class KnowledgeTool(Tool):
             return text(f"🗑️ Documentação {payload.id} removida.")

         if action is KnowledgeAction.DOC_SEARCH:
-
+            query = sanitize_null(payload.query)
+            team_id = sanitize_null(payload.doc_team_id or payload.team_id)
+            if not query or not team_id:
                 return text("❌ Informe query e team_id para buscar documentação.")
             docs = await self._service.doc_search(
-                query=
-                team_id=
+                query=query,
+                team_id=team_id,
                 limit=payload.limit,
             )
             if not docs:
@@ -918,11 +926,10 @@ class KnowledgeTool(Tool):
             return text(f"🔍 **Resultados ({len(docs)}):**\n\n{body}")

         if action is KnowledgeAction.DOC_ROOTS:
-
+            team_id = sanitize_null(payload.doc_team_id or payload.team_id)
+            if not team_id:
                 return text("❌ Informe team_id para listar raízes.")
-            docs = await self._service.doc_roots(
-                team_id=payload.doc_team_id or payload.team_id
-            )
+            docs = await self._service.doc_roots(team_id=team_id)
             if not docs:
                 return text("📚 Nenhuma raiz encontrada.")
             body = "\n".join(
@@ -932,10 +939,11 @@ class KnowledgeTool(Tool):
             return text(f"📚 **Raízes de documentação:**\n{body}")

         if action is KnowledgeAction.DOC_RECENT:
-
+            team_id = sanitize_null(payload.doc_team_id or payload.team_id)
+            if not team_id:
                 return text("❌ Informe team_id para listar documentos recentes.")
             docs = await self._service.doc_recent(
-                team_id=
+                team_id=team_id,
                 limit=payload.limit,
             )
             if not docs:
@@ -944,11 +952,10 @@ class KnowledgeTool(Tool):
             return text(f"🕒 **Documentos recentes ({len(docs)}):**\n\n{body}")

         if action is KnowledgeAction.DOC_ANALYTICS:
-
+            team_id = sanitize_null(payload.doc_team_id or payload.team_id)
+            if not team_id:
                 return text("❌ Informe team_id para obter analytics.")
-            analytics = await self._service.doc_analytics(
-                team_id=payload.doc_team_id or payload.team_id
-            )
+            analytics = await self._service.doc_analytics(team_id=team_id)
             lines = ["📊 **Analytics de Documentação**"]
             for key, value in analytics.items():
                 lines.append(f"- {key}: {value}")
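Across `KnowledgeTool`, every branch now follows the same pattern: sanitize the payload field, validate it, then call the service with the cleaned value. A minimal sketch of the effect on a `doc_create`-style payload, importing the helpers from the path the diff itself adds; the `DocPayload` dataclass and its values are hypothetical stand-ins for the tool's real Pydantic request model:

```python
from dataclasses import dataclass, field
from typing import List, Optional

from fenix_mcp.application.tool_base import sanitize_null, sanitize_null_list


@dataclass
class DocPayload:
    """Hypothetical payload, shaped the way an AI agent might send it."""

    doc_title: str = "Runbook"
    doc_description: Optional[str] = "null"  # placeholder instead of omission
    doc_content: Optional[str] = "# Steps"
    doc_team_id: Optional[str] = None
    team_id: Optional[str] = "team-789"
    doc_tags: Optional[List[str]] = field(default_factory=lambda: ["ops", "null", "oncall"])


payload = DocPayload()

# Mirrors the 0.7.0 doc_create call: optional scalars go through sanitize_null,
# list fields through sanitize_null_list, before the dict reaches the service.
doc_fields = {
    "title": payload.doc_title,                                        # "Runbook"
    "description": sanitize_null(payload.doc_description),             # None (was "null")
    "content": sanitize_null(payload.doc_content),                     # "# Steps"
    "team_id": sanitize_null(payload.doc_team_id or payload.team_id),  # "team-789"
    "tags": sanitize_null_list(payload.doc_tags),                      # ["ops", "oncall"]
}
```

One subtlety of the real code worth noting: `payload.doc_team_id or payload.team_id` is evaluated before sanitization, so a `doc_team_id` of `"null"` (a truthy string) wins the `or` and is then converted to `None` rather than falling back to `team_id`.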