flowllm 0.1.1__py3-none-any.whl → 0.1.3__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- flowllm/__init__.py +19 -6
- flowllm/app.py +4 -14
- flowllm/client/__init__.py +25 -0
- flowllm/client/async_http_client.py +81 -0
- flowllm/client/http_client.py +81 -0
- flowllm/client/mcp_client.py +133 -0
- flowllm/client/sync_mcp_client.py +116 -0
- flowllm/config/__init__.py +1 -0
- flowllm/config/{default_config.yaml → default.yaml} +3 -8
- flowllm/config/empty.yaml +37 -0
- flowllm/config/pydantic_config_parser.py +17 -17
- flowllm/context/base_context.py +27 -7
- flowllm/context/flow_context.py +6 -18
- flowllm/context/registry.py +5 -1
- flowllm/context/service_context.py +83 -37
- flowllm/embedding_model/__init__.py +1 -1
- flowllm/embedding_model/base_embedding_model.py +91 -0
- flowllm/embedding_model/openai_compatible_embedding_model.py +63 -5
- flowllm/flow/__init__.py +1 -0
- flowllm/flow/base_flow.py +74 -0
- flowllm/flow/base_tool_flow.py +15 -0
- flowllm/flow/gallery/__init__.py +8 -0
- flowllm/flow/gallery/cmd_flow.py +11 -0
- flowllm/flow/gallery/code_tool_flow.py +30 -0
- flowllm/flow/gallery/dashscope_search_tool_flow.py +34 -0
- flowllm/flow/gallery/deepsearch_tool_flow.py +39 -0
- flowllm/flow/gallery/expression_tool_flow.py +18 -0
- flowllm/flow/gallery/mock_tool_flow.py +62 -0
- flowllm/flow/gallery/tavily_search_tool_flow.py +30 -0
- flowllm/flow/gallery/terminate_tool_flow.py +30 -0
- flowllm/flow/parser/__init__.py +0 -0
- flowllm/{flow_engine/simple_flow_engine.py → flow/parser/expression_parser.py} +25 -67
- flowllm/llm/__init__.py +2 -1
- flowllm/llm/base_llm.py +94 -4
- flowllm/llm/litellm_llm.py +456 -0
- flowllm/llm/openai_compatible_llm.py +205 -5
- flowllm/op/__init__.py +12 -3
- flowllm/op/agent/__init__.py +1 -0
- flowllm/op/agent/react_v1_op.py +109 -0
- flowllm/op/agent/react_v1_prompt.yaml +54 -0
- flowllm/op/agent/react_v2_op.py +86 -0
- flowllm/op/agent/react_v2_prompt.yaml +35 -0
- flowllm/op/akshare/__init__.py +3 -0
- flowllm/op/akshare/get_ak_a_code_op.py +14 -22
- flowllm/op/akshare/get_ak_a_info_op.py +17 -20
- flowllm/op/{llm_base_op.py → base_llm_op.py} +7 -5
- flowllm/op/base_op.py +40 -44
- flowllm/op/base_ray_op.py +313 -0
- flowllm/op/code/__init__.py +1 -0
- flowllm/op/code/execute_code_op.py +42 -0
- flowllm/op/gallery/__init__.py +2 -0
- flowllm/op/{mock_op.py → gallery/mock_op.py} +4 -4
- flowllm/op/gallery/terminate_op.py +29 -0
- flowllm/op/parallel_op.py +2 -9
- flowllm/op/search/__init__.py +3 -0
- flowllm/op/search/dashscope_deep_research_op.py +267 -0
- flowllm/op/search/dashscope_search_op.py +186 -0
- flowllm/op/search/dashscope_search_prompt.yaml +13 -0
- flowllm/op/search/tavily_search_op.py +109 -0
- flowllm/op/sequential_op.py +1 -9
- flowllm/schema/flow_request.py +12 -0
- flowllm/schema/message.py +2 -0
- flowllm/schema/service_config.py +12 -16
- flowllm/schema/tool_call.py +20 -8
- flowllm/schema/vector_node.py +1 -0
- flowllm/service/__init__.py +3 -2
- flowllm/service/base_service.py +50 -41
- flowllm/service/cmd_service.py +15 -0
- flowllm/service/http_service.py +34 -42
- flowllm/service/mcp_service.py +13 -11
- flowllm/storage/cache/__init__.py +1 -0
- flowllm/storage/cache/cache_data_handler.py +104 -0
- flowllm/{utils/dataframe_cache.py → storage/cache/data_cache.py} +136 -92
- flowllm/storage/vector_store/__init__.py +3 -3
- flowllm/storage/vector_store/base_vector_store.py +3 -0
- flowllm/storage/vector_store/es_vector_store.py +4 -5
- flowllm/storage/vector_store/local_vector_store.py +0 -1
- flowllm/utils/common_utils.py +9 -21
- flowllm/utils/fetch_url.py +16 -12
- flowllm/utils/llm_utils.py +28 -0
- flowllm/utils/logger_utils.py +28 -0
- flowllm/utils/ridge_v2.py +54 -0
- {flowllm-0.1.1.dist-info → flowllm-0.1.3.dist-info}/METADATA +43 -390
- flowllm-0.1.3.dist-info/RECORD +102 -0
- flowllm-0.1.3.dist-info/entry_points.txt +2 -0
- flowllm/flow_engine/__init__.py +0 -1
- flowllm/flow_engine/base_flow_engine.py +0 -34
- flowllm-0.1.1.dist-info/RECORD +0 -62
- flowllm-0.1.1.dist-info/entry_points.txt +0 -4
- {flowllm-0.1.1.dist-info → flowllm-0.1.3.dist-info}/WHEEL +0 -0
- {flowllm-0.1.1.dist-info → flowllm-0.1.3.dist-info}/licenses/LICENSE +0 -0
- {flowllm-0.1.1.dist-info → flowllm-0.1.3.dist-info}/top_level.txt +0 -0
flowllm/schema/service_config.py
CHANGED
@@ -14,31 +14,27 @@ class MCPConfig(BaseModel):
 class HttpConfig(BaseModel):
     host: str = Field(default="0.0.0.0")
     port: int = Field(default=8001)
-    timeout_keep_alive: int = Field(default=
+    timeout_keep_alive: int = Field(default=3600)
     limit_concurrency: int = Field(default=64)


-class
-
-
-    def set_name(self, name: str):
-        self.name = name
-        return self
-
-
-class FlowEngineConfig(BaseModel):
-    backend: str = Field(default="")
+class CmdConfig(BaseModel):
+    flow: str = Field(default="")
     params: dict = Field(default_factory=dict)


+class FlowConfig(ToolCall):
+    flow_content: str = Field(default="")
+    service_type: str = Field(default="all", description="all/http/mcp/cmd")
+
 class OpConfig(BaseModel):
     backend: str = Field(default="")
     language: str = Field(default="")
     raise_exception: bool = Field(default=True)
     prompt_path: str = Field(default="")
-    llm: str = Field(default="
-    embedding_model: str = Field(default="
-    vector_store: str = Field(default="
+    llm: str = Field(default="")
+    embedding_model: str = Field(default="")
+    vector_store: str = Field(default="")
     params: dict = Field(default_factory=dict)


@@ -64,11 +60,11 @@ class ServiceConfig(BaseModel):
     backend: str = Field(default="")
     language: str = Field(default="")
     thread_pool_max_workers: int = Field(default=16)
-    ray_max_workers: int = Field(default=
+    ray_max_workers: int = Field(default=1)

+    cmd: CmdConfig = Field(default_factory=CmdConfig)
     mcp: MCPConfig = Field(default_factory=MCPConfig)
     http: HttpConfig = Field(default_factory=HttpConfig)
-    flow_engine: FlowEngineConfig = Field(default_factory=FlowEngineConfig)
     flow: Dict[str, FlowConfig] = Field(default_factory=dict)
     op: Dict[str, OpConfig] = Field(default_factory=dict)
     llm: Dict[str, LLMConfig] = Field(default_factory=dict)
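For orientation, a minimal sketch of the reworked config objects above; the class and field names come from this hunk, while the concrete values and the flow-expression syntax are hypothetical:

```python
from flowllm.schema.service_config import CmdConfig, FlowConfig

cmd = CmdConfig(flow="mock_tool_flow", params={"query": "hello"})

# FlowConfig now inherits from ToolCall, so a flow carries its tool schema
# (name/description/input_schema) next to the flow expression itself.
flow = FlowConfig(
    name="mock_tool_flow",
    description="demo flow",
    flow_content="mock1_op >> mock2_op",  # expression syntax assumed; see flow/parser/expression_parser.py
    service_type="http",                  # "all" / "http" / "mcp" / "cmd"
)
print(flow.simple_input_dump())
```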
flowllm/schema/tool_call.py
CHANGED
@@ -1,5 +1,5 @@
 import json
-from typing import Dict
+from typing import Dict, List

 from pydantic import BaseModel, Field

@@ -8,7 +8,18 @@ class ParamAttrs(BaseModel):
     type: str = Field(default="str", description="tool parameter type")
     description: str = Field(default="", description="tool parameter description")
     required: bool = Field(default=True, description="tool parameter required")
+    enum: List[str] | None = Field(default=None, description="tool parameter enum")

+    def simple_dump(self) -> dict:
+        result: dict = {
+            "type": self.type,
+            "description": self.description,
+        }
+
+        if self.enum:
+            result["enum"] = self.enum
+
+        return result

 class ToolCall(BaseModel):
     """
@@ -47,19 +58,20 @@ class ToolCall(BaseModel):
     type: str = Field(default="function")
     name: str = Field(default="")

-    arguments:
+    arguments: str = Field(default="", description="tool execution arguments")

     description: str = Field(default="")
     input_schema: Dict[str, ParamAttrs] = Field(default_factory=dict)
     output_schema: Dict[str, ParamAttrs] = Field(default_factory=dict)

+    @property
+    def argument_dict(self) -> dict:
+        return json.loads(self.arguments)
+
     def simple_input_dump(self, version: str = "v1") -> dict:
         if version == "v1":
             required_list = [name for name, tool_param in self.input_schema.items() if tool_param.required]
-            properties = {name:
-                "type": tool_param.type,
-                "description": tool_param.description
-            } for name, tool_param in self.input_schema.items()}
+            properties = {name: tool_param.simple_dump() for name, tool_param in self.input_schema.items()}

             return {
                 "type": self.type,
@@ -83,7 +95,7 @@ class ToolCall(BaseModel):
             "index": self.index,
             "id": self.id,
             self.type: {
-                "arguments":
+                "arguments": self.arguments,
                 "name": self.name
             },
             "type": self.type,
@@ -103,7 +115,7 @@ class ToolCall(BaseModel):
             if name:
                 self.name = name
             if arguments:
-                self.arguments =
+                self.arguments = arguments
         else:
             raise NotImplementedError(f"version {version} not supported")

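Based only on the fields and methods visible in this diff, the updated schema can be exercised roughly like this (values are illustrative):

```python
from flowllm.schema.tool_call import ParamAttrs, ToolCall

tool = ToolCall(
    name="web_search",
    description="Search the web",
    input_schema={
        "query": ParamAttrs(type="str", description="search query", required=True),
        "topic": ParamAttrs(type="str", description="search topic", required=False,
                            enum=["general", "news"]),  # enum is new in 0.1.3
    },
)

# ParamAttrs.simple_dump() only emits "enum" when it is set
print(tool.input_schema["topic"].simple_dump())
# {'type': 'str', 'description': 'search topic', 'enum': ['general', 'news']}

# arguments stays a JSON string; the new argument_dict property parses it on access
tool.arguments = '{"query": "flowllm", "topic": "general"}'
print(tool.argument_dict["query"])  # flowllm

# simple_input_dump("v1") now builds its properties block via ParamAttrs.simple_dump()
schema = tool.simple_input_dump(version="v1")
```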
flowllm/schema/vector_node.py
CHANGED
flowllm/service/__init__.py
CHANGED
@@ -1,2 +1,3 @@
-from
-from
+from .cmd_service import CmdService
+from .http_service import HttpService
+from .mcp_service import MCPService
flowllm/service/base_service.py
CHANGED
@@ -1,59 +1,68 @@
-from
-from typing import Dict
+from abc import abstractmethod, ABC
+from typing import Dict, Optional

 from loguru import logger
+from pydantic import create_model, Field

-from flowllm.
+from flowllm.config.pydantic_config_parser import PydanticConfigParser
 from flowllm.context.service_context import C
-from flowllm.
-from flowllm.schema.
-from flowllm.schema.
+from flowllm.schema.flow_request import FlowRequest
+from flowllm.schema.service_config import ServiceConfig
+from flowllm.schema.tool_call import ParamAttrs
+from flowllm.utils.common_utils import snake_to_camel


-class BaseService:
+class BaseService(ABC):
+    TYPE_MAPPING = {
+        "str": str,
+        "int": int,
+        "float": float,
+        "bool": bool,
+        "list": list,
+        "dict": dict,
+    }

     def __init__(self, service_config: ServiceConfig):
         self.service_config = service_config

-        C.language = self.service_config.language
-        C.thread_pool = ThreadPoolExecutor(max_workers=self.service_config.thread_pool_max_workers)
-        for name, config in self.service_config.vector_store.items():
-            vector_store_cls = C.resolve_vector_store(config.backend)
-            embedding_model_config: EmbeddingModelConfig = self.service_config.embedding_model[config.embedding_model]
-            embedding_model_cls = C.resolve_embedding_model(embedding_model_config.backend)
-            embedding_model = embedding_model_cls(model_name=embedding_model_config.model_name,
-                                                  **embedding_model_config.params)
-            C.set_vector_store(name, vector_store_cls(embedding_model=embedding_model, **config.params))
-
-        self.flow_engine_config = self.service_config.flow_engine
-        self.flow_engine_cls = C.resolve_flow_engine(self.flow_engine_config.backend)
-        self.flow_config_dict: Dict[str, FlowConfig] = \
-            {name: config.set_name(name) for name, config in self.service_config.flow.items()}
-
         self.mcp_config = self.service_config.mcp
         self.http_config = self.service_config.http
+        C.init_by_service_config(self.service_config)
+
+    @classmethod
+    def get_service(cls, *args, parser: type[PydanticConfigParser] = PydanticConfigParser) -> "BaseService":
+        config_parser = parser(ServiceConfig)
+        service_config: ServiceConfig = config_parser.parse_args(*args)
+        service_cls = C.resolve_service(service_config.backend)
+        return service_cls(service_config)
+
+    def _create_pydantic_model(self, flow_name: str, input_schema: Dict[str, ParamAttrs]):
+        fields = {}
+
+        for param_name, param_config in input_schema.items():
+            field_type = self.TYPE_MAPPING.get(param_config.type, str)
+
+            if not param_config.required:
+                fields[param_name] = (Optional[field_type], Field(default=None, description=param_config.description))
+            else:
+                fields[param_name] = (field_type, Field(default=..., description=param_config.description))
+
+        return create_model(f"{snake_to_camel(flow_name)}Model", __base__=FlowRequest, **fields)

-    def
-
-        try:
-            logger.info(f"request.params={kwargs}")
-            flow_context = FlowContext(**kwargs,
-                                       response=response,
-                                       service_config=self.service_config.model_copy(deep=True))
+    def integrate_tool_flow(self, tool_flow_name: str):
+        ...

-
-
-
-
-                                       **self.flow_engine_config.params)
-            flow_engine()
+    def integrate_tool_flows(self):
+        for tool_flow_name in C.tool_flow_names:
+            self.integrate_tool_flow(tool_flow_name)
+            logger.info(f"integrate flow_endpoint={tool_flow_name}")

-
-
-            response.success = False
-            response.answer = str(e.args)
+    def __enter__(self):
+        return self

-
+    def __exit__(self, exc_type, exc_val, exc_tb):
+        ...

+    @abstractmethod
     def __call__(self):
-
+        ...
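The new `_create_pydantic_model` helper can be illustrated standalone; this sketch mirrors its logic with pydantic's `create_model`, using a made-up input schema and a plain `BaseModel` base instead of `FlowRequest`:

```python
from typing import Optional

from pydantic import BaseModel, Field, create_model

TYPE_MAPPING = {"str": str, "int": int, "float": float, "bool": bool, "list": list, "dict": dict}

# Hypothetical input schema, in the same shape as Dict[str, ParamAttrs]
input_schema = {
    "query": {"type": "str", "description": "search query", "required": True},
    "top_k": {"type": "int", "description": "number of results", "required": False},
}

fields = {}
for name, cfg in input_schema.items():
    field_type = TYPE_MAPPING.get(cfg["type"], str)
    if cfg["required"]:
        fields[name] = (field_type, Field(default=..., description=cfg["description"]))
    else:
        fields[name] = (Optional[field_type], Field(default=None, description=cfg["description"]))

# BaseService passes __base__=FlowRequest; plain BaseModel keeps the sketch self-contained.
QueryModel = create_model("QueryModel", __base__=BaseModel, **fields)
print(QueryModel(query="flowllm").model_dump())  # {'query': 'flowllm', 'top_k': None}
```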
flowllm/service/cmd_service.py
ADDED
@@ -0,0 +1,15 @@
+from loguru import logger
+
+from flowllm.context.service_context import C
+from flowllm.flow.gallery import CmdFlow
+from flowllm.service.base_service import BaseService
+
+
+@C.register_service("cmd")
+class CmdService(BaseService):
+
+    def __call__(self):
+        flow = CmdFlow(flow=self.service_config.cmd.flow)
+        response = flow.__call__(**self.service_config.cmd.params)
+        if response.answer:
+            logger.info(f"final_answer={response.answer}")
flowllm/service/http_service.py
CHANGED
@@ -1,33 +1,24 @@
 import asyncio
 from functools import partial
-from typing import Dict, Optional

 import uvicorn
 from fastapi import FastAPI
 from fastapi.middleware.cors import CORSMiddleware
 from loguru import logger
-from pydantic import BaseModel, create_model, Field

 from flowllm.context.service_context import C
+from flowllm.flow.base_tool_flow import BaseToolFlow
 from flowllm.schema.flow_response import FlowResponse
-from flowllm.schema.tool_call import ParamAttrs
 from flowllm.service.base_service import BaseService
-from flowllm.utils.common_utils import snake_to_camel


 @C.register_service("http")
 class HttpService(BaseService):
-    TYPE_MAPPING = {
-        "str": str,
-        "int": int,
-        "float": float,
-        "bool": bool
-    }

     def __init__(self, *args, **kwargs):
         super().__init__(*args, **kwargs)
         self.app = FastAPI(title="FlowLLM", description="HTTP API for FlowLLM")
-
+
         # Add CORS middleware
         self.app.add_middleware(
             CORSMiddleware,
@@ -36,50 +27,51 @@ class HttpService(BaseService):
             allow_methods=["*"],
             allow_headers=["*"],
         )
-
+
         # Add health check endpoint
         self.app.get("/health")(self.health_check)

     @staticmethod
     def health_check():
         return {"status": "healthy"}
-
-    def
-
-
-
-
-            field_type = self.TYPE_MAPPING.get(param_config.type, str)
-
-            if not param_config.required:
-                fields[param_name] = (Optional[field_type], Field(default=None, description=param_config.description))
-            else:
-                fields[param_name] = (field_type, Field(default=..., description=param_config.description))
-
-        return create_model(f"{snake_to_camel(flow_name)}Model", **fields)
-
-    def register_flow(self, flow_name: str):
-        """Register a flow as an HTTP endpoint"""
-        flow_config = self.flow_config_dict[flow_name]
-        request_model = self._create_pydantic_model(flow_name, flow_config.input_schema)
-
-        async def execute_flow_endpoint(request: request_model) -> FlowResponse:
+
+    def integrate_tool_flow(self, tool_flow_name: str):
+        tool_flow: BaseToolFlow = C.get_tool_flow(tool_flow_name)
+        request_model = self._create_pydantic_model(tool_flow_name, tool_flow.tool_call.input_schema)
+
+        async def execute_endpoint(request: request_model) -> FlowResponse:
             loop = asyncio.get_event_loop()
             response: FlowResponse = await loop.run_in_executor(
                 executor=C.thread_pool,
-                func=partial(
+                func=partial(tool_flow.__call__, **request.model_dump())) # noqa

             return response

-        endpoint_path = f"/{
-        self.app.post(endpoint_path, response_model=FlowResponse)(
-
-
+        endpoint_path = f"/{tool_flow.name}"
+        self.app.post(endpoint_path, response_model=FlowResponse)(execute_endpoint)
+
+    def integrate_tool_flows(self):
+        super().integrate_tool_flows()
+
+        async def execute_endpoint() -> list:
+            loop = asyncio.get_event_loop()
+
+            def list_tool_flows() -> list:
+                tool_flow_schemas = []
+                for name, tool_flow in C.tool_flow_dict.items():
+                    assert isinstance(tool_flow, BaseToolFlow)
+                    tool_flow_schemas.append(tool_flow.tool_call.simple_input_dump())
+                return tool_flow_schemas
+
+            return await loop.run_in_executor(executor=C.thread_pool, func=list_tool_flows) # noqa
+
+        endpoint_path = "list"
+        self.app.get("/" + endpoint_path, response_model=list)(execute_endpoint)
+        logger.info(f"integrate endpoint={endpoint_path}")
+
     def __call__(self):
-
-
-
-        # Start the server
+        self.integrate_tool_flows()
+
         uvicorn.run(self.app,
                     host=self.http_config.host,
                     port=self.http_config.port,
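Given the endpoints registered above (one POST route per tool flow plus GET /list), a client call might look like the following; the host/port come from the `HttpConfig` defaults and the flow name is hypothetical:

```python
import requests

base_url = "http://localhost:8001"  # HttpConfig defaults: host 0.0.0.0, port 8001

# GET /list returns simple_input_dump() schemas for every registered tool flow
print(requests.get(f"{base_url}/list").json())

# POST /<flow_name> executes that flow; the body fields mirror the flow's input_schema
resp = requests.post(f"{base_url}/mock_tool_flow", json={"query": "hello"})
print(resp.json())  # a FlowResponse; its exact fields are not part of this diff
```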
flowllm/service/mcp_service.py
CHANGED
@@ -3,9 +3,9 @@ from functools import partial

 from fastmcp import FastMCP
 from fastmcp.tools import FunctionTool
-from loguru import logger

 from flowllm.context.service_context import C
+from flowllm.flow.base_tool_flow import BaseToolFlow
 from flowllm.service.base_service import BaseService


@@ -14,31 +14,33 @@ class MCPService(BaseService):

     def __init__(self, *args, **kwargs):
         super().__init__(*args, **kwargs)
-        self.mcp = FastMCP("FlowLLM")
+        self.mcp = FastMCP(name="FlowLLM")

-    def
-
+    def integrate_tool_flow(self, tool_flow_name: str):
+        tool_flow: BaseToolFlow = C.get_tool_flow(tool_flow_name)
+        request_model = self._create_pydantic_model(tool_flow_name, tool_flow.tool_call.input_schema)

         async def execute_flow_async(**kwargs) -> str:
+            request: request_model = request_model(**kwargs)
             loop = asyncio.get_event_loop()
             response = await loop.run_in_executor(
                 executor=C.thread_pool,
-                func=partial(
+                func=partial(tool_flow.__call__, **request.model_dump())) # noqa
             return response.answer

-        tool = FunctionTool(name=
-                            description=
+        tool = FunctionTool(name=tool_flow.name, # noqa
+                            description=tool_flow.tool_call.description, # noqa
                             fn=execute_flow_async,
-                            parameters=
+                            parameters=tool_flow.tool_call.input_schema)
         self.mcp.add_tool(tool)
-        logger.info(f"register flow={flow_name}")

     def __call__(self):
-
-        self.register_flow(flow_name)
+        self.integrate_tool_flows()

         if self.mcp_config.transport == "sse":
             self.mcp.run(transport="sse", host=self.mcp_config.host, port=self.mcp_config.port)
+        if self.mcp_config.transport == "http":
+            self.mcp.run(transport="http", host=self.mcp_config.host, port=self.mcp_config.port)
         elif self.mcp_config.transport == "stdio":
             self.mcp.run(transport="stdio")
         else:
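A rough sketch of talking to the MCP endpoint with fastmcp's own client; the client API, URL path, and flow name are assumptions and are not taken from this diff:

```python
import asyncio

from fastmcp import Client  # fastmcp's bundled client, not part of this package

async def main():
    # Assumes the service was started with transport="sse" on MCPConfig's host/port;
    # the URL path and the flow name below are hypothetical.
    async with Client("http://localhost:8001/sse") as client:
        tools = await client.list_tools()
        print([tool.name for tool in tools])
        result = await client.call_tool("mock_tool_flow", {"query": "hello"})
        print(result)

asyncio.run(main())
```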
flowllm/storage/cache/__init__.py
ADDED
@@ -0,0 +1 @@
+from .data_cache import DataCache
flowllm/storage/cache/cache_data_handler.py
ADDED
@@ -0,0 +1,104 @@
+import json
+from abc import ABC, abstractmethod
+from pathlib import Path
+from typing import Dict, Any
+
+import pandas as pd
+
+
+class CacheDataHandler(ABC):
+    """Abstract base class for data type handlers"""
+
+    @abstractmethod
+    def save(self, data: Any, file_path: Path, **kwargs) -> Dict[str, Any]:
+        """
+        Save data to file and return metadata
+
+        Args:
+            data: Data to save
+            file_path: File path to save to
+            **kwargs: Additional parameters
+
+        Returns:
+            Dict containing metadata about the saved data
+        """
+        pass
+
+    @abstractmethod
+    def load(self, file_path: Path, **kwargs) -> Any:
+        """
+        Load data from file
+
+        Args:
+            file_path: File path to load from
+            **kwargs: Additional parameters
+
+        Returns:
+            Loaded data
+        """
+        pass
+
+    @abstractmethod
+    def get_file_extension(self) -> str:
+        """Get the file extension for this data type"""
+        pass
+
+
+class DataFrameHandler(CacheDataHandler):
+    """Handler for pandas DataFrame data type"""
+
+    def save(self, data: pd.DataFrame, file_path: Path, **kwargs) -> Dict[str, Any]:
+        """Save DataFrame as CSV"""
+        csv_params = {
+            "index": False,
+            "encoding": "utf-8"
+        }
+        csv_params.update(kwargs)
+
+        data.to_csv(file_path, **csv_params)
+
+        return {
+            'row_count': len(data),
+            'column_count': len(data.columns),
+            'file_size': file_path.stat().st_size
+        }
+
+    def load(self, file_path: Path, **kwargs) -> pd.DataFrame:
+        """Load DataFrame from CSV"""
+        csv_params = {
+            'encoding': 'utf-8'
+        }
+        csv_params.update(kwargs)
+
+        return pd.read_csv(file_path, **csv_params)
+
+    def get_file_extension(self) -> str:
+        return ".csv"
+
+
+class DictHandler(CacheDataHandler):
+    """Handler for dict data type"""
+
+    def save(self, data: dict, file_path: Path, **kwargs) -> Dict[str, Any]:
+        """Save dict as JSON"""
+        json_params = {
+            "ensure_ascii": False,
+            "indent": 2
+        }
+        json_params.update(kwargs)
+
+        with open(file_path, 'w', encoding='utf-8') as f:
+            json.dump(data, f, **json_params)
+
+        return {
+            'key_count': len(data),
+            'file_size': file_path.stat().st_size
+        }
+
+    def load(self, file_path: Path, **kwargs) -> dict:
+        """Load dict from JSON"""
+        with open(file_path, 'r', encoding='utf-8') as f:
+            return json.load(f)
+
+    def get_file_extension(self) -> str:
+        return ".json"
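A quick round trip with the handlers defined above (paths and data are illustrative); `DataCache` in flowllm/storage/cache/data_cache.py presumably builds on these handlers, but its API is not shown here:

```python
from pathlib import Path

import pandas as pd

from flowllm.storage.cache.cache_data_handler import DataFrameHandler, DictHandler

df_handler = DataFrameHandler()
csv_path = Path("/tmp/prices" + df_handler.get_file_extension())  # /tmp/prices.csv
meta = df_handler.save(pd.DataFrame({"code": ["600519"], "close": [1700.0]}), csv_path)
print(meta)                      # {'row_count': 1, 'column_count': 2, 'file_size': ...}
print(df_handler.load(csv_path))

dict_handler = DictHandler()
json_path = Path("/tmp/meta" + dict_handler.get_file_extension())  # /tmp/meta.json
dict_handler.save({"updated": "2024-01-01"}, json_path)
print(dict_handler.load(json_path))
```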