flowllm 0.1.2__tar.gz → 0.1.5__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {flowllm-0.1.2 → flowllm-0.1.5}/PKG-INFO +7 -6
- {flowllm-0.1.2 → flowllm-0.1.5}/flowllm/__init__.py +8 -3
- {flowllm-0.1.2 → flowllm-0.1.5}/flowllm/app.py +1 -1
- flowllm-0.1.5/flowllm/config/base.yaml +75 -0
- flowllm-0.1.5/flowllm/config/fin_supply.yaml +39 -0
- {flowllm-0.1.2 → flowllm-0.1.5}/flowllm/config/pydantic_config_parser.py +16 -1
- flowllm-0.1.5/flowllm/context/__init__.py +2 -0
- {flowllm-0.1.2 → flowllm-0.1.5}/flowllm/context/base_context.py +10 -20
- flowllm-0.1.5/flowllm/context/flow_context.py +59 -0
- {flowllm-0.1.2 → flowllm-0.1.5}/flowllm/context/service_context.py +73 -12
- {flowllm-0.1.2 → flowllm-0.1.5}/flowllm/embedding_model/openai_compatible_embedding_model.py +1 -2
- {flowllm-0.1.2 → flowllm-0.1.5}/flowllm/enumeration/chunk_enum.py +1 -0
- flowllm-0.1.5/flowllm/flow/__init__.py +10 -0
- {flowllm-0.1.2 → flowllm-0.1.5}/flowllm/flow/base_flow.py +44 -11
- flowllm-0.1.5/flowllm/flow/expression/__init__.py +1 -0
- {flowllm-0.1.2/flowllm/flow/parser → flowllm-0.1.5/flowllm/flow/expression}/expression_parser.py +5 -2
- flowllm-0.1.5/flowllm/flow/expression/expression_tool_flow.py +25 -0
- flowllm-0.1.5/flowllm/flow/gallery/__init__.py +1 -0
- flowllm-0.1.5/flowllm/flow/gallery/mock_tool_flow.py +80 -0
- flowllm-0.1.5/flowllm/flow/tool_op_flow.py +97 -0
- {flowllm-0.1.2 → flowllm-0.1.5}/flowllm/llm/base_llm.py +0 -2
- {flowllm-0.1.2 → flowllm-0.1.5}/flowllm/llm/litellm_llm.py +2 -1
- {flowllm-0.1.2 → flowllm-0.1.5}/flowllm/op/__init__.py +3 -3
- {flowllm-0.1.2 → flowllm-0.1.5}/flowllm/op/akshare/get_ak_a_code_op.py +1 -1
- {flowllm-0.1.2 → flowllm-0.1.5}/flowllm/op/akshare/get_ak_a_info_op.py +1 -1
- {flowllm-0.1.2 → flowllm-0.1.5}/flowllm/op/base_llm_op.py +3 -2
- flowllm-0.1.5/flowllm/op/base_op.py +381 -0
- flowllm-0.1.5/flowllm/op/base_tool_op.py +47 -0
- {flowllm-0.1.2 → flowllm-0.1.5}/flowllm/op/gallery/__init__.py +0 -1
- {flowllm-0.1.2 → flowllm-0.1.5}/flowllm/op/gallery/mock_op.py +13 -7
- flowllm-0.1.5/flowllm/op/llm/__init__.py +3 -0
- flowllm-0.1.5/flowllm/op/llm/react_llm_op.py +105 -0
- flowllm-0.1.2/flowllm/op/agent/react_prompt.yaml → flowllm-0.1.5/flowllm/op/llm/react_llm_prompt.yaml +17 -10
- flowllm-0.1.5/flowllm/op/llm/simple_llm_op.py +48 -0
- flowllm-0.1.5/flowllm/op/llm/stream_llm_op.py +61 -0
- flowllm-0.1.5/flowllm/op/mcp/__init__.py +2 -0
- flowllm-0.1.5/flowllm/op/mcp/ant_op.py +42 -0
- flowllm-0.1.5/flowllm/op/mcp/base_sse_mcp_op.py +28 -0
- {flowllm-0.1.2 → flowllm-0.1.5}/flowllm/op/parallel_op.py +5 -1
- flowllm-0.1.5/flowllm/op/search/__init__.py +2 -0
- flowllm-0.1.5/flowllm/op/search/dashscope_search_op.py +131 -0
- flowllm-0.1.5/flowllm/op/search/tavily_search_op.py +91 -0
- {flowllm-0.1.2 → flowllm-0.1.5}/flowllm/op/sequential_op.py +4 -0
- flowllm-0.1.5/flowllm/schema/flow_stream_chunk.py +11 -0
- {flowllm-0.1.2 → flowllm-0.1.5}/flowllm/schema/message.py +2 -0
- {flowllm-0.1.2 → flowllm-0.1.5}/flowllm/schema/service_config.py +8 -3
- {flowllm-0.1.2 → flowllm-0.1.5}/flowllm/schema/tool_call.py +53 -4
- {flowllm-0.1.2 → flowllm-0.1.5}/flowllm/service/__init__.py +0 -1
- {flowllm-0.1.2 → flowllm-0.1.5}/flowllm/service/base_service.py +31 -14
- flowllm-0.1.5/flowllm/service/http_service.py +88 -0
- flowllm-0.1.5/flowllm/service/mcp_service.py +41 -0
- {flowllm-0.1.2 → flowllm-0.1.5}/flowllm/storage/vector_store/__init__.py +1 -0
- flowllm-0.1.5/flowllm/storage/vector_store/base_vector_store.py +131 -0
- flowllm-0.1.5/flowllm/storage/vector_store/chroma_vector_store.py +431 -0
- flowllm-0.1.5/flowllm/storage/vector_store/es_vector_store.py +483 -0
- {flowllm-0.1.2 → flowllm-0.1.5}/flowllm/storage/vector_store/local_vector_store.py +206 -9
- flowllm-0.1.5/flowllm/storage/vector_store/memory_vector_store.py +509 -0
- flowllm-0.1.5/flowllm/utils/common_utils.py +106 -0
- flowllm-0.1.5/flowllm/utils/logger_utils.py +28 -0
- flowllm-0.1.5/flowllm/utils/miner_u_pdf_processor.py +726 -0
- {flowllm-0.1.2 → flowllm-0.1.5}/flowllm.egg-info/PKG-INFO +7 -6
- {flowllm-0.1.2 → flowllm-0.1.5}/flowllm.egg-info/SOURCES.txt +22 -21
- {flowllm-0.1.2 → flowllm-0.1.5}/flowllm.egg-info/requires.txt +4 -3
- {flowllm-0.1.2 → flowllm-0.1.5}/pyproject.toml +7 -7
- flowllm-0.1.5/test/test_streaming.py +49 -0
- flowllm-0.1.5/test/test_streaming_example.py +71 -0
- flowllm-0.1.2/flowllm/config/default.yaml +0 -77
- flowllm-0.1.2/flowllm/config/empty.yaml +0 -37
- flowllm-0.1.2/flowllm/context/flow_context.py +0 -16
- flowllm-0.1.2/flowllm/flow/__init__.py +0 -1
- flowllm-0.1.2/flowllm/flow/gallery/__init__.py +0 -8
- flowllm-0.1.2/flowllm/flow/gallery/cmd_flow.py +0 -11
- flowllm-0.1.2/flowllm/flow/gallery/code_tool_flow.py +0 -30
- flowllm-0.1.2/flowllm/flow/gallery/dashscope_search_tool_flow.py +0 -34
- flowllm-0.1.2/flowllm/flow/gallery/deepsearch_tool_flow.py +0 -39
- flowllm-0.1.2/flowllm/flow/gallery/expression_tool_flow.py +0 -18
- flowllm-0.1.2/flowllm/flow/gallery/mock_tool_flow.py +0 -67
- flowllm-0.1.2/flowllm/flow/gallery/tavily_search_tool_flow.py +0 -30
- flowllm-0.1.2/flowllm/flow/gallery/terminate_tool_flow.py +0 -30
- flowllm-0.1.2/flowllm/op/agent/__init__.py +0 -0
- flowllm-0.1.2/flowllm/op/agent/react_op.py +0 -83
- flowllm-0.1.2/flowllm/op/base_op.py +0 -148
- flowllm-0.1.2/flowllm/op/base_ray_op.py +0 -313
- flowllm-0.1.2/flowllm/op/code/__init__.py +0 -1
- flowllm-0.1.2/flowllm/op/code/execute_code_op.py +0 -42
- flowllm-0.1.2/flowllm/op/gallery/terminate_op.py +0 -29
- flowllm-0.1.2/flowllm/op/search/__init__.py +0 -3
- flowllm-0.1.2/flowllm/op/search/dashscope_deep_research_op.py +0 -260
- flowllm-0.1.2/flowllm/op/search/dashscope_search_op.py +0 -179
- flowllm-0.1.2/flowllm/op/search/tavily_search_op.py +0 -102
- flowllm-0.1.2/flowllm/schema/__init__.py +0 -0
- flowllm-0.1.2/flowllm/service/cmd_service.py +0 -15
- flowllm-0.1.2/flowllm/service/http_service.py +0 -79
- flowllm-0.1.2/flowllm/service/mcp_service.py +0 -47
- flowllm-0.1.2/flowllm/storage/vector_store/base_vector_store.py +0 -44
- flowllm-0.1.2/flowllm/storage/vector_store/chroma_vector_store.py +0 -189
- flowllm-0.1.2/flowllm/storage/vector_store/es_vector_store.py +0 -227
- flowllm-0.1.2/flowllm/utils/__init__.py +0 -0
- flowllm-0.1.2/flowllm/utils/common_utils.py +0 -52
- {flowllm-0.1.2 → flowllm-0.1.5}/LICENSE +0 -0
- {flowllm-0.1.2 → flowllm-0.1.5}/README.md +0 -0
- {flowllm-0.1.2 → flowllm-0.1.5}/flowllm/client/__init__.py +0 -0
- {flowllm-0.1.2 → flowllm-0.1.5}/flowllm/client/async_http_client.py +0 -0
- {flowllm-0.1.2 → flowllm-0.1.5}/flowllm/client/http_client.py +0 -0
- {flowllm-0.1.2 → flowllm-0.1.5}/flowllm/client/mcp_client.py +0 -0
- {flowllm-0.1.2 → flowllm-0.1.5}/flowllm/client/sync_mcp_client.py +0 -0
- {flowllm-0.1.2 → flowllm-0.1.5}/flowllm/config/__init__.py +0 -0
- {flowllm-0.1.2 → flowllm-0.1.5}/flowllm/context/prompt_handler.py +0 -0
- {flowllm-0.1.2 → flowllm-0.1.5}/flowllm/context/registry.py +0 -0
- {flowllm-0.1.2 → flowllm-0.1.5}/flowllm/embedding_model/__init__.py +0 -0
- {flowllm-0.1.2 → flowllm-0.1.5}/flowllm/embedding_model/base_embedding_model.py +0 -0
- {flowllm-0.1.2/flowllm/context → flowllm-0.1.5/flowllm/enumeration}/__init__.py +0 -0
- {flowllm-0.1.2 → flowllm-0.1.5}/flowllm/enumeration/http_enum.py +0 -0
- {flowllm-0.1.2 → flowllm-0.1.5}/flowllm/enumeration/role.py +0 -0
- {flowllm-0.1.2 → flowllm-0.1.5}/flowllm/flow/base_tool_flow.py +0 -0
- {flowllm-0.1.2 → flowllm-0.1.5}/flowllm/llm/__init__.py +0 -0
- {flowllm-0.1.2 → flowllm-0.1.5}/flowllm/llm/openai_compatible_llm.py +0 -0
- {flowllm-0.1.2 → flowllm-0.1.5}/flowllm/op/akshare/__init__.py +0 -0
- {flowllm-0.1.2 → flowllm-0.1.5}/flowllm/op/akshare/get_ak_a_code_prompt.yaml +0 -0
- {flowllm-0.1.2 → flowllm-0.1.5}/flowllm/op/search/dashscope_search_prompt.yaml +0 -0
- {flowllm-0.1.2/flowllm/enumeration → flowllm-0.1.5/flowllm/schema}/__init__.py +0 -0
- {flowllm-0.1.2 → flowllm-0.1.5}/flowllm/schema/flow_request.py +0 -0
- {flowllm-0.1.2 → flowllm-0.1.5}/flowllm/schema/flow_response.py +0 -0
- {flowllm-0.1.2 → flowllm-0.1.5}/flowllm/schema/vector_node.py +0 -0
- {flowllm-0.1.2 → flowllm-0.1.5}/flowllm/storage/__init__.py +0 -0
- {flowllm-0.1.2 → flowllm-0.1.5}/flowllm/storage/cache/__init__.py +0 -0
- {flowllm-0.1.2 → flowllm-0.1.5}/flowllm/storage/cache/cache_data_handler.py +0 -0
- {flowllm-0.1.2 → flowllm-0.1.5}/flowllm/storage/cache/data_cache.py +0 -0
- {flowllm-0.1.2/flowllm/flow/parser → flowllm-0.1.5/flowllm/utils}/__init__.py +0 -0
- {flowllm-0.1.2 → flowllm-0.1.5}/flowllm/utils/fetch_url.py +0 -0
- {flowllm-0.1.2 → flowllm-0.1.5}/flowllm/utils/llm_utils.py +0 -0
- {flowllm-0.1.2 → flowllm-0.1.5}/flowllm/utils/ridge_v2.py +0 -0
- {flowllm-0.1.2 → flowllm-0.1.5}/flowllm/utils/singleton.py +0 -0
- {flowllm-0.1.2 → flowllm-0.1.5}/flowllm/utils/timer.py +0 -0
- {flowllm-0.1.2 → flowllm-0.1.5}/flowllm.egg-info/dependency_links.txt +0 -0
- {flowllm-0.1.2 → flowllm-0.1.5}/flowllm.egg-info/entry_points.txt +0 -0
- {flowllm-0.1.2 → flowllm-0.1.5}/flowllm.egg-info/top_level.txt +0 -0
- {flowllm-0.1.2 → flowllm-0.1.5}/setup.cfg +0 -0
- {flowllm-0.1.2 → flowllm-0.1.5}/test/test_cache.py +0 -0
- {flowllm-0.1.2 → flowllm-0.1.5}/test/test_config.py +0 -0
- {flowllm-0.1.2 → flowllm-0.1.5}/test/test_dashscope_llm.py +0 -0
- {flowllm-0.1.2 → flowllm-0.1.5}/test/test_dataframe_cache.py +0 -0
- {flowllm-0.1.2 → flowllm-0.1.5}/test/test_simple_flow.py +0 -0
{flowllm-0.1.2 → flowllm-0.1.5}/PKG-INFO
RENAMED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: flowllm
-Version: 0.1.2
+Version: 0.1.5
 Summary: A flexible framework for building LLM-powered flows and mcp services
 Author-email: FlowLLM Team <flowllm@example.com>
 Maintainer-email: FlowLLM Team <flowllm@example.com>
@@ -219,7 +219,7 @@ Classifier: Topic :: Scientific/Engineering :: Artificial Intelligence
 Classifier: Topic :: Software Development :: Libraries :: Python Modules
 Classifier: Topic :: Software Development :: Libraries :: Application Frameworks
 Classifier: Typing :: Typed
-Requires-Python: >=3.
+Requires-Python: >=3.11
 Description-Content-Type: text/markdown
 License-File: LICENSE
 Requires-Dist: akshare
@@ -246,9 +246,10 @@ Requires-Dist: urllib3
 Requires-Dist: uvicorn[standard]
 Requires-Dist: chromadb
 Requires-Dist: elasticsearch
-Requires-Dist:
-
-
+Requires-Dist: pyfiglet
+Requires-Dist: rich
+Provides-Extra: dist
+Requires-Dist: ray; extra == "dist"
 Provides-Extra: all
-Requires-Dist: flowllm[
+Requires-Dist: flowllm[dist]; extra == "all"
 Dynamic: license-file
{flowllm-0.1.2 → flowllm-0.1.5}/flowllm/__init__.py
RENAMED
@@ -1,5 +1,9 @@
 import os
 
+from flowllm.utils.logger_utils import init_logger
+
+init_logger()
+
 from flowllm.utils.common_utils import load_env
 
 load_env()
@@ -14,8 +18,9 @@ if not os.environ.get("FLOW_USE_FRAMEWORK", "").lower() == "true":
 
 from flowllm import service
 
-from flowllm.context
-from flowllm.op import BaseOp,
+from flowllm.context import C
+from flowllm.op import BaseOp, BaseLLMOp, BaseToolOp
+
 
-__version__ = "0.1.2"
+__version__ = "0.1.5"
 
flowllm-0.1.5/flowllm/config/base.yaml
@@ -0,0 +1,75 @@
+backend: http
+language: ""
+thread_pool_max_workers: 64
+ray_max_workers: 1
+
+mcp:
+  transport: sse
+  host: "0.0.0.0"
+  port: 8001
+
+http:
+  host: "0.0.0.0"
+  port: 8001
+  timeout_keep_alive: 600
+  limit_concurrency: 64
+
+flow:
+  llm_flow:
+    flow_content: simple_llm_op
+    stream: false
+    use_async: true
+    service_type: http
+    description: "llm_flow"
+    input_schema:
+      query:
+        type: "str"
+        description: "user query"
+        required: true
+
+  llm_flow_stream:
+    flow_content: stream_llm_op
+    stream: true
+    use_async: true
+    service_type: http
+    description: "llm_flow"
+    input_schema:
+      query:
+        type: "str"
+        description: "user query"
+        required: true
+
+op:
+  mock1_op:
+    backend: mock1_op
+    llm: default
+    vector_store: default
+
+llm:
+  default:
+    backend: openai_compatible
+    model_name: qwen3-30b-a3b-instruct-2507
+    params:
+      temperature: 0.6
+
+  qwen3_30b_instruct:
+    backend: openai_compatible
+    model_name: qwen3-30b-a3b-instruct-2507
+
+  qwen3_30b_thinking:
+    backend: openai_compatible
+    model_name: qwen3-30b-a3b-thinking-2507
+
+embedding_model:
+  default:
+    backend: openai_compatible
+    model_name: text-embedding-v4
+    params:
+      dimensions: 1024
+
+vector_store:
+  default:
+    backend: elasticsearch
+    embedding_model: default
+# params:
+#      hosts: "http://localhost:9200"
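As an orientation aid (not part of the package), a minimal Python sketch for reading this new base.yaml with PyYAML and listing the declared flows; the relative path assumes a source checkout.

```python
# Minimal sketch, assuming flowllm/config/base.yaml is present in a checkout.
import yaml

with open("flowllm/config/base.yaml") as f:
    config = yaml.safe_load(f)

# e.g. "llm_flow -> simple_llm_op (stream=False)"
for name, flow in config.get("flow", {}).items():
    print(f"{name} -> {flow['flow_content']} (stream={flow['stream']})")
```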
flowllm-0.1.5/flowllm/config/fin_supply.yaml
@@ -0,0 +1,39 @@
+backend: mcp
+import_config: base
+disabled_flows:
+  - llm_flow
+  - llm_flow_stream
+  - mock_async_tool_flow
+  - mock_tool_flow
+  - react_llm_tool_flow
+  - simple_llm_tool_flow
+  - stream_llm_tool_flow
+
+flow:
+  get_a_stock_infos:
+    flow_content: get_ak_a_code_op >> get_ak_a_info_op >> get_ak_a_spot_op >> get_ak_a_money_flow_op >> get_ak_a_financial_info_op >> merge_ak_a_info_op
+    stream: false
+    use_async: false
+    service_type: http+mcp
+    description: "Retrieve the A-share stock codes from the query, and fetch information about these stocks, including company basic information, current stock price and its change percentage, capital inflow and outflow data for the most recent day, and financial information from the latest quarter."
+    input_schema:
+      query:
+        type: "str"
+        description: "user question"
+
+  get_a_stock_news:
+    flow_content: get_ak_a_code_op >> get_ak_a_news_op >> merge_ak_a_info_op
+    stream: false
+    use_async: false
+    service_type: http+mcp
+    description: "Retrieve the A-share stock codes from the query, and obtain the latest news information about these stocks."
+    input_schema:
+      query:
+        type: "str"
+        description: "user question"
+
+  ant_search:
+    flow_content: ant_search_op
+
+  ant_investment:
+    flow_content: ant_investment_op
{flowllm-0.1.2 → flowllm-0.1.5}/flowllm/config/pydantic_config_parser.py
RENAMED
@@ -202,9 +202,24 @@ class PydanticConfigParser(Generic[T]):
         config_path = Path(self.current_file).parent / config
         if not config_path.exists():
             config_path = Path(config)
+        logger.info(f"flowllm using config={config_path}")
 
         yaml_config = self.load_from_yaml(config_path)
-
+
+        # load import configs
+        import_config = yaml_config.get("import_config", "")
+        if import_config:
+            if not import_config.endswith(".yaml"):
+                import_config += ".yaml"
+            import_config_path = Path(self.current_file).parent / import_config
+            if not import_config_path.exists():
+                import_config_path = Path(import_config)
+            logger.info(f"flowllm using import_config_path={import_config_path}")
+
+            # load import config
+            import_yaml_config = self.load_from_yaml(import_config_path)
+            configs_to_merge.append(import_yaml_config)
+
         configs_to_merge.append(yaml_config)
 
         # 3. Command line override configuration
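A simplified, standalone sketch of what the new import_config handling appears to do: resolve the imported YAML next to the importing file and merge it in first so the importing file's keys win. The helper below is hypothetical and performs only a shallow top-level merge; the real parser merges through its own configs_to_merge pipeline.

```python
# Hypothetical illustration of the import_config semantics shown above.
from pathlib import Path

import yaml


def load_with_import(path: str) -> dict:
    path = Path(path)
    cfg = yaml.safe_load(path.read_text())

    merged: dict = {}
    import_name = cfg.get("import_config", "")
    if import_name:
        if not import_name.endswith(".yaml"):
            import_name += ".yaml"
        import_path = path.parent / import_name
        if not import_path.exists():
            import_path = Path(import_name)
        merged.update(yaml.safe_load(import_path.read_text()))  # imported config first

    merged.update(cfg)  # importing file overrides (shallow, top-level only)
    return merged


# e.g. fin_supply.yaml declares `import_config: base`, so this would pull in base.yaml:
# config = load_with_import("flowllm/config/fin_supply.yaml")
```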
{flowllm-0.1.2 → flowllm-0.1.5}/flowllm/context/base_context.py
RENAMED
@@ -1,22 +1,13 @@
 class BaseContext:
     def __init__(self, **kwargs):
-        self._data = {**kwargs}
+        self._data: dict = {**kwargs}
 
     def __getattr__(self, name: str):
-
-        if name == '_data':
-            raise AttributeError(f"'{self.__class__.__name__}' has no attribute '{name}'")
-
-        # Use object.__getattribute__ to safely access _data
-        try:
-            data = object.__getattribute__(self, '_data')
-        except AttributeError:
-            raise AttributeError(f"'{self.__class__.__name__}' has no attribute '{name}'")
-
+        data = object.__getattribute__(self, "_data")
         if name in data:
             return data[name]
-
-
+        else:
+            raise AttributeError(f"'{self.__class__.__name__}' has no attribute '{name}'")
 
     def __setattr__(self, name: str, value):
         if name == "_data":
@@ -25,9 +16,10 @@ class BaseContext:
         self._data[name] = value
 
     def __getitem__(self, name: str):
-        if name
+        if name in self._data:
+            return self._data[name]
+        else:
             raise AttributeError(f"'{self.__class__.__name__}' has no attribute '{name}'")
-        return self._data[name]
 
     def __setitem__(self, name: str, value):
         self._data[name] = value
@@ -48,19 +40,17 @@ class BaseContext:
     def keys(self):
         return self._data.keys()
 
-    def update(self,
+    def update(self, kwargs: dict):
         self._data.update(kwargs)
 
     def items(self):
         return self._data.items()
 
     def __getstate__(self):
-
-        return {'_data': self._data}
+        return self._data
 
     def __setstate__(self, state):
-
-        self._data = state['_data']
+        self._data = state
 
 if __name__ == "__main__":
     ctx = BaseContext(**{"name": "Alice", "age": 30, "city": "New York"})
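The simplified accessors above keep attribute and item access backed by the same _data dict; a small behaviour sketch (values are placeholders):

```python
# Usage sketch for the rewritten BaseContext shown in the diff above.
from flowllm.context.base_context import BaseContext

ctx = BaseContext(name="Alice", age=30)
assert ctx.name == "Alice"      # __getattr__ reads from _data
assert ctx["age"] == 30         # __getitem__ reads the same dict
ctx.city = "New York"           # __setattr__ writes into _data

try:
    _ = ctx.missing
except AttributeError:
    pass                        # unknown keys raise AttributeError in both paths
```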
flowllm-0.1.5/flowllm/context/flow_context.py
@@ -0,0 +1,59 @@
+import asyncio
+import uuid
+from typing import Optional
+
+from flowllm.context.base_context import BaseContext
+from flowllm.enumeration.chunk_enum import ChunkEnum
+from flowllm.schema.flow_response import FlowResponse
+from flowllm.schema.flow_stream_chunk import FlowStreamChunk
+
+
+class FlowContext(BaseContext):
+
+    def __init__(self,
+                 flow_id: str = uuid.uuid4().hex,
+                 response: Optional[FlowResponse] = None,
+                 stream_queue: Optional[asyncio.Queue] = None,
+                 **kwargs):
+        super().__init__(**kwargs)
+
+        self.flow_id: str = flow_id
+        self.response: Optional[FlowResponse] = response if response is not None else FlowResponse()
+        self.stream_queue: Optional[asyncio.Queue] = stream_queue
+
+    async def add_stream_chunk(self, stream_chunk: FlowStreamChunk):
+        stream_chunk.flow_id = self.flow_id
+        await self.stream_queue.put(stream_chunk)
+        return self
+
+    async def add_stream_chunk_and_type(self, chunk: str | bytes, chunk_type: ChunkEnum):
+        await self.stream_queue.put(FlowStreamChunk(flow_id=self.flow_id, chunk_type=chunk_type, chunk=chunk))
+        return self
+
+    async def add_stream_answer(self, chunk: str):
+        await self.add_stream_chunk_and_type(chunk_type=ChunkEnum.ANSWER, chunk=chunk)
+        return self
+
+    async def add_stream_think(self, chunk: str):
+        await self.add_stream_chunk_and_type(chunk_type=ChunkEnum.THINK, chunk=chunk)
+        return self
+
+    async def add_stream_error(self, e: Exception):
+        await self.add_stream_chunk_and_type(chunk_type=ChunkEnum.ERROR, chunk=str(e))
+        return self
+
+    async def add_stream_done(self):
+        done_chunk = FlowStreamChunk(flow_id=self.flow_id, chunk_type=ChunkEnum.DONE, chunk="", done=True)
+        await self.stream_queue.put(done_chunk)
+        return self
+
+    def add_response_error(self, e: Exception):
+        self.response.success = False
+        self.response.answer = str(e.args)
+
+    def copy(self, **kwargs) -> "FlowContext":
+        context_kwargs = self.dump()
+        context_kwargs.update(kwargs)
+        context_kwargs["response"] = FlowResponse()
+        context = FlowContext(**context_kwargs)
+        return context
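A hedged usage sketch of the new streaming helpers: the producer side pushes think/answer chunks and a final done marker, the consumer drains the queue. Only the FlowContext methods come from the diff above; the driver loop is illustrative.

```python
import asyncio

from flowllm.context.flow_context import FlowContext


async def main():
    context = FlowContext(stream_queue=asyncio.Queue())

    # producer side: what an op might emit during a streaming flow
    await context.add_stream_think("looking up the answer...")
    await context.add_stream_answer("42")
    await context.add_stream_done()

    # consumer side: drain the queue until the done chunk arrives
    while True:
        chunk = await context.stream_queue.get()
        print(chunk.chunk_type, chunk.chunk)
        if chunk.done:
            break


asyncio.run(main())
```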
{flowllm-0.1.2 → flowllm-0.1.5}/flowllm/context/service_context.py
RENAMED
@@ -1,15 +1,18 @@
+import asyncio
+import json
 import os
 import uuid
 from concurrent.futures import ThreadPoolExecutor
 from inspect import isclass
 from typing import Dict, List
 
-import
+from fastmcp import Client
 from loguru import logger
 
 from flowllm.context.base_context import BaseContext
 from flowllm.context.registry import Registry
 from flowllm.schema.service_config import ServiceConfig, EmbeddingModelConfig
+from flowllm.schema.tool_call import ToolCall
 from flowllm.utils.singleton import singleton
 
 
@@ -24,6 +27,7 @@ class ServiceContext(BaseContext):
         self.language: str = ""
         self.thread_pool: ThreadPoolExecutor | None = None
         self.vector_store_dict: dict = {}
+        self.sse_mcp_dict: dict = {}
 
         self.registry_dict: Dict[str, Registry] = {}
         use_framework: bool = os.environ.get("FLOW_USE_FRAMEWORK", "").lower() == "true"
@@ -37,13 +41,42 @@
             register_flow_module = False
             self.registry_dict[key] = Registry(key, enable_log=enable_log, register_flow_module=register_flow_module)
 
-        self.
+        self.flow_dict: dict = {}
 
-    def
-
+    def set_service_config(self, parser=None, config_name: str = "config=base"):
+        if parser is None:
+            from flowllm.config.pydantic_config_parser import PydanticConfigParser
+            parser = PydanticConfigParser
 
-        config_parser =
-        self.service_config = config_parser.parse_args(
+        config_parser = parser(ServiceConfig)
+        self.service_config = config_parser.parse_args(config_name)
+        return self
+
+    @staticmethod
+    async def get_sse_mcp_dict(hosts: List[str]):
+        tool_call_dict = {}
+
+        for host in hosts:
+            async with Client(f"{host}/sse/") as client:
+                tools = await client.list_tools()
+                for tool in tools:
+                    tool_call = ToolCall.from_mcp_tool(tool)
+                    key = host + "/" + tool.name
+                    tool_call_dict[key] = tool_call
+                    logger.info(f"{host} find mcp_name={key} "
+                                f"tool_call={json.dumps(tool_call.simple_input_dump(), ensure_ascii=False)}")
+        return tool_call_dict
+
+    def prepare_sse_mcp(self):
+        hosts = os.getenv("FLOW_MCP_HOSTS")
+        if not hosts:
+            return self
+
+        hosts = [x.strip() for x in hosts.strip().split(",") if x.strip()]
+        if not hosts:
+            return self
+
+        self.sse_mcp_dict = asyncio.run(self.get_sse_mcp_dict(hosts))
         return self
 
     def init_by_service_config(self, service_config: ServiceConfig = None):
@@ -53,6 +86,7 @@
         self.language = self.service_config.language
         self.thread_pool = ThreadPoolExecutor(max_workers=self.service_config.thread_pool_max_workers)
         if self.service_config.ray_max_workers > 1:
+            import ray
             ray.init(num_cpus=self.service_config.ray_max_workers)
 
         # add vector store
@@ -65,33 +99,50 @@
             self.vector_store_dict[name] = vector_store_cls(embedding_model=embedding_model, **config.params)
 
         from flowllm.flow.base_tool_flow import BaseToolFlow
-        from flowllm.flow.
+        from flowllm.flow.expression.expression_tool_flow import ExpressionToolFlow
 
         # add tool flow cls
         for name, tool_flow_cls in self.registry_dict["tool_flow"].items():
             if not isclass(tool_flow_cls):
                 continue
 
+            if name in self.service_config.disabled_flows:
+                continue
+
             tool_flow: BaseToolFlow = tool_flow_cls()
-            self.
-            logger.info(f"add
+            self.flow_dict[tool_flow.name] = tool_flow
+            logger.info(f"add cls tool_flow: {tool_flow.name}")
 
         # add tool flow config
         for name, flow_config in self.service_config.flow.items():
+            if name in self.service_config.disabled_flows:
+                continue
+
             flow_config.name = name
             tool_flow: BaseToolFlow = ExpressionToolFlow(flow_config=flow_config)
-            self.
+            self.flow_dict[tool_flow.name] = tool_flow
             logger.info(f"add expression tool_flow:{tool_flow.name}")
 
+    def stop_by_service_config(self, wait_thread_pool=True, wait_ray: bool = True):
+        self.thread_pool.shutdown(wait=wait_thread_pool)
+        if self.service_config.ray_max_workers > 1:
+            import ray
+            ray.shutdown(_exiting_interpreter=not wait_ray)
+
+        from flowllm.storage.vector_store.base_vector_store import BaseVectorStore
+        for name, vector_store in self.vector_store_dict.items():
+            assert isinstance(vector_store, BaseVectorStore)
+            vector_store.close()
+
     def get_vector_store(self, name: str = "default"):
         return self.vector_store_dict[name]
 
     def get_tool_flow(self, name: str = "default"):
-        return self.
+        return self.flow_dict[name]
 
     @property
     def tool_flow_names(self) -> List[str]:
-        return sorted(self.
+        return sorted(self.flow_dict.keys())
 
     """
     register models
@@ -143,5 +194,15 @@
         assert name in self.registry_dict["service"], f"service={name} not found!"
         return self.registry_dict["service"][name]
 
+    @staticmethod
+    def list_flow_schemas() -> List[dict]:
+        from flowllm.flow.base_tool_flow import BaseToolFlow
+
+        flow_schemas = []
+        for name, tool_flow in C.flow_dict.items():
+            assert isinstance(tool_flow, BaseToolFlow)
+            flow_schemas.append(tool_flow.tool_call.simple_input_dump())
+        return flow_schemas
+
 
 C = ServiceContext()
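Sketch of the new SSE MCP discovery path, assuming a reachable MCP server and an already-loaded service config; the host URL is a placeholder.

```python
import os

from flowllm.context.service_context import C

os.environ["FLOW_MCP_HOSTS"] = "http://localhost:8001"  # placeholder host

C.prepare_sse_mcp()  # fills C.sse_mcp_dict via get_sse_mcp_dict()
for key, tool_call in C.sse_mcp_dict.items():
    print(key, tool_call.simple_input_dump())
```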
{flowllm-0.1.2 → flowllm-0.1.5}/flowllm/embedding_model/openai_compatible_embedding_model.py
RENAMED
@@ -65,8 +65,7 @@ class OpenAICompatibleEmbeddingModel(BaseEmbeddingModel):
             model=self.model_name,
             input=input_text,
             dimensions=self.dimensions,
-            encoding_format=self.encoding_format
-        )
+            encoding_format=self.encoding_format)
 
         if isinstance(input_text, str):
             return completion.data[0].embedding
flowllm-0.1.5/flowllm/flow/__init__.py
@@ -0,0 +1,10 @@
+from .base_flow import BaseFlow
+from .base_tool_flow import BaseToolFlow
+from .tool_op_flow import TavilySearchToolFlow, DashscopeSearchToolFlow, SimpleLLMToolFlow, ReactLLMToolFlow, \
+    StreamLLMToolFlow
+
+"""
+"""
+
+from . import gallery
+from . import expression
{flowllm-0.1.2 → flowllm-0.1.5}/flowllm/flow/base_flow.py
RENAMED
@@ -1,23 +1,35 @@
+import asyncio
 from abc import ABC, abstractmethod
-from
+from functools import partial
+from typing import Union
 
 from loguru import logger
 
 from flowllm.context.flow_context import FlowContext
+from flowllm.context.service_context import C
 from flowllm.op.base_op import BaseOp
 from flowllm.op.parallel_op import ParallelOp
 from flowllm.op.sequential_op import SequentialOp
 from flowllm.schema.flow_response import FlowResponse
+from flowllm.schema.flow_stream_chunk import FlowStreamChunk
 from flowllm.utils.common_utils import camel_to_snake
 
 
 class BaseFlow(ABC):
 
-    def __init__(self,
+    def __init__(self,
+                 name: str = "",
+                 use_async: bool = True,
+                 stream: bool = True,
+                 service_type: str = "",
+                 **kwargs):
         self.name: str = name or camel_to_snake(self.__class__.__name__)
+        self.use_async: bool = use_async
+        self.stream: bool = stream
+        self.service_type: str = service_type
         self.flow_params: dict = kwargs
 
-        self.flow_op
+        self.flow_op = self.build_flow()
         self.print_flow()
 
     @abstractmethod
@@ -53,20 +65,41 @@ class BaseFlow(ABC):
 
         else:
             logger.info(f"{prefix}Operation: {op.name}")
+            if op.sub_op is not None:
+                self._print_operation_tree(op.sub_op, indent + 2)
 
-    def
-
+    def after_flow(self, context):
+        ...
 
-    def __call__(self, **kwargs) -> FlowResponse:
-        context = FlowContext(**kwargs)
+    async def __call__(self, **kwargs) -> Union[FlowResponse | FlowStreamChunk | None]:
+        context = FlowContext(stream=self.stream, use_async=self.use_async, service_type=self.service_type, **kwargs)
         logger.info(f"request.params={kwargs}")
 
         try:
-            self.
+            flow_op: BaseOp = self.build_flow()
+
+            if self.use_async:
+                await flow_op.async_call(context=context)
+
+            else:
+                loop = asyncio.get_event_loop()
+                op_call_fn = partial(flow_op.__call__, context=context)
+                await loop.run_in_executor(executor=C.thread_pool, func=op_call_fn)  # noqa
+
+            if self.stream:
+                await context.add_stream_done()
+            else:
+                self.after_flow(context)
 
         except Exception as e:
             logger.exception(f"flow_name={self.name} encounter error={e.args}")
-            context.response.success = False
-            context.response.answer = str(e.args)
 
-
+            if self.stream:
+                await context.add_stream_error(e)
+            else:
+                context.add_response_error(e)
+
+        if self.stream:
+            return context.stream_queue
+        else:
+            return context.response
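Because BaseFlow.__call__ is now async, invoking a registered flow looks roughly like the sketch below (assuming the service context has already been initialized from a config such as base.yaml; flow name and query are placeholders). Non-streaming flows resolve to a FlowResponse, streaming flows hand back the context's stream queue.

```python
import asyncio

from flowllm.context.service_context import C


async def main():
    flow = C.get_tool_flow("llm_flow")    # non-streaming flow defined in base.yaml
    response = await flow(query="hello")  # FlowResponse when stream=False
    print(response.success, response.answer)


asyncio.run(main())
```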
flowllm-0.1.5/flowllm/flow/expression/__init__.py
@@ -0,0 +1 @@
+from .expression_tool_flow import ExpressionToolFlow
{flowllm-0.1.2/flowllm/flow/parser → flowllm-0.1.5/flowllm/flow/expression}/expression_parser.py
RENAMED
@@ -141,11 +141,13 @@ class ExpressionParser:
     def _create_op(op_name: str) -> BaseOp:
         if op_name in C.service_config.op:
             op_config: OpConfig = C.service_config.op[op_name]
-
+        else:
+            op_config: OpConfig = OpConfig()
 
+        if op_config.backend in C.registry_dict["op"]:
+            op_cls = C.resolve_op(op_config.backend)
 
         elif op_name in C.registry_dict["op"]:
-            op_config: OpConfig = OpConfig()
             op_cls = C.resolve_op(op_name)
 
         else:
@@ -153,6 +155,7 @@ class ExpressionParser:
 
         kwargs = {
             "name": op_name,
+            "max_retries": op_config.max_retries,
             "raise_exception": op_config.raise_exception,
             **op_config.params
         }
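This parser is what turns a flow_content expression (e.g. the `op_a >> op_b` chains in fin_supply.yaml) into an op tree. A minimal, hypothetical invocation, assuming the service context is already configured and the op names are registered:

```python
from flowllm.flow.expression.expression_parser import ExpressionParser

# op names taken from fin_supply.yaml; any registered ops would do
parser = ExpressionParser("get_ak_a_code_op >> get_ak_a_info_op")
flow_op = parser.parse_flow()  # returns a BaseOp tree (a sequential chain for ">>")
```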
flowllm-0.1.5/flowllm/flow/expression/expression_tool_flow.py
@@ -0,0 +1,25 @@
+from flowllm.flow.base_tool_flow import BaseToolFlow
+from flowllm.flow.expression.expression_parser import ExpressionParser
+from flowllm.schema.service_config import FlowConfig
+from flowllm.schema.tool_call import ToolCall
+
+
+class ExpressionToolFlow(BaseToolFlow):
+
+    def __init__(self, flow_config: FlowConfig = None, **kwargs):
+        self.flow_config: FlowConfig = flow_config
+        super().__init__(name=flow_config.name,
+                         use_async=self.flow_config.use_async,
+                         stream=self.flow_config.stream,
+                         service_type=self.flow_config.service_type,
+                         **kwargs)
+
+    def build_flow(self):
+        parser = ExpressionParser(self.flow_config.flow_content)
+        return parser.parse_flow()
+
+    def build_tool_call(self) -> ToolCall:
+        if hasattr(self.flow_op, "tool_call"):
+            return self.flow_op.tool_call
+        else:
+            return ToolCall(**self.flow_config.model_dump())
flowllm-0.1.5/flowllm/flow/gallery/__init__.py
@@ -0,0 +1 @@
+from .mock_tool_flow import MockToolFlow, MockAsyncToolFlow
|