flowllm 0.1.0__py3-none-any.whl → 0.1.2__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (141)
  1. flowllm/__init__.py +21 -0
  2. flowllm/app.py +15 -0
  3. flowllm/client/__init__.py +25 -0
  4. flowllm/client/async_http_client.py +81 -0
  5. flowllm/client/http_client.py +81 -0
  6. flowllm/client/mcp_client.py +133 -0
  7. flowllm/client/sync_mcp_client.py +116 -0
  8. flowllm/config/__init__.py +1 -0
  9. flowllm/config/default.yaml +77 -0
  10. flowllm/config/empty.yaml +37 -0
  11. flowllm/config/pydantic_config_parser.py +242 -0
  12. flowllm/context/base_context.py +79 -0
  13. flowllm/context/flow_context.py +16 -0
  14. llmflow/op/prompt_mixin.py → flowllm/context/prompt_handler.py +25 -14
  15. flowllm/context/registry.py +30 -0
  16. flowllm/context/service_context.py +147 -0
  17. flowllm/embedding_model/__init__.py +1 -0
  18. {llmflow → flowllm}/embedding_model/base_embedding_model.py +93 -2
  19. {llmflow → flowllm}/embedding_model/openai_compatible_embedding_model.py +71 -13
  20. flowllm/flow/__init__.py +1 -0
  21. flowllm/flow/base_flow.py +72 -0
  22. flowllm/flow/base_tool_flow.py +15 -0
  23. flowllm/flow/gallery/__init__.py +8 -0
  24. flowllm/flow/gallery/cmd_flow.py +11 -0
  25. flowllm/flow/gallery/code_tool_flow.py +30 -0
  26. flowllm/flow/gallery/dashscope_search_tool_flow.py +34 -0
  27. flowllm/flow/gallery/deepsearch_tool_flow.py +39 -0
  28. flowllm/flow/gallery/expression_tool_flow.py +18 -0
  29. flowllm/flow/gallery/mock_tool_flow.py +67 -0
  30. flowllm/flow/gallery/tavily_search_tool_flow.py +30 -0
  31. flowllm/flow/gallery/terminate_tool_flow.py +30 -0
  32. flowllm/flow/parser/expression_parser.py +171 -0
  33. flowllm/llm/__init__.py +2 -0
  34. {llmflow → flowllm}/llm/base_llm.py +100 -18
  35. flowllm/llm/litellm_llm.py +455 -0
  36. flowllm/llm/openai_compatible_llm.py +439 -0
  37. flowllm/op/__init__.py +11 -0
  38. llmflow/op/react/react_v1_op.py → flowllm/op/agent/react_op.py +17 -22
  39. flowllm/op/akshare/__init__.py +3 -0
  40. flowllm/op/akshare/get_ak_a_code_op.py +108 -0
  41. flowllm/op/akshare/get_ak_a_code_prompt.yaml +21 -0
  42. flowllm/op/akshare/get_ak_a_info_op.py +140 -0
  43. flowllm/op/base_llm_op.py +64 -0
  44. flowllm/op/base_op.py +148 -0
  45. flowllm/op/base_ray_op.py +313 -0
  46. flowllm/op/code/__init__.py +1 -0
  47. flowllm/op/code/execute_code_op.py +42 -0
  48. flowllm/op/gallery/__init__.py +2 -0
  49. flowllm/op/gallery/mock_op.py +42 -0
  50. flowllm/op/gallery/terminate_op.py +29 -0
  51. flowllm/op/parallel_op.py +23 -0
  52. flowllm/op/search/__init__.py +3 -0
  53. flowllm/op/search/dashscope_deep_research_op.py +260 -0
  54. flowllm/op/search/dashscope_search_op.py +179 -0
  55. flowllm/op/search/dashscope_search_prompt.yaml +13 -0
  56. flowllm/op/search/tavily_search_op.py +102 -0
  57. flowllm/op/sequential_op.py +21 -0
  58. flowllm/schema/flow_request.py +12 -0
  59. flowllm/schema/flow_response.py +12 -0
  60. flowllm/schema/message.py +35 -0
  61. flowllm/schema/service_config.py +72 -0
  62. flowllm/schema/tool_call.py +118 -0
  63. {llmflow → flowllm}/schema/vector_node.py +1 -0
  64. flowllm/service/__init__.py +3 -0
  65. flowllm/service/base_service.py +68 -0
  66. flowllm/service/cmd_service.py +15 -0
  67. flowllm/service/http_service.py +79 -0
  68. flowllm/service/mcp_service.py +47 -0
  69. flowllm/storage/__init__.py +1 -0
  70. flowllm/storage/cache/__init__.py +1 -0
  71. flowllm/storage/cache/cache_data_handler.py +104 -0
  72. flowllm/storage/cache/data_cache.py +375 -0
  73. flowllm/storage/vector_store/__init__.py +3 -0
  74. flowllm/storage/vector_store/base_vector_store.py +44 -0
  75. {llmflow → flowllm/storage}/vector_store/chroma_vector_store.py +11 -10
  76. {llmflow → flowllm/storage}/vector_store/es_vector_store.py +11 -11
  77. llmflow/vector_store/file_vector_store.py → flowllm/storage/vector_store/local_vector_store.py +110 -11
  78. flowllm/utils/common_utils.py +52 -0
  79. flowllm/utils/fetch_url.py +117 -0
  80. flowllm/utils/llm_utils.py +28 -0
  81. flowllm/utils/ridge_v2.py +54 -0
  82. {llmflow → flowllm}/utils/timer.py +5 -4
  83. {flowllm-0.1.0.dist-info → flowllm-0.1.2.dist-info}/METADATA +45 -388
  84. flowllm-0.1.2.dist-info/RECORD +99 -0
  85. flowllm-0.1.2.dist-info/entry_points.txt +2 -0
  86. {flowllm-0.1.0.dist-info → flowllm-0.1.2.dist-info}/licenses/LICENSE +1 -1
  87. flowllm-0.1.2.dist-info/top_level.txt +1 -0
  88. flowllm-0.1.0.dist-info/RECORD +0 -66
  89. flowllm-0.1.0.dist-info/entry_points.txt +0 -3
  90. flowllm-0.1.0.dist-info/top_level.txt +0 -1
  91. llmflow/app.py +0 -53
  92. llmflow/config/config_parser.py +0 -80
  93. llmflow/config/mock_config.yaml +0 -58
  94. llmflow/embedding_model/__init__.py +0 -5
  95. llmflow/enumeration/agent_state.py +0 -8
  96. llmflow/llm/__init__.py +0 -5
  97. llmflow/llm/openai_compatible_llm.py +0 -283
  98. llmflow/mcp_server.py +0 -110
  99. llmflow/op/__init__.py +0 -10
  100. llmflow/op/base_op.py +0 -125
  101. llmflow/op/mock_op.py +0 -40
  102. llmflow/op/vector_store/__init__.py +0 -13
  103. llmflow/op/vector_store/recall_vector_store_op.py +0 -48
  104. llmflow/op/vector_store/update_vector_store_op.py +0 -28
  105. llmflow/op/vector_store/vector_store_action_op.py +0 -46
  106. llmflow/pipeline/pipeline.py +0 -94
  107. llmflow/pipeline/pipeline_context.py +0 -37
  108. llmflow/schema/app_config.py +0 -69
  109. llmflow/schema/experience.py +0 -144
  110. llmflow/schema/message.py +0 -68
  111. llmflow/schema/request.py +0 -32
  112. llmflow/schema/response.py +0 -29
  113. llmflow/service/__init__.py +0 -0
  114. llmflow/service/llmflow_service.py +0 -96
  115. llmflow/tool/__init__.py +0 -9
  116. llmflow/tool/base_tool.py +0 -80
  117. llmflow/tool/code_tool.py +0 -43
  118. llmflow/tool/dashscope_search_tool.py +0 -162
  119. llmflow/tool/mcp_tool.py +0 -77
  120. llmflow/tool/tavily_search_tool.py +0 -109
  121. llmflow/tool/terminate_tool.py +0 -23
  122. llmflow/utils/__init__.py +0 -0
  123. llmflow/utils/common_utils.py +0 -17
  124. llmflow/utils/file_handler.py +0 -25
  125. llmflow/utils/http_client.py +0 -156
  126. llmflow/utils/op_utils.py +0 -102
  127. llmflow/utils/registry.py +0 -33
  128. llmflow/vector_store/__init__.py +0 -7
  129. llmflow/vector_store/base_vector_store.py +0 -136
  130. {llmflow → flowllm/context}/__init__.py +0 -0
  131. {llmflow/config → flowllm/enumeration}/__init__.py +0 -0
  132. {llmflow → flowllm}/enumeration/chunk_enum.py +0 -0
  133. {llmflow → flowllm}/enumeration/http_enum.py +0 -0
  134. {llmflow → flowllm}/enumeration/role.py +0 -0
  135. {llmflow/enumeration → flowllm/flow/parser}/__init__.py +0 -0
  136. {llmflow/op/react → flowllm/op/agent}/__init__.py +0 -0
  137. /llmflow/op/react/react_v1_prompt.yaml → /flowllm/op/agent/react_prompt.yaml +0 -0
  138. {llmflow/pipeline → flowllm/schema}/__init__.py +0 -0
  139. {llmflow/schema → flowllm/utils}/__init__.py +0 -0
  140. {llmflow → flowllm}/utils/singleton.py +0 -0
  141. {flowllm-0.1.0.dist-info → flowllm-0.1.2.dist-info}/WHEEL +0 -0

flowllm/config/pydantic_config_parser.py
@@ -0,0 +1,242 @@
+ import copy
+ import json
+ from pathlib import Path
+ from typing import Any, Generic, List, Type, TypeVar
+
+ import yaml
+ from loguru import logger
+ from pydantic import BaseModel
+
+ T = TypeVar('T', bound=BaseModel)
+
+
+ class PydanticConfigParser(Generic[T]):
+     current_file: str = __file__
+     default_config_name: str = ""
+
+     """
+     Pydantic Configuration Parser
+
+     Supported configuration sources (priority from low to high):
+     1. Default configuration (Pydantic model default values)
+     2. YAML configuration file
+     3. Command line arguments (dot notation format)
+     4. Runtime parameters
+     """
+
+     def __init__(self, config_class: Type[T]):
+         """
+         Initialize configuration parser
+
+         Args:
+             config_class: Pydantic configuration model class
+         """
+         self.config_class = config_class
+         self.config_dict: dict = {}
+
+     def parse_dot_notation(self, dot_list: List[str]) -> dict:
+         """
+         Parse dot notation format configuration list
+
+         Args:
+             dot_list: Configuration list in format ['a.b.c=value', 'x.y=123']
+
+         Returns:
+             Parsed nested dictionary
+         """
+         config_dict = {}
+
+         for item in dot_list:
+             if '=' not in item:
+                 continue
+
+             key_path, value_str = item.split('=', 1)
+             keys = key_path.split('.')
+
+             # Automatic type conversion
+             value = self._convert_value(value_str)
+
+             # Build nested dictionary
+             current_dict = config_dict
+             for key in keys[:-1]:
+                 if key not in current_dict:
+                     current_dict[key] = {}
+                 current_dict = current_dict[key]
+
+             current_dict[keys[-1]] = value
+
+         return config_dict
+
+     @staticmethod
+     def _convert_value(value_str: str) -> Any:
+         """
+         Automatically convert string values to appropriate Python types
+
+         Args:
+             value_str: String value
+
+         Returns:
+             Converted value
+         """
+         value_str = value_str.strip()
+
+         if value_str.lower() in ("true", "false"):
+             return value_str.lower() == "true"
+
+         if value_str.lower() in ("none", "null"):
+             return None
+
+         try:
+             if "." not in value_str and "e" not in value_str.lower():
+                 return int(value_str)
+
+             return float(value_str)
+
+         except ValueError:
+             pass
+
+         try:
+             return json.loads(value_str)
+         except (json.JSONDecodeError, ValueError):
+             pass
+
+         return value_str
+
+     @staticmethod
+     def load_from_yaml(yaml_path: str | Path) -> dict:
+         """
+         Load configuration from YAML file
+
+         Args:
+             yaml_path: YAML file path
+
+         Returns:
+             Configuration dictionary
+         """
+         if isinstance(yaml_path, str):
+             yaml_path = Path(yaml_path)
+
+         if not yaml_path.exists():
+             raise FileNotFoundError(f"Configuration file does not exist: {yaml_path}")
+
+         with yaml_path.open() as f:
+             return yaml.safe_load(f)
+
+     def merge_configs(self, *config_dicts: dict) -> dict:
+         """
+         Deep merge multiple configuration dictionaries
+
+         Args:
+             *config_dicts: Multiple configuration dictionaries
+
+         Returns:
+             Merged configuration dictionary
+         """
+         result = {}
+
+         for config_dict in config_dicts:
+             result = self._deep_merge(result, config_dict)
+
+         return result
+
+     def _deep_merge(self, base_dict: dict, update_dict: dict) -> dict:
+         """
+         Deep merge two dictionaries
+
+         Args:
+             base_dict: Base dictionary
+             update_dict: Update dictionary
+
+         Returns:
+             Merged dictionary
+         """
+         result = base_dict.copy()
+
+         for key, value in update_dict.items():
+             if key in result and isinstance(result[key], dict) and isinstance(value, dict):
+                 result[key] = self._deep_merge(result[key], value)
+             else:
+                 result[key] = value
+
+         return result
+
+     def parse_args(self, *args) -> T:
+         """
+         Parse command line arguments and return configuration object
+
+         Args:
+             args: Command line arguments.
+
+         Returns:
+             Parsed configuration object
+         """
+         configs_to_merge = []
+
+         # 1. Default configuration (from Pydantic model)
+         default_config = self.config_class().model_dump()
+         configs_to_merge.append(default_config)
+
+         # 2. YAML configuration file
+         config = ""
+         filter_args = []
+         for arg in args:
+             if "=" not in arg:
+                 continue
+
+             arg = arg.lstrip("--").lstrip("-")
+
+             if "c=" in arg or "config=" in arg:
+                 config = arg.split("=")[-1]
+             else:
+                 filter_args.append(arg)
+
+         if not config:
+             if self.default_config_name:
+                 config = self.default_config_name
+         assert config, "add `config=<config_file>` in cmd!"
+
+         if not config.endswith(".yaml"):
+             config += ".yaml"
+
+         # load pre-built configs
+         config_path = Path(self.current_file).parent / config
+         if not config_path.exists():
+             config_path = Path(config)
+
+         yaml_config = self.load_from_yaml(config_path)
+         logger.info(f"flowllm using config={config_path}")
+         configs_to_merge.append(yaml_config)
+
+         # 3. Command line override configuration
+         if args:
+             cli_config = self.parse_dot_notation(filter_args)
+             configs_to_merge.append(cli_config)
+
+         # Merge all configurations
+         self.config_dict = self.merge_configs(*configs_to_merge)
+
+         # Create and validate final configuration object
+         return self.config_class.model_validate(self.config_dict)
+
+     def update_config(self, **kwargs) -> T:
+         """
+         Update configuration object using keyword arguments
+
+         Args:
+             **kwargs: Configuration items to update, supports dot notation, e.g. server__host='localhost'
+
+         Returns:
+             Updated configuration object
+         """
+         # Convert kwargs to dot notation format
+         dot_list = []
+         for key, value in kwargs.items():
+             # support double underscore as dot replacement (server__host -> server.host)
+             dot_key = key.replace("__", ".")
+             dot_list.append(f"{dot_key}={value}")
+
+         # Parse and merge configuration
+         override_config = self.parse_dot_notation(dot_list)
+         final_config = self.merge_configs(copy.deepcopy(self.config_dict), override_config)
+
+         return self.config_class.model_validate(final_config)
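
The parser layers four sources from lowest to highest priority: Pydantic model defaults, a YAML file (`config=default` resolves to `default.yaml` next to the parser module, otherwise the value is treated as a path), dot-notation command-line overrides, and runtime keyword overrides. A minimal sketch of that flow, using a hypothetical `AppConfig` model rather than the packaged `ServiceConfig`:

```python
from pydantic import BaseModel

from flowllm.config.pydantic_config_parser import PydanticConfigParser


class ServerConfig(BaseModel):
    host: str = "0.0.0.0"
    port: int = 8001


class AppConfig(BaseModel):
    language: str = "en"
    server: ServerConfig = ServerConfig()


parser = PydanticConfigParser(AppConfig)

# dot-notation strings become a nested dict with automatic type conversion
cli_config = parser.parse_dot_notation(["server.port=9000", "language=zh"])
# -> {"server": {"port": 9000}, "language": "zh"}

# deep-merge over the model defaults, then validate back into the model
merged = parser.merge_configs(AppConfig().model_dump(), cli_config)
config = AppConfig.model_validate(merged)
assert config.server.port == 9000 and config.server.host == "0.0.0.0"

# at runtime, double underscores stand in for dots
config = parser.update_config(server__host="localhost")
```

`parse_args()` follows the same path, but additionally loads the YAML file named by the `config=` argument before applying the remaining dot-notation overrides.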

flowllm/context/base_context.py
@@ -0,0 +1,79 @@
+ class BaseContext:
+     def __init__(self, **kwargs):
+         self._data = {**kwargs}
+
+     def __getattr__(self, name: str):
+         # Avoid infinite recursion when _data is not yet initialized
+         if name == '_data':
+             raise AttributeError(f"'{self.__class__.__name__}' has no attribute '{name}'")
+
+         # Use object.__getattribute__ to safely access _data
+         try:
+             data = object.__getattribute__(self, '_data')
+         except AttributeError:
+             raise AttributeError(f"'{self.__class__.__name__}' has no attribute '{name}'")
+
+         if name in data:
+             return data[name]
+
+         raise AttributeError(f"'{self.__class__.__name__}' has no attribute '{name}'")
+
+     def __setattr__(self, name: str, value):
+         if name == "_data":
+             super().__setattr__(name, value)
+         else:
+             self._data[name] = value
+
+     def __getitem__(self, name: str):
+         if name not in self._data:
+             raise AttributeError(f"'{self.__class__.__name__}' has no attribute '{name}'")
+         return self._data[name]
+
+     def __setitem__(self, name: str, value):
+         self._data[name] = value
+
+     def __contains__(self, name: str):
+         return name in self._data
+
+     def __repr__(self):
+         return f"{self.__class__.__name__}({self._data!r})"
+
+     def dump(self) -> dict:
+         return self._data
+
+     def get(self, key: str, default=None):
+         return self._data.get(key, default)
+
+     @property
+     def keys(self):
+         return self._data.keys()
+
+     def update(self, **kwargs):
+         self._data.update(kwargs)
+
+     def items(self):
+         return self._data.items()
+
+     def __getstate__(self):
+         """Support for pickle serialization"""
+         return {'_data': self._data}
+
+     def __setstate__(self, state):
+         """Support for pickle deserialization"""
+         self._data = state['_data']
+
+ if __name__ == "__main__":
+     ctx = BaseContext(**{"name": "Alice", "age": 30, "city": "New York"})
+
+     print(ctx.name)
+     print(ctx.age)
+     print(ctx.city)
+
+     ctx.email = "alice@example.com"
+     ctx["email"] = "alice@example.com"
+     print(ctx.email)
+
+     print(ctx.keys)
+     print(ctx)
+
+     # print(ctx.city2)
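
BaseContext keeps every field in a single `_data` dict, so attribute access, item access, and `in` all resolve against the same store, and `__getstate__`/`__setstate__` make instances picklable. A small sketch beyond the `__main__` demo above (the keys and values are arbitrary examples):

```python
import pickle

from flowllm.context.base_context import BaseContext

ctx = BaseContext(model="qwen-max", temperature=0.6)
assert ctx.model == ctx["model"]            # attribute and item access share _data
assert "temperature" in ctx
assert ctx.get("top_p") is None             # get() falls back to a default

restored = pickle.loads(pickle.dumps(ctx))  # round-trips via __getstate__/__setstate__
assert restored.dump() == ctx.dump()
```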

flowllm/context/flow_context.py
@@ -0,0 +1,16 @@
+ import uuid
+
+ from flowllm.context.base_context import BaseContext
+ from flowllm.schema.flow_response import FlowResponse
+
+
+ class FlowContext(BaseContext):
+
+     def __init__(self,
+                  flow_id: str = uuid.uuid4().hex,
+                  response: FlowResponse = None,
+                  **kwargs):
+         super().__init__(**kwargs)
+
+         self.flow_id: str = flow_id
+         self.response: FlowResponse = FlowResponse() if response is None else response
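
FlowContext layers a flow id and a FlowResponse on top of the shared key/value store; any extra keyword arguments become plain context fields. A small sketch (the `query` field is an arbitrary example; note that the `uuid.uuid4().hex` default in the signature is evaluated once at import time, so callers that want a distinct id per flow pass one explicitly):

```python
import uuid

from flowllm.context.flow_context import FlowContext
from flowllm.schema.flow_response import FlowResponse

ctx = FlowContext(flow_id=uuid.uuid4().hex, query="what changed in 0.1.2?")
assert isinstance(ctx.response, FlowResponse)  # created lazily when none is passed
print(ctx.flow_id, ctx.query)
```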

llmflow/op/prompt_mixin.py → flowllm/context/prompt_handler.py
@@ -3,42 +3,56 @@ from pathlib import Path
  import yaml
  from loguru import logger

+ from flowllm.context.base_context import BaseContext
+ from flowllm.context.service_context import C

- class PromptMixin:

-     def __init__(self):
-         self._prompt_dict: dict = {}
+ class PromptHandler(BaseContext):
+
+     def __init__(self, language: str = "", **kwargs):
+         super().__init__(**kwargs)
+         self.language: str = language or C.language

      def load_prompt_by_file(self, prompt_file_path: Path | str = None):
          if prompt_file_path is None:
-             return
+             return self

          if isinstance(prompt_file_path, str):
              prompt_file_path = Path(prompt_file_path)

          if not prompt_file_path.exists():
-             return
+             return self

          with prompt_file_path.open() as f:
              prompt_dict = yaml.load(f, yaml.FullLoader)
              self.load_prompt_dict(prompt_dict)
+         return self

      def load_prompt_dict(self, prompt_dict: dict = None):
          if not prompt_dict:
-             return
+             return self

          for key, value in prompt_dict.items():
              if isinstance(value, str):
-                 if key in self._prompt_dict:
-                     self._prompt_dict[key] = value
+                 if key in self._data:
+                     self._data[key] = value
                      logger.warning(f"prompt_dict key={key} overwrite!")

                  else:
-                     self._prompt_dict[key] = value
+                     self._data[key] = value
                      logger.info(f"add prompt_dict key={key}")
+         return self
+
+     def get_prompt(self, prompt_name: str):
+         key: str = prompt_name
+         if self.language and not key.endswith(self.language.strip()):
+             key += "_" + self.language.strip()

-     def prompt_format(self, prompt_name: str, **kwargs):
-         prompt = self._prompt_dict[prompt_name]
+         assert key in self._data, f"prompt_name={key} not found."
+         return self._data[key]
+
+     def prompt_format(self, prompt_name: str, **kwargs) -> str:
+         prompt = self.get_prompt(prompt_name)

          flag_kwargs = {k: v for k, v in kwargs.items() if isinstance(v, bool)}
          other_kwargs = {k: v for k, v in kwargs.items() if not isinstance(v, bool)}
@@ -69,6 +83,3 @@ class PromptMixin:
          prompt = prompt.format(**other_kwargs)

          return prompt
-
-     def get_prompt(self, key: str):
-         return self._prompt_dict[key]
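
PromptHandler now stores prompts in the shared `_data` dict and, when a language is set (falling back to `C.language`), resolves `prompt_name` to `prompt_name_<language>`. A sketch with hypothetical prompt keys; real prompt files ship next to their ops (for example `flowllm/op/agent/react_prompt.yaml`), and this assumes the unshown middle of `prompt_format` only post-processes the boolean flag kwargs:

```python
from flowllm.context.prompt_handler import PromptHandler

handler = PromptHandler(language="en").load_prompt_dict({
    "react_prompt_en": "Answer the question step by step: {query}",
    "react_prompt_zh": "Please answer the question step by step: {query}",
})

# language="en" turns "react_prompt" into the key "react_prompt_en"
prompt = handler.prompt_format("react_prompt", query="what does PromptHandler do?")
print(prompt)
```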

flowllm/context/registry.py
@@ -0,0 +1,30 @@
+ from loguru import logger
+
+ from flowllm.context.base_context import BaseContext
+ from flowllm.utils.common_utils import camel_to_snake
+
+
+ class Registry(BaseContext):
+
+     def __init__(self, registry_name: str, enable_log: bool = True, register_flow_module: bool = True, **kwargs):
+         super().__init__(**kwargs)
+         self.registry_name: str = registry_name
+         self.enable_log: bool = enable_log
+         self.register_flow_module: bool = register_flow_module
+
+     def register(self, name: str = ""):
+         def decorator(cls):
+             if not self.register_flow_module and cls.__module__.startswith("flowllm"):
+                 return cls
+
+             class_name = name if name else camel_to_snake(cls.__name__)
+             if self.enable_log:
+                 if class_name in self._data:
+                     logger.warning(f"{self.registry_name}.class({class_name}) is already registered!")
+                 else:
+                     logger.info(f"{self.registry_name}.class({class_name}) is registered.")
+
+             self._data[class_name] = cls
+             return cls
+
+         return decorator
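
A Registry is itself a BaseContext whose `_data` maps snake_case names to classes, and `register()` is used as a class decorator. A standalone sketch (in practice the registries hang off the ServiceContext below; `camel_to_snake` is assumed to turn `MyVectorStore` into `my_vector_store`):

```python
from flowllm.context.registry import Registry

vector_stores = Registry("vector_store")


@vector_stores.register()               # name derived via camel_to_snake
class MyVectorStore:
    pass


@vector_stores.register(name="memory")  # an explicit name wins over the derived one
class InMemoryVectorStore:
    pass


assert vector_stores["my_vector_store"] is MyVectorStore
assert "memory" in vector_stores
```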

flowllm/context/service_context.py
@@ -0,0 +1,147 @@
+ import os
+ import uuid
+ from concurrent.futures import ThreadPoolExecutor
+ from inspect import isclass
+ from typing import Dict, List
+
+ import ray
+ from loguru import logger
+
+ from flowllm.context.base_context import BaseContext
+ from flowllm.context.registry import Registry
+ from flowllm.schema.service_config import ServiceConfig, EmbeddingModelConfig
+ from flowllm.utils.singleton import singleton
+
+
+ @singleton
+ class ServiceContext(BaseContext):
+
+     def __init__(self, service_id: str = uuid.uuid4().hex, **kwargs):
+         super().__init__(**kwargs)
+
+         self.service_id: str = service_id
+         self.service_config: ServiceConfig | None = None
+         self.language: str = ""
+         self.thread_pool: ThreadPoolExecutor | None = None
+         self.vector_store_dict: dict = {}
+
+         self.registry_dict: Dict[str, Registry] = {}
+         use_framework: bool = os.environ.get("FLOW_USE_FRAMEWORK", "").lower() == "true"
+         for key in ["embedding_model", "llm", "vector_store", "op", "tool_flow", "service"]:
+             enable_log = True
+             register_flow_module = True
+
+             if use_framework:
+                 enable_log = False
+                 if key in ["op", "tool_flow"]:
+                     register_flow_module = False
+             self.registry_dict[key] = Registry(key, enable_log=enable_log, register_flow_module=register_flow_module)
+
+         self.tool_flow_dict: dict = {}
+
+     def set_default_service_config(self):
+         from flowllm.config.pydantic_config_parser import PydanticConfigParser
+
+         config_parser = PydanticConfigParser(ServiceConfig)
+         self.service_config = config_parser.parse_args("config=default")
+         return self
+
+     def init_by_service_config(self, service_config: ServiceConfig = None):
+         if service_config:
+             self.service_config = service_config
+
+         self.language = self.service_config.language
+         self.thread_pool = ThreadPoolExecutor(max_workers=self.service_config.thread_pool_max_workers)
+         if self.service_config.ray_max_workers > 1:
+             ray.init(num_cpus=self.service_config.ray_max_workers)
+
+         # add vector store
+         for name, config in self.service_config.vector_store.items():
+             vector_store_cls = self.resolve_vector_store(config.backend)
+             embedding_model_config: EmbeddingModelConfig = self.service_config.embedding_model[config.embedding_model]
+             embedding_model_cls = self.resolve_embedding_model(embedding_model_config.backend)
+             embedding_model = embedding_model_cls(model_name=embedding_model_config.model_name,
+                                                   **embedding_model_config.params)
+             self.vector_store_dict[name] = vector_store_cls(embedding_model=embedding_model, **config.params)
+
+         from flowllm.flow.base_tool_flow import BaseToolFlow
+         from flowllm.flow.gallery import ExpressionToolFlow
+
+         # add tool flow cls
+         for name, tool_flow_cls in self.registry_dict["tool_flow"].items():
+             if not isclass(tool_flow_cls):
+                 continue
+
+             tool_flow: BaseToolFlow = tool_flow_cls()
+             self.tool_flow_dict[tool_flow.name] = tool_flow
+             logger.info(f"add diy tool_flow: {tool_flow.name}")
+
+         # add tool flow config
+         for name, flow_config in self.service_config.flow.items():
+             flow_config.name = name
+             tool_flow: BaseToolFlow = ExpressionToolFlow(flow_config=flow_config)
+             self.tool_flow_dict[tool_flow.name] = tool_flow
+             logger.info(f"add expression tool_flow:{tool_flow.name}")
+
+     def get_vector_store(self, name: str = "default"):
+         return self.vector_store_dict[name]
+
+     def get_tool_flow(self, name: str = "default"):
+         return self.tool_flow_dict[name]
+
+     @property
+     def tool_flow_names(self) -> List[str]:
+         return sorted(self.tool_flow_dict.keys())
+
+     """
+     register models
+     """
+
+     def register_embedding_model(self, name: str = ""):
+         return self.registry_dict["embedding_model"].register(name=name)
+
+     def register_llm(self, name: str = ""):
+         return self.registry_dict["llm"].register(name=name)
+
+     def register_vector_store(self, name: str = ""):
+         return self.registry_dict["vector_store"].register(name=name)
+
+     def register_op(self, name: str = ""):
+         return self.registry_dict["op"].register(name=name)
+
+     def register_tool_flow(self, name: str = ""):
+         return self.registry_dict["tool_flow"].register(name=name)
+
+     def register_service(self, name: str = ""):
+         return self.registry_dict["service"].register(name=name)
+
+     """
+     resolve models
+     """
+
+     def resolve_embedding_model(self, name: str):
+         assert name in self.registry_dict["embedding_model"], f"embedding_model={name} not found!"
+         return self.registry_dict["embedding_model"][name]
+
+     def resolve_llm(self, name: str):
+         assert name in self.registry_dict["llm"], f"llm={name} not found!"
+         return self.registry_dict["llm"][name]
+
+     def resolve_vector_store(self, name: str):
+         assert name in self.registry_dict["vector_store"], f"vector_store={name} not found!"
+         return self.registry_dict["vector_store"][name]
+
+     def resolve_op(self, name: str):
+         assert name in self.registry_dict["op"], f"op={name} not found!"
+         return self.registry_dict["op"][name]
+
+     def resolve_tool_flow(self, name: str):
+         assert name in self.registry_dict["tool_flow"], f"tool_flow={name} not found!"
+         return self.registry_dict["tool_flow"][name]
+
+     def resolve_service(self, name: str):
+         assert name in self.registry_dict["service"], f"service={name} not found!"
+         return self.registry_dict["service"][name]
+
+
+ C = ServiceContext()
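
ServiceContext is the process-wide singleton exported as `C`: components self-register through the `register_*` decorators at import time, and `init_by_service_config` later resolves backends by name and wires up vector stores and tool flows (with `FLOW_USE_FRAMEWORK=true` silencing and limiting flowllm's own op/tool_flow registrations). A sketch of the registration half, using a hypothetical class; a real op would subclass flowllm's BaseOp, whose API is not shown in this diff:

```python
from flowllm.context.service_context import C


@C.register_op()            # stored under "echo_op" (camel_to_snake of the class name)
class EchoOp:
    def __call__(self, text: str) -> str:
        return text


assert C.resolve_op("echo_op") is EchoOp
print(C.tool_flow_names)    # [] until init_by_service_config() has populated tool_flow_dict
```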

flowllm/embedding_model/__init__.py
@@ -0,0 +1 @@
+ from .openai_compatible_embedding_model import OpenAICompatibleEmbeddingModel