flowllm 0.1.1__tar.gz → 0.1.3__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (129)
  1. {flowllm-0.1.1 → flowllm-0.1.3}/PKG-INFO +43 -390
  2. flowllm-0.1.3/flowllm/__init__.py +25 -0
  3. flowllm-0.1.3/flowllm/app.py +15 -0
  4. flowllm-0.1.3/flowllm/client/__init__.py +25 -0
  5. flowllm-0.1.3/flowllm/client/async_http_client.py +81 -0
  6. flowllm-0.1.3/flowllm/client/http_client.py +81 -0
  7. flowllm-0.1.3/flowllm/client/mcp_client.py +133 -0
  8. flowllm-0.1.3/flowllm/client/sync_mcp_client.py +116 -0
  9. flowllm-0.1.3/flowllm/config/__init__.py +1 -0
  10. flowllm-0.1.1/flowllm/config/default_config.yaml → flowllm-0.1.3/flowllm/config/default.yaml +3 -8
  11. flowllm-0.1.3/flowllm/config/empty.yaml +37 -0
  12. {flowllm-0.1.1 → flowllm-0.1.3}/flowllm/config/pydantic_config_parser.py +17 -17
  13. {flowllm-0.1.1 → flowllm-0.1.3}/flowllm/context/base_context.py +27 -7
  14. flowllm-0.1.3/flowllm/context/flow_context.py +16 -0
  15. {flowllm-0.1.1 → flowllm-0.1.3}/flowllm/context/registry.py +5 -1
  16. flowllm-0.1.3/flowllm/context/service_context.py +149 -0
  17. flowllm-0.1.3/flowllm/embedding_model/__init__.py +1 -0
  18. {flowllm-0.1.1 → flowllm-0.1.3}/flowllm/embedding_model/base_embedding_model.py +91 -0
  19. {flowllm-0.1.1 → flowllm-0.1.3}/flowllm/embedding_model/openai_compatible_embedding_model.py +63 -5
  20. flowllm-0.1.3/flowllm/flow/__init__.py +1 -0
  21. flowllm-0.1.3/flowllm/flow/base_flow.py +74 -0
  22. flowllm-0.1.3/flowllm/flow/base_tool_flow.py +15 -0
  23. flowllm-0.1.3/flowllm/flow/gallery/__init__.py +8 -0
  24. flowllm-0.1.3/flowllm/flow/gallery/cmd_flow.py +11 -0
  25. flowllm-0.1.3/flowllm/flow/gallery/code_tool_flow.py +30 -0
  26. flowllm-0.1.3/flowllm/flow/gallery/dashscope_search_tool_flow.py +34 -0
  27. flowllm-0.1.3/flowllm/flow/gallery/deepsearch_tool_flow.py +39 -0
  28. flowllm-0.1.3/flowllm/flow/gallery/expression_tool_flow.py +18 -0
  29. flowllm-0.1.3/flowllm/flow/gallery/mock_tool_flow.py +62 -0
  30. flowllm-0.1.3/flowllm/flow/gallery/tavily_search_tool_flow.py +30 -0
  31. flowllm-0.1.3/flowllm/flow/gallery/terminate_tool_flow.py +30 -0
  32. flowllm-0.1.1/flowllm/flow_engine/simple_flow_engine.py → flowllm-0.1.3/flowllm/flow/parser/expression_parser.py +25 -67
  33. flowllm-0.1.3/flowllm/llm/__init__.py +2 -0
  34. {flowllm-0.1.1 → flowllm-0.1.3}/flowllm/llm/base_llm.py +94 -4
  35. flowllm-0.1.3/flowllm/llm/litellm_llm.py +456 -0
  36. {flowllm-0.1.1 → flowllm-0.1.3}/flowllm/llm/openai_compatible_llm.py +205 -5
  37. flowllm-0.1.3/flowllm/op/__init__.py +12 -0
  38. flowllm-0.1.3/flowllm/op/agent/__init__.py +1 -0
  39. flowllm-0.1.3/flowllm/op/agent/react_v1_op.py +109 -0
  40. flowllm-0.1.3/flowllm/op/agent/react_v1_prompt.yaml +54 -0
  41. flowllm-0.1.3/flowllm/op/agent/react_v2_op.py +86 -0
  42. flowllm-0.1.3/flowllm/op/agent/react_v2_prompt.yaml +35 -0
  43. flowllm-0.1.3/flowllm/op/akshare/__init__.py +3 -0
  44. {flowllm-0.1.1 → flowllm-0.1.3}/flowllm/op/akshare/get_ak_a_code_op.py +14 -22
  45. {flowllm-0.1.1 → flowllm-0.1.3}/flowllm/op/akshare/get_ak_a_info_op.py +17 -20
  46. flowllm-0.1.1/flowllm/op/llm_base_op.py → flowllm-0.1.3/flowllm/op/base_llm_op.py +7 -5
  47. {flowllm-0.1.1 → flowllm-0.1.3}/flowllm/op/base_op.py +40 -44
  48. flowllm-0.1.3/flowllm/op/base_ray_op.py +313 -0
  49. flowllm-0.1.3/flowllm/op/code/__init__.py +1 -0
  50. flowllm-0.1.3/flowllm/op/code/execute_code_op.py +42 -0
  51. flowllm-0.1.3/flowllm/op/gallery/__init__.py +2 -0
  52. {flowllm-0.1.1/flowllm/op → flowllm-0.1.3/flowllm/op/gallery}/mock_op.py +4 -4
  53. flowllm-0.1.3/flowllm/op/gallery/terminate_op.py +29 -0
  54. {flowllm-0.1.1 → flowllm-0.1.3}/flowllm/op/parallel_op.py +2 -9
  55. flowllm-0.1.3/flowllm/op/search/__init__.py +3 -0
  56. flowllm-0.1.3/flowllm/op/search/dashscope_deep_research_op.py +267 -0
  57. flowllm-0.1.3/flowllm/op/search/dashscope_search_op.py +186 -0
  58. flowllm-0.1.3/flowllm/op/search/dashscope_search_prompt.yaml +13 -0
  59. flowllm-0.1.3/flowllm/op/search/tavily_search_op.py +109 -0
  60. {flowllm-0.1.1 → flowllm-0.1.3}/flowllm/op/sequential_op.py +1 -9
  61. flowllm-0.1.3/flowllm/schema/flow_request.py +12 -0
  62. {flowllm-0.1.1 → flowllm-0.1.3}/flowllm/schema/message.py +2 -0
  63. {flowllm-0.1.1 → flowllm-0.1.3}/flowllm/schema/service_config.py +12 -16
  64. {flowllm-0.1.1 → flowllm-0.1.3}/flowllm/schema/tool_call.py +20 -8
  65. {flowllm-0.1.1 → flowllm-0.1.3}/flowllm/schema/vector_node.py +1 -0
  66. flowllm-0.1.3/flowllm/service/__init__.py +3 -0
  67. flowllm-0.1.3/flowllm/service/base_service.py +68 -0
  68. flowllm-0.1.3/flowllm/service/cmd_service.py +15 -0
  69. flowllm-0.1.3/flowllm/service/http_service.py +79 -0
  70. {flowllm-0.1.1 → flowllm-0.1.3}/flowllm/service/mcp_service.py +13 -11
  71. flowllm-0.1.3/flowllm/storage/cache/__init__.py +1 -0
  72. flowllm-0.1.3/flowllm/storage/cache/cache_data_handler.py +104 -0
  73. flowllm-0.1.1/flowllm/utils/dataframe_cache.py → flowllm-0.1.3/flowllm/storage/cache/data_cache.py +136 -92
  74. flowllm-0.1.3/flowllm/storage/vector_store/__init__.py +3 -0
  75. {flowllm-0.1.1 → flowllm-0.1.3}/flowllm/storage/vector_store/base_vector_store.py +3 -0
  76. {flowllm-0.1.1 → flowllm-0.1.3}/flowllm/storage/vector_store/es_vector_store.py +4 -5
  77. {flowllm-0.1.1 → flowllm-0.1.3}/flowllm/storage/vector_store/local_vector_store.py +0 -1
  78. {flowllm-0.1.1 → flowllm-0.1.3}/flowllm/utils/common_utils.py +9 -21
  79. {flowllm-0.1.1 → flowllm-0.1.3}/flowllm/utils/fetch_url.py +16 -12
  80. flowllm-0.1.3/flowllm/utils/llm_utils.py +28 -0
  81. flowllm-0.1.3/flowllm/utils/logger_utils.py +28 -0
  82. flowllm-0.1.3/flowllm/utils/ridge_v2.py +54 -0
  83. {flowllm-0.1.1 → flowllm-0.1.3}/flowllm.egg-info/PKG-INFO +43 -390
  84. {flowllm-0.1.1 → flowllm-0.1.3}/flowllm.egg-info/SOURCES.txt +49 -7
  85. flowllm-0.1.3/flowllm.egg-info/entry_points.txt +2 -0
  86. flowllm-0.1.3/flowllm.egg-info/requires.txt +31 -0
  87. flowllm-0.1.3/pyproject.toml +81 -0
  88. flowllm-0.1.3/test/test_cache.py +456 -0
  89. flowllm-0.1.3/test/test_dashscope_llm.py +190 -0
  90. {flowllm-0.1.1 → flowllm-0.1.3}/test/test_dataframe_cache.py +6 -6
  91. {flowllm-0.1.1 → flowllm-0.1.3}/test/test_simple_flow.py +1 -1
  92. flowllm-0.1.1/README.md +0 -373
  93. flowllm-0.1.1/flowllm/__init__.py +0 -12
  94. flowllm-0.1.1/flowllm/app.py +0 -25
  95. flowllm-0.1.1/flowllm/context/flow_context.py +0 -28
  96. flowllm-0.1.1/flowllm/context/service_context.py +0 -103
  97. flowllm-0.1.1/flowllm/embedding_model/__init__.py +0 -1
  98. flowllm-0.1.1/flowllm/flow_engine/__init__.py +0 -1
  99. flowllm-0.1.1/flowllm/flow_engine/base_flow_engine.py +0 -34
  100. flowllm-0.1.1/flowllm/llm/__init__.py +0 -1
  101. flowllm-0.1.1/flowllm/op/__init__.py +0 -3
  102. flowllm-0.1.1/flowllm/service/__init__.py +0 -2
  103. flowllm-0.1.1/flowllm/service/base_service.py +0 -59
  104. flowllm-0.1.1/flowllm/service/http_service.py +0 -87
  105. flowllm-0.1.1/flowllm/storage/vector_store/__init__.py +0 -3
  106. flowllm-0.1.1/flowllm.egg-info/entry_points.txt +0 -4
  107. flowllm-0.1.1/flowllm.egg-info/requires.txt +0 -14
  108. flowllm-0.1.1/pyproject.toml +0 -51
  109. {flowllm-0.1.1 → flowllm-0.1.3}/LICENSE +0 -0
  110. /flowllm-0.1.1/flowllm/config/__init__.py → /flowllm-0.1.3/README.md +0 -0
  111. {flowllm-0.1.1 → flowllm-0.1.3}/flowllm/context/__init__.py +0 -0
  112. {flowllm-0.1.1 → flowllm-0.1.3}/flowllm/context/prompt_handler.py +0 -0
  113. {flowllm-0.1.1 → flowllm-0.1.3}/flowllm/enumeration/__init__.py +0 -0
  114. {flowllm-0.1.1 → flowllm-0.1.3}/flowllm/enumeration/chunk_enum.py +0 -0
  115. {flowllm-0.1.1 → flowllm-0.1.3}/flowllm/enumeration/http_enum.py +0 -0
  116. {flowllm-0.1.1 → flowllm-0.1.3}/flowllm/enumeration/role.py +0 -0
  117. {flowllm-0.1.1/flowllm/op/akshare → flowllm-0.1.3/flowllm/flow/parser}/__init__.py +0 -0
  118. {flowllm-0.1.1 → flowllm-0.1.3}/flowllm/op/akshare/get_ak_a_code_prompt.yaml +0 -0
  119. {flowllm-0.1.1 → flowllm-0.1.3}/flowllm/schema/__init__.py +0 -0
  120. {flowllm-0.1.1 → flowllm-0.1.3}/flowllm/schema/flow_response.py +0 -0
  121. {flowllm-0.1.1 → flowllm-0.1.3}/flowllm/storage/__init__.py +0 -0
  122. {flowllm-0.1.1 → flowllm-0.1.3}/flowllm/storage/vector_store/chroma_vector_store.py +0 -0
  123. {flowllm-0.1.1 → flowllm-0.1.3}/flowllm/utils/__init__.py +0 -0
  124. {flowllm-0.1.1 → flowllm-0.1.3}/flowllm/utils/singleton.py +0 -0
  125. {flowllm-0.1.1 → flowllm-0.1.3}/flowllm/utils/timer.py +0 -0
  126. {flowllm-0.1.1 → flowllm-0.1.3}/flowllm.egg-info/dependency_links.txt +0 -0
  127. {flowllm-0.1.1 → flowllm-0.1.3}/flowllm.egg-info/top_level.txt +0 -0
  128. {flowllm-0.1.1 → flowllm-0.1.3}/setup.cfg +0 -0
  129. {flowllm-0.1.1 → flowllm-0.1.3}/test/test_config.py +0 -0
@@ -1,7 +1,9 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: flowllm
3
- Version: 0.1.1
4
- Summary: build llm flow
3
+ Version: 0.1.3
4
+ Summary: A flexible framework for building LLM-powered flows and mcp services
5
+ Author-email: FlowLLM Team <flowllm@example.com>
6
+ Maintainer-email: FlowLLM Team <flowllm@example.com>
5
7
  License: Apache License
6
8
  Version 2.0, January 2004
7
9
  http://www.apache.org/licenses/
@@ -204,398 +206,49 @@ License: Apache License
204
206
  See the License for the specific language governing permissions and
205
207
  limitations under the License.
206
208
 
207
- Classifier: Programming Language :: Python :: 3
209
+ Keywords: llm,ai,flow,framework,openai,chatgpt,language-model,mcp,http
210
+ Classifier: Development Status :: 3 - Alpha
211
+ Classifier: Intended Audience :: Developers
212
+ Classifier: Intended Audience :: Science/Research
208
213
  Classifier: License :: OSI Approved :: Apache Software License
209
214
  Classifier: Operating System :: OS Independent
215
+ Classifier: Programming Language :: Python :: 3
216
+ Classifier: Programming Language :: Python :: 3.12
217
+ Classifier: Programming Language :: Python :: 3.13
218
+ Classifier: Topic :: Scientific/Engineering :: Artificial Intelligence
219
+ Classifier: Topic :: Software Development :: Libraries :: Python Modules
220
+ Classifier: Topic :: Software Development :: Libraries :: Application Frameworks
221
+ Classifier: Typing :: Typed
210
222
  Requires-Python: >=3.12
211
223
  Description-Content-Type: text/markdown
212
224
  License-File: LICENSE
213
- Requires-Dist: dashscope>=1.19.1
214
- Requires-Dist: elasticsearch>=8.14.0
215
- Requires-Dist: fastapi>=0.115.13
216
- Requires-Dist: fastmcp>=2.10.6
217
- Requires-Dist: loguru>=0.7.3
218
- Requires-Dist: mcp>=1.9.4
219
- Requires-Dist: numpy>=2.3.0
220
- Requires-Dist: openai>=1.88.0
221
- Requires-Dist: pydantic>=2.11.7
222
- Requires-Dist: PyYAML>=6.0.2
223
- Requires-Dist: Requests>=2.32.4
224
- Requires-Dist: uvicorn>=0.34.3
225
- Requires-Dist: setuptools>=75.0
226
225
  Requires-Dist: akshare
226
+ Requires-Dist: beautifulsoup4
227
+ Requires-Dist: dashscope
228
+ Requires-Dist: elasticsearch
229
+ Requires-Dist: fastapi
230
+ Requires-Dist: fastmcp
231
+ Requires-Dist: httpx
232
+ Requires-Dist: litellm
233
+ Requires-Dist: loguru
234
+ Requires-Dist: mcp
235
+ Requires-Dist: numpy
236
+ Requires-Dist: openai
237
+ Requires-Dist: pandas
238
+ Requires-Dist: pydantic
239
+ Requires-Dist: PyYAML
240
+ Requires-Dist: ray
241
+ Requires-Dist: requests
242
+ Requires-Dist: scikit-learn
243
+ Requires-Dist: tavily-python
244
+ Requires-Dist: tqdm
245
+ Requires-Dist: urllib3
246
+ Requires-Dist: uvicorn[standard]
247
+ Requires-Dist: chromadb
248
+ Requires-Dist: elasticsearch
249
+ Requires-Dist: ray
250
+ Provides-Extra: distributed
251
+ Requires-Dist: ray; extra == "distributed"
252
+ Provides-Extra: all
253
+ Requires-Dist: flowllm[distributed]; extra == "all"
227
254
  Dynamic: license-file
228
-
229
- # flowllm
230
-
231
- [![Python](https://img.shields.io/badge/python-3.12+-blue.svg)](https://www.python.org/downloads/)
232
- [![License](https://img.shields.io/badge/License-Apache%202.0-green.svg)](https://opensource.org/licenses/Apache-2.0)
233
-
234
- flowllm is a flexible large language model workflow framework that provides a modular pipeline architecture for building complex AI applications. The framework supports multiple LLM providers, vector storage backends, and tool integrations, enabling you to easily build Retrieval-Augmented Generation (RAG), intelligent agents, and other AI-powered applications.
235
-
236
- ## 🚀 Key Features
237
-
238
- ### 🔧 Modular Architecture
239
- - **Pipeline System**: Flexible pipeline configuration supporting both serial and parallel operations
240
- - **Operation Registry**: Extensible operation registry with support for custom operations
241
- - **Configuration-Driven**: Manage entire applications through YAML configuration files
242
-
243
- ### 🤖 LLM Support
244
- - **Multi-Provider Compatible**: Support for OpenAI-compatible APIs
245
- - **Streaming Responses**: Real-time streaming output support
246
- - **Tool Calling**: Built-in tool calling and parallel execution support
247
- - **Reasoning Mode**: Chain-of-thought reasoning support
248
-
249
- ### 📚 Vector Storage
250
- - **Multi-Backend Support**:
251
- - Elasticsearch
252
- - ChromaDB
253
- - Local file storage
254
- - **Embedding Models**: Support for multiple embedding models
255
- - **Workspace Management**: Multi-tenant vector storage management
256
-
257
- ### 🛠️ Rich Tool Ecosystem
258
- - **Code Execution**: Python code execution tool
259
- - **Web Search**: Integrated Tavily and DashScope search
260
- - **MCP Protocol**: Model Context Protocol support
261
- - **Termination Control**: Intelligent conversation termination management
262
-
263
- ### 🌐 API Services
264
- - **RESTful API**: FastAPI-powered HTTP services
265
- - **MCP Server**: Model Context Protocol server support
266
- - **Multiple Endpoints**: Retriever, summarizer, vector store, agent APIs
267
-
268
- ## 📦 Installation
269
-
270
- ### Prerequisites
271
- - Python 3.12+
272
- - pip or poetry
273
-
274
- ### Installation Steps
275
-
276
- ```bash
277
- # Clone the repository
278
- git clone https://github.com/your-username/flowllm.git
279
- cd flowllm
280
-
281
- # Install dependencies
282
- pip install -e .
283
-
284
- # Or using poetry
285
- poetry install
286
- ```
287
-
288
- ### Environment Configuration
289
-
290
- Copy the environment template:
291
- ```bash
292
- cp example.env .env
293
- ```
294
-
295
- Edit the `.env` file to configure necessary API keys:
296
-
297
- ```bash
298
- # LLM Configuration
299
- LLM_API_KEY=sk-your-llm-api-key
300
- LLM_BASE_URL=https://your-llm-endpoint/v1
301
-
302
- # Embedding Model Configuration
303
- EMBEDDING_API_KEY=sk-your-embedding-api-key
304
- EMBEDDING_BASE_URL=https://your-embedding-endpoint/v1
305
-
306
- # Elasticsearch (Optional)
307
- ES_HOSTS=http://localhost:9200
308
-
309
- # DashScope Search (Optional)
310
- DASHSCOPE_API_KEY=sk-your-dashscope-key
311
- ```
312
-
313
- ## 🏃 Quick Start
314
-
315
- ### 1. Start HTTP Service
316
-
317
- ```bash
318
- flowllm \
319
- http_service.port=8001 \
320
- llm.default.model_name=qwen3-32b \
321
- embedding_model.default.model_name=text-embedding-v4 \
322
- vector_store.default.backend=local_file
323
- ```
324
-
325
- ### 2. Start MCP Server
326
-
327
- ```bash
328
- flowllm_mcp \
329
- mcp_transport=stdio \
330
- http_service.port=8001 \
331
- llm.default.model_name=qwen3-32b \
332
- embedding_model.default.model_name=text-embedding-v4 \
333
- vector_store.default.backend=local_file
334
- ```
335
-
336
- ### 3. API Usage Examples
337
-
338
- #### Retriever API
339
- ```python
340
- import requests
341
-
342
- response = requests.post('http://localhost:8001/retriever', json={
343
- "query": "What is artificial intelligence?",
344
- "top_k": 5,
345
- "workspace_id": "default",
346
- "config": {}
347
- })
348
- print(response.json())
349
- ```
350
-
351
- #### Agent API
352
- ```python
353
- response = requests.post('http://localhost:8001/agent', json={
354
- "query": "Help me search for the latest AI technology trends",
355
- "workspace_id": "default",
356
- "config": {}
357
- })
358
- print(response.json())
359
- ```
360
-
361
- ## ⚙️ Configuration Guide
362
-
363
- ### Pipeline Configuration Syntax
364
-
365
- flowllm uses an intuitive string syntax to define operation pipelines:
366
-
367
- ```yaml
368
- api:
369
- # Serial execution: op1 -> op2 -> op3
370
- retriever: recall_vector_store_op->summarizer_op
371
-
372
- # Parallel execution: [op1 | op2] runs in parallel
373
- summarizer: mock1_op->[mock4_op->mock2_op|mock5_op]->mock3_op
374
-
375
- # Mixed mode: combination of serial and parallel
376
- agent: react_v1_op
377
- ```
378
-
379
- ### Complete Configuration Example
380
-
381
- ```yaml
382
- # HTTP Service Configuration
383
- http_service:
384
- host: "0.0.0.0"
385
- port: 8001
386
- timeout_keep_alive: 600
387
- limit_concurrency: 64
388
-
389
- # Thread Pool Configuration
390
- thread_pool:
391
- max_workers: 10
392
-
393
- # API Pipeline Definitions
394
- api:
395
- retriever: recall_vector_store_op
396
- summarizer: update_vector_store_op
397
- vector_store: vector_store_action_op
398
- agent: react_v1_op
399
-
400
- # Operation Configuration
401
- op:
402
- react_v1_op:
403
- backend: react_v1_op
404
- llm: default
405
- params:
406
- max_steps: 10
407
- tool_names: "code_tool,tavily_search_tool,terminate_tool"
408
-
409
- # LLM Configuration
410
- llm:
411
- default:
412
- backend: openai_compatible
413
- model_name: qwen3-32b
414
- params:
415
- temperature: 0.6
416
- max_retries: 5
417
-
418
- # Embedding Model Configuration
419
- embedding_model:
420
- default:
421
- backend: openai_compatible
422
- model_name: text-embedding-v4
423
- params:
424
- dimensions: 1024
425
-
426
- # Vector Store Configuration
427
- vector_store:
428
- default:
429
- backend: local_file # or elasticsearch, chroma
430
- embedding_model: default
431
- params:
432
- store_dir: "./vector_store_data"
433
- ```
434
-
435
- ## 🧩 Architecture Design
436
-
437
- ### Core Components
438
-
439
- ```
440
- ┌─────────────────┐ ┌─────────────────┐ ┌─────────────────┐
441
- │ FastAPI App │ │ MCP Server │ │ Configuration │
442
- │ │ │ │ │ Parser │
443
- └─────────────────┘ └─────────────────┘ └─────────────────┘
444
- │ │ │
445
- └───────────────────────┼───────────────────────┘
446
-
447
- ┌─────────────────┐
448
- │ flowllm Service │
449
- └─────────────────┘
450
-
451
- ┌─────────────────┐
452
- │ Pipeline │
453
- │ Context │
454
- └─────────────────┘
455
-
456
- ┌───────────────────┼───────────────────┐
457
- │ │ │
458
- ┌─────────────┐ ┌─────────────┐ ┌─────────────┐
459
- │ Operations │ │ Tools │ │Vector Stores│
460
- │ │ │ │ │ │
461
- │ • ReAct │ │ • Code │ │ • File │
462
- │ • Recall │ │ • Search │ │ • ES │
463
- │ • Update │ │ • MCP │ │ • Chroma │
464
- │ • Mock │ │ • Terminate │ │ │
465
- └─────────────┘ └─────────────┘ └─────────────┘
466
- ```
467
-
468
- ### Data Flow
469
-
470
- ```
471
- Request → Configuration → Pipeline → Operations → Tools/VectorStore → Response
472
- ```
473
-
474
- ## 🔧 Development Guide
475
-
476
- ### Custom Operations
477
-
478
- ```python
479
- from old.op import OP_REGISTRY
480
- from old.op.base_op import BaseOp
481
-
482
-
483
- @OP_REGISTRY.register()
484
- class CustomOp(BaseOp):
485
- def execute(self):
486
- # Implement your custom logic
487
- request = self.context.request
488
- response = self.context.response
489
-
490
- # Process request
491
- result = self.process_data(request.query)
492
-
493
- # Update response
494
- response.metadata["custom_result"] = result
495
- ```
496
-
497
- ### Custom Tools
498
-
499
- ```python
500
- from old.tool import TOOL_REGISTRY
501
- from old.tool.base_tool import BaseTool
502
-
503
-
504
- @TOOL_REGISTRY.register()
505
- class CustomTool(BaseTool):
506
- name: str = "custom_tool"
507
- description: str = "Custom tool description"
508
- parameters: dict = {
509
- "type": "object",
510
- "properties": {
511
- "input": {"type": "string", "description": "Input parameter"}
512
- },
513
- "required": ["input"]
514
- }
515
-
516
- def _execute(self, input: str, **kwargs):
517
- # Implement tool logic
518
- return f"Processing result: {input}"
519
- ```
520
-
521
- ### Custom Vector Stores
522
-
523
- ```python
524
- from old.vector_store import VECTOR_STORE_REGISTRY
525
- from old.vector_store.base_vector_store import BaseVectorStore
526
-
527
-
528
- @VECTOR_STORE_REGISTRY.register("custom_store")
529
- class CustomVectorStore(BaseVectorStore):
530
- def search(self, query: str, top_k: int = 10, **kwargs):
531
- # Implement search logic
532
- pass
533
-
534
- def insert(self, nodes: List[VectorNode], **kwargs):
535
- # Implement insertion logic
536
- pass
537
- ```
538
-
539
- ## 🧪 Testing
540
-
541
- ```bash
542
- # Run tests
543
- pytest
544
-
545
- # Run specific tests
546
- pytest tests/test_pipeline.py
547
-
548
- # Generate coverage report
549
- pytest --cov=flowllm tests/
550
- ```
551
-
552
- ## 🤝 Contributing
553
-
554
- We welcome community contributions! Please follow these steps:
555
-
556
- 1. Fork the repository
557
- 2. Create a feature branch (`git checkout -b feature/AmazingFeature`)
558
- 3. Commit your changes (`git commit -m 'Add some AmazingFeature'`)
559
- 4. Push to the branch (`git push origin feature/AmazingFeature`)
560
- 5. Open a Pull Request
561
-
562
- ### Development Environment Setup
563
-
564
- ```bash
565
- # Install development dependencies
566
- pip install -e ".[dev]"
567
-
568
- # Install pre-commit hooks
569
- pre-commit install
570
-
571
- # Run code formatting
572
- black flowllm/
573
- isort flowllm/
574
-
575
- # Run type checking
576
- mypy flowllm/
577
- ```
578
-
579
- ## 📚 Documentation
580
-
581
- - [API Documentation](docs/api.md)
582
- - [Configuration Guide](docs/configuration.md)
583
- - [Operations Development](docs/operations.md)
584
- - [Tools Development](docs/tools.md)
585
- - [Deployment Guide](docs/deployment.md)
586
-
587
- ## 🐛 Bug Reports
588
-
589
- If you find bugs or have feature requests, please create an issue on [GitHub Issues](https://github.com/your-username/flowllm/issues).
590
-
591
- ## 📄 License
592
-
593
- This project is licensed under the Apache License 2.0. See the [LICENSE](LICENSE) file for details.
594
-
595
- ## 🙏 Acknowledgments
596
-
597
- Thanks to all developers and community members who have contributed to the flowllm project.
598
-
599
- ---
600
-
601
- **flowllm** - Making AI workflow development simple and powerful 🚀
"""Package initializer for flowllm.

Import order here is significant and deliberate: logging is configured
first, then environment variables are loaded, and only afterwards are
submodules imported (several of them read configuration at import time).
"""
import os

from flowllm.utils.logger_utils import init_logger

# Configure logging before anything else so import-time log calls are captured.
init_logger()

from flowllm.utils.common_utils import load_env

# Load .env entries before submodules that may read env vars at import time.
load_env()

from flowllm import embedding_model
from flowllm import llm
from flowllm import storage

# When FLOW_USE_FRAMEWORK=true, the hosting framework supplies its own
# flow/op registrations, so the built-in ones are skipped.
if os.environ.get("FLOW_USE_FRAMEWORK", "").lower() != "true":
    from flowllm import flow
    from flowllm import op

from flowllm import service

from flowllm.context.service_context import C
# NOTE(review): this line imports flowllm.op even when FLOW_USE_FRAMEWORK=true
# skipped the block above — confirm that is intended.
from flowllm.op import BaseOp, BaseRayOp, BaseLLMOp

__version__ = "0.1.3"
import sys

from flowllm.service.base_service import BaseService


def main():
    """Command-line entry point: build the service selected by argv and run it."""
    cli_args = sys.argv[1:]
    # The service is a context manager: entering it acquires resources and
    # exiting tears them down even if the run raises.
    with BaseService.get_service(*cli_args) as service:
        service()


if __name__ == "__main__":
    main()

# Release helpers:
# python -m build
# twine upload dist/*
"""Client implementations for talking to FlowLLM services.

Exposes:
    HttpClient      -- blocking HTTP client for the FlowLLM HTTP service.
    AsyncHttpClient -- asyncio-based HTTP client for the same service.
    MCPClient       -- async client for the FlowLLM MCP (Model Context
                       Protocol) service.
    SyncMCPClient   -- blocking facade over MCPClient for synchronous code.

Every client offers tool-flow execution, flow listing, and a health check
against its service.
"""

from .async_http_client import AsyncHttpClient
from .http_client import HttpClient
from .mcp_client import MCPClient
from .sync_mcp_client import SyncMCPClient

__all__ = [
    "HttpClient",
    "AsyncHttpClient",
    "MCPClient",
    "SyncMCPClient"
]
from typing import Dict

import httpx

from flowllm.schema.flow_response import FlowResponse


class AsyncHttpClient:
    """Asynchronous client for the FlowLLM HTTP service.

    Wraps an ``httpx.AsyncClient`` and exposes the service's endpoints:
    health check, tool-flow execution, and tool-flow listing.
    """

    def __init__(self, base_url: str = "http://localhost:8001", timeout: float = 3600.0):
        """Create the client.

        Args:
            base_url: Base URL of the FlowLLM HTTP service.
            timeout: Request timeout in seconds, applied to every call.
        """
        # Strip any trailing slash so endpoint paths can be appended directly.
        self.base_url = base_url.rstrip('/')
        self.timeout = timeout
        self.client = httpx.AsyncClient(timeout=timeout)

    async def __aenter__(self):
        """Support ``async with AsyncHttpClient(...) as client:`` usage."""
        return self

    async def __aexit__(self, exc_type, exc_val, exc_tb):
        """Close the underlying connection when the ``async with`` block ends."""
        await self.close()

    async def close(self):
        """Release the underlying httpx client and its connections."""
        await self.client.aclose()

    async def health_check(self) -> Dict[str, str]:
        """Query the service's health endpoint.

        Returns:
            The health-status payload reported by the service.

        Raises:
            httpx.HTTPStatusError: If the service replies with an error status.
        """
        url = f"{self.base_url}/health"
        response = await self.client.get(url)
        response.raise_for_status()
        return response.json()

    async def execute_tool_flow(self, flow_name: str, **kwargs) -> FlowResponse:
        """Run one tool flow on the service.

        Args:
            flow_name: Name of the tool flow to execute.
            **kwargs: Flow parameters, sent as the JSON request body.

        Returns:
            The execution result parsed into a ``FlowResponse``.

        Raises:
            httpx.HTTPStatusError: If the request or flow execution fails.
        """
        url = f"{self.base_url}/{flow_name}"
        response = await self.client.post(url, json=kwargs)
        response.raise_for_status()
        payload = response.json()
        return FlowResponse(**payload)

    async def list_tool_flows(self) -> list:
        """Fetch the catalogue of tool flows the service exposes.

        Returns:
            A list describing the available tool flows.

        Raises:
            httpx.HTTPStatusError: If the service is unreachable or errors.
        """
        url = f"{self.base_url}/list"
        response = await self.client.get(url)
        response.raise_for_status()
        return response.json()
1
+ from typing import Dict
2
+
3
+ import httpx
4
+
5
+ from flowllm.schema.flow_response import FlowResponse
6
+
7
+
8
+ class HttpClient:
9
+ """Client for interacting with FlowLLM HTTP service"""
10
+
11
+ def __init__(self, base_url: str = "http://localhost:8001", timeout: float = 3600.0):
12
+ """
13
+ Initialize HTTP client
14
+
15
+ Args:
16
+ base_url: Base URL of the FlowLLM HTTP service
17
+ timeout: Request timeout in seconds
18
+ """
19
+ self.base_url = base_url.rstrip('/') # Remove trailing slash for consistent URL formatting
20
+ self.timeout = timeout
21
+ self.client = httpx.Client(timeout=timeout) # Create synchronous HTTP client with timeout
22
+
23
+ def __enter__(self):
24
+ """Context manager entry - returns self for 'with' usage"""
25
+ return self
26
+
27
+ def __exit__(self, exc_type, exc_val, exc_tb):
28
+ """Context manager exit - ensures proper cleanup of HTTP client"""
29
+ self.client.close()
30
+
31
+ def close(self):
32
+ """Explicitly close the HTTP client connection"""
33
+ self.client.close()
34
+
35
+ def health_check(self) -> Dict[str, str]:
36
+ """
37
+ Perform health check on the FlowLLM service
38
+
39
+ Returns:
40
+ Dict containing health status information from the service
41
+
42
+ Raises:
43
+ httpx.HTTPStatusError: If the service is not healthy or unreachable
44
+ """
45
+ response = self.client.get(f"{self.base_url}/health")
46
+ response.raise_for_status() # Raise exception for HTTP error status codes
47
+ return response.json()
48
+
49
+ def execute_tool_flow(self, flow_name: str, **kwargs) -> FlowResponse:
50
+ """
51
+ Execute a specific tool flow on the FlowLLM service
52
+
53
+ Args:
54
+ flow_name: Name of the tool flow to execute
55
+ **kwargs: Additional parameters to pass to the tool flow
56
+
57
+ Returns:
58
+ FlowResponse object containing the execution results
59
+
60
+ Raises:
61
+ httpx.HTTPStatusError: If the request fails or flow execution errors
62
+ """
63
+ endpoint = f"{self.base_url}/{flow_name}"
64
+ response = self.client.post(endpoint, json=kwargs) # Send flow parameters as JSON
65
+ response.raise_for_status() # Raise exception for HTTP error status codes
66
+ result_data = response.json()
67
+ return FlowResponse(**result_data) # Parse response into FlowResponse schema
68
+
69
+ def list_tool_flows(self) -> list:
70
+ """
71
+ Get list of available tool flows from the FlowLLM service
72
+
73
+ Returns:
74
+ List of available tool flow names and their metadata
75
+
76
+ Raises:
77
+ httpx.HTTPStatusError: If the service is unreachable or returns an error
78
+ """
79
+ response = self.client.get(f"{self.base_url}/list")
80
+ response.raise_for_status() # Raise exception for HTTP error status codes
81
+ return response.json()