thinkai-framework 0.1.0__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (53) hide show
  1. thinkai_framework-0.1.0/.env.example +30 -0
  2. thinkai_framework-0.1.0/LICENSE +21 -0
  3. thinkai_framework-0.1.0/MANIFEST.in +6 -0
  4. thinkai_framework-0.1.0/PKG-INFO +382 -0
  5. thinkai_framework-0.1.0/README.md +325 -0
  6. thinkai_framework-0.1.0/config.example.yaml +86 -0
  7. thinkai_framework-0.1.0/examples/agent_example.py +89 -0
  8. thinkai_framework-0.1.0/examples/basic_usage.py +106 -0
  9. thinkai_framework-0.1.0/examples/fastapi_demo.py +56 -0
  10. thinkai_framework-0.1.0/examples/rag_example.py +39 -0
  11. thinkai_framework-0.1.0/pyproject.toml +87 -0
  12. thinkai_framework-0.1.0/setup.cfg +4 -0
  13. thinkai_framework-0.1.0/tests/test_thinkai.py +266 -0
  14. thinkai_framework-0.1.0/thinkai/__init__.py +24 -0
  15. thinkai_framework-0.1.0/thinkai/agent/__init__.py +12 -0
  16. thinkai_framework-0.1.0/thinkai/agent/base.py +83 -0
  17. thinkai_framework-0.1.0/thinkai/agent/react.py +148 -0
  18. thinkai_framework-0.1.0/thinkai/agent/tool.py +115 -0
  19. thinkai_framework-0.1.0/thinkai/core/__init__.py +22 -0
  20. thinkai_framework-0.1.0/thinkai/core/client.py +397 -0
  21. thinkai_framework-0.1.0/thinkai/core/config.py +153 -0
  22. thinkai_framework-0.1.0/thinkai/core/models.py +182 -0
  23. thinkai_framework-0.1.0/thinkai/exceptions.py +102 -0
  24. thinkai_framework-0.1.0/thinkai/middleware/__init__.py +11 -0
  25. thinkai_framework-0.1.0/thinkai/middleware/base.py +63 -0
  26. thinkai_framework-0.1.0/thinkai/middleware/logging_middleware.py +34 -0
  27. thinkai_framework-0.1.0/thinkai/middleware/retry_middleware.py +48 -0
  28. thinkai_framework-0.1.0/thinkai/prompt/template.py +145 -0
  29. thinkai_framework-0.1.0/thinkai/providers/__init__.py +18 -0
  30. thinkai_framework-0.1.0/thinkai/providers/base.py +303 -0
  31. thinkai_framework-0.1.0/thinkai/providers/deepseek.py +109 -0
  32. thinkai_framework-0.1.0/thinkai/providers/ollama.py +151 -0
  33. thinkai_framework-0.1.0/thinkai/providers/openai.py +108 -0
  34. thinkai_framework-0.1.0/thinkai/providers/qwen.py +117 -0
  35. thinkai_framework-0.1.0/thinkai/providers/registry.py +56 -0
  36. thinkai_framework-0.1.0/thinkai/rag/__init__.py +14 -0
  37. thinkai_framework-0.1.0/thinkai/rag/chroma_store.py +68 -0
  38. thinkai_framework-0.1.0/thinkai/rag/document_loader.py +189 -0
  39. thinkai_framework-0.1.0/thinkai/rag/pipeline.py +201 -0
  40. thinkai_framework-0.1.0/thinkai/rag/text_splitter.py +148 -0
  41. thinkai_framework-0.1.0/thinkai/rag/vector_store.py +27 -0
  42. thinkai_framework-0.1.0/thinkai/session/__init__.py +12 -0
  43. thinkai_framework-0.1.0/thinkai/session/context.py +67 -0
  44. thinkai_framework-0.1.0/thinkai/session/manager.py +96 -0
  45. thinkai_framework-0.1.0/thinkai/session/memory.py +70 -0
  46. thinkai_framework-0.1.0/thinkai/session/storage.py +39 -0
  47. thinkai_framework-0.1.0/thinkai/streaming.py +36 -0
  48. thinkai_framework-0.1.0/thinkai_framework.egg-info/PKG-INFO +382 -0
  49. thinkai_framework-0.1.0/thinkai_framework.egg-info/SOURCES.txt +51 -0
  50. thinkai_framework-0.1.0/thinkai_framework.egg-info/dependency_links.txt +1 -0
  51. thinkai_framework-0.1.0/thinkai_framework.egg-info/entry_points.txt +2 -0
  52. thinkai_framework-0.1.0/thinkai_framework.egg-info/requires.txt +44 -0
  53. thinkai_framework-0.1.0/thinkai_framework.egg-info/top_level.txt +1 -0
@@ -0,0 +1,30 @@
1
+ # ThinkAi 环境变量配置
2
+ # 复制此文件为 .env 并填写实际值
3
+
4
+ # OpenAI配置
5
+ OPENAI_API_KEY=your_openai_api_key_here
6
+
7
+ # 通义千问配置
8
+ QWEN_API_KEY=your_qwen_api_key_here
9
+
10
+ # DeepSeek配置
11
+ DEEPSEEK_API_KEY=your_deepseek_api_key_here
12
+
13
+ # Anthropic Claude配置
14
+ ANTHROPIC_API_KEY=your_anthropic_api_key_here
15
+
16
+ # Google Gemini配置
17
+ GOOGLE_API_KEY=your_google_api_key_here
18
+
19
+ # 百度文心配置
20
+ BAIDU_API_KEY=your_baidu_api_key_here
21
+ BAIDU_SECRET_KEY=your_baidu_secret_key_here
22
+
23
+ # 智谱AI配置
24
+ ZHIPU_API_KEY=your_zhipu_api_key_here
25
+
26
+ # ThinkAi全局配置
27
+ THINKAI_DEFAULT_PROVIDER=ollama
28
+ THINKAI_DEFAULT_MODEL=llama3
29
+ THINKAI_DEBUG=false
30
+ THINKAI_LOG_LEVEL=INFO
@@ -0,0 +1,21 @@
1
+ MIT License
2
+
3
+ Copyright (c) 2024 ThinkAi Team
4
+
5
+ Permission is hereby granted, free of charge, to any person obtaining a copy
6
+ of this software and associated documentation files (the "Software"), to deal
7
+ in the Software without restriction, including without limitation the rights
8
+ to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9
+ copies of the Software, and to permit persons to whom the Software is
10
+ furnished to do so, subject to the following conditions:
11
+
12
+ The above copyright notice and this permission notice shall be included in all
13
+ copies or substantial portions of the Software.
14
+
15
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16
+ IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17
+ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18
+ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19
+ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20
+ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21
+ SOFTWARE.
@@ -0,0 +1,6 @@
1
+ include README.md
2
+ include LICENSE
3
+ include .env.example
4
+ include config.example.yaml
5
+ recursive-include examples *
6
+ recursive-include thinkai *.py
@@ -0,0 +1,382 @@
1
+ Metadata-Version: 2.4
2
+ Name: thinkai-framework
3
+ Version: 0.1.0
4
+ Summary: Enterprise-grade AI framework for seamless LLM integration
5
+ Author-email: ThinkAi Team <thinkai@example.com>
6
+ License-Expression: MIT
7
+ Project-URL: Homepage, https://github.com/thinkai/thinkai
8
+ Project-URL: Documentation, https://thinkai.readthedocs.io
9
+ Project-URL: Repository, https://github.com/thinkai/thinkai
10
+ Project-URL: Issues, https://github.com/thinkai/thinkai/issues
11
+ Keywords: ai,llm,fastapi,openai,ollama,rag,agent
12
+ Classifier: Development Status :: 4 - Beta
13
+ Classifier: Intended Audience :: Developers
14
+ Classifier: Programming Language :: Python :: 3
15
+ Classifier: Programming Language :: Python :: 3.9
16
+ Classifier: Programming Language :: Python :: 3.10
17
+ Classifier: Programming Language :: Python :: 3.11
18
+ Classifier: Programming Language :: Python :: 3.12
19
+ Requires-Python: >=3.9
20
+ Description-Content-Type: text/markdown
21
+ License-File: LICENSE
22
+ Requires-Dist: fastapi>=0.100.0
23
+ Requires-Dist: pydantic>=2.0
24
+ Requires-Dist: pydantic-settings>=2.0
25
+ Requires-Dist: httpx>=0.24.0
26
+ Requires-Dist: pyyaml>=6.0
27
+ Requires-Dist: python-dotenv>=1.0.0
28
+ Requires-Dist: tenacity>=8.0.0
29
+ Provides-Extra: ollama
30
+ Requires-Dist: ollama>=0.1.0; extra == "ollama"
31
+ Provides-Extra: openai
32
+ Requires-Dist: openai>=1.0.0; extra == "openai"
33
+ Provides-Extra: qwen
34
+ Requires-Dist: dashscope>=1.0.0; extra == "qwen"
35
+ Provides-Extra: deepseek
36
+ Requires-Dist: openai>=1.0.0; extra == "deepseek"
37
+ Provides-Extra: claude
38
+ Requires-Dist: anthropic>=0.7.0; extra == "claude"
39
+ Provides-Extra: gemini
40
+ Requires-Dist: google-generativeai>=0.3.0; extra == "gemini"
41
+ Provides-Extra: rag
42
+ Requires-Dist: chromadb>=0.4.0; extra == "rag"
43
+ Requires-Dist: tiktoken>=0.5.0; extra == "rag"
44
+ Requires-Dist: pypdf>=3.0; extra == "rag"
45
+ Requires-Dist: python-docx>=0.8.0; extra == "rag"
46
+ Provides-Extra: agent
47
+ Requires-Dist: openai>=1.0.0; extra == "agent"
48
+ Provides-Extra: dev
49
+ Requires-Dist: pytest>=7.0; extra == "dev"
50
+ Requires-Dist: pytest-asyncio>=0.21.0; extra == "dev"
51
+ Requires-Dist: black>=23.0; extra == "dev"
52
+ Requires-Dist: ruff>=0.1.0; extra == "dev"
53
+ Requires-Dist: mypy>=1.0; extra == "dev"
54
+ Provides-Extra: all
55
+ Requires-Dist: thinkai-framework[agent,claude,deepseek,gemini,ollama,openai,qwen,rag]; extra == "all"
56
+ Dynamic: license-file
57
+
58
+ # ThinkAi - Enterprise AI Framework
59
+
60
+ 基于FastAPI的企业级AI大模型集成框架 - **开箱即用,简单易用,功能全面**
61
+
62
+ ## 特性
63
+
64
+ - **多模型支持** - 支持Ollama、OpenAI、通义千问、DeepSeek、Claude、Gemini等主流大模型
65
+ - **统一接口** - 一次配置,多模型自由切换
66
+ - **开箱即用** - 简单配置即可使用,无需复杂配置
67
+ - **符合OpenAI标准** - 采用OpenAI兼容的API格式
68
+ - **流式响应** - 支持SSE流式输出
69
+ - **会话管理** - 内置多轮对话上下文管理
70
+ - **RAG支持** - 3行代码实现检索增强生成
71
+ - **Agent系统** - 内置ReAct Agent,支持工具调用
72
+ - **中间件管道** - 日志、重试、缓存、限流
73
+ - **企业级性能** - 异步架构,连接池,自动重试
74
+
75
+ ## 安装
76
+
77
+ ```bash
78
+ # 基础安装
79
+ pip install thinkai-framework
80
+
81
+ # 安装特定Provider
82
+ pip install thinkai-framework[ollama]
83
+ pip install thinkai-framework[openai]
84
+ pip install thinkai-framework[qwen]
85
+
86
+ # 安装全部依赖
87
+ pip install thinkai-framework[all]
88
+ ```
89
+
90
+ ## 快速开始
91
+
92
+ ### 1. 最简使用(3行代码)
93
+
94
+ ```python
95
+ from thinkai import ThinkAI
96
+
97
+ ai = ThinkAI(provider="ollama", model="llama3")
98
+ response = await ai.chat("你好")
99
+ print(response.content)
100
+ ```
101
+
102
+ ### 2. FastAPI集成
103
+
104
+ ```python
105
+ from fastapi import FastAPI
106
+ from thinkai import ThinkAI
107
+
108
+ app = FastAPI()
109
+ ai = ThinkAI(provider="ollama", model="llama3")
110
+
111
+ @app.post("/chat")
112
+ async def chat(message: str):
113
+ response = await ai.chat(message)
114
+ return {"content": response.content}
115
+
116
+ # 启动: uvicorn main:app --reload
117
+ ```
118
+
119
+ ### 3. 多模型配置与切换
120
+
121
+ ```python
122
+ from thinkai import ThinkAI
123
+
124
+ ai = ThinkAI(provider="ollama", model="llama3")
125
+
126
+ # 注册多个模型
127
+ ai.register_model("qwen", provider="qwen", model="qwen-turbo")
128
+ ai.register_model("deepseek", provider="deepseek", model="deepseek-chat")
129
+ ai.register_model("gpt4", provider="openai", model="gpt-4")
130
+
131
+ # 自由切换
132
+ response1 = await ai.chat("你好", model="llama3")
133
+ response2 = await ai.chat("你好", model="qwen")
134
+ response3 = await ai.chat("你好", model="deepseek")
135
+ response4 = await ai.chat("你好", model="gpt4")
136
+ ```
137
+
138
+ ### 4. 多轮对话(会话管理)
139
+
140
+ ```python
141
+ ai = ThinkAI()
142
+
143
+ async with ai.session() as session:
144
+ response1 = await session.chat("你好,我想学习Python")
145
+ response2 = await session.chat("有什么好的学习路径?")
146
+ response3 = await session.chat("推荐一些资源吧")
147
+ ```
148
+
149
+ ### 5. 流式响应
150
+
151
+ ```python
152
+ ai = ThinkAI()
153
+
154
+ async for chunk in ai.chat_stream("讲一个故事"):
155
+ if chunk.choices and chunk.choices[0].delta.content:
156
+ print(chunk.choices[0].delta.content, end="", flush=True)
157
+ ```
158
+
159
+ ### 6. RAG(检索增强生成)
160
+
161
+ ```python
162
+ from thinkai import ThinkAI
163
+ from thinkai.rag import RAGPipeline
164
+
165
+ ai = ThinkAI()
166
+
167
+ # 3行代码实现RAG
168
+ rag = RAGPipeline(
169
+ documents=["./docs", "./knowledge"],
170
+ ai_client=ai,
171
+ chunk_size=500,
172
+ )
173
+
174
+ # 查询
175
+ answer = await rag.query("ThinkAi框架支持哪些AI模型?")
176
+ print(answer)
177
+ ```
178
+
179
+ ### 7. Agent(智能体)
180
+
181
+ ```python
182
+ from thinkai import ThinkAI
183
+ from thinkai.agent import ReActAgent, Tool
184
+
185
+ # 定义工具
186
+ @Tool(name="calculator", description="计算数学表达式")
187
+ def calculator(expression: str) -> str:
188
+ return str(eval(expression))  # demo only: eval() is unsafe on untrusted input
189
+
190
+ @Tool(name="search", description="搜索信息")
191
+ async def search(query: str) -> str:
192
+ # 实现搜索逻辑
193
+ return "搜索结果"
194
+
195
+ ai = ThinkAI()
196
+
197
+ # 创建Agent
198
+ agent = ReActAgent(
199
+ tools=[calculator, search],
200
+ ai_client=ai,
201
+ verbose=True,
202
+ )
203
+
204
+ # 运行任务
205
+ result = await agent.run("计算25*48,然后搜索Python的相关信息")
206
+ print(result)
207
+ ```
208
+
209
+ ## 支持的AI模型
210
+
211
+ | Provider | 模型 | 类型 | 配置方式 |
212
+ |----------|------|------|----------|
213
+ | **Ollama** | llama3, mistral, qwen等 | 本地 | `provider="ollama"` |
214
+ | **OpenAI** | gpt-4, gpt-3.5-turbo, gpt-4o | 云端 | `provider="openai"` |
215
+ | **通义千问** | qwen-turbo, qwen-plus, qwen-max | 云端 | `provider="qwen"` |
216
+ | **DeepSeek** | deepseek-chat, deepseek-coder | 云端 | `provider="deepseek"` |
217
+ | **Anthropic** | claude-3-opus/sonnet/haiku | 云端 | `provider="claude"` |
218
+ | **Google** | gemini-pro, gemini-ultra | 云端 | `provider="gemini"` |
219
+
220
+ ## 项目结构
221
+
222
+ ```
223
+ thinkai/
224
+ ├── thinkai/
225
+ │ ├── __init__.py
226
+ │ ├── core/ # 核心模块
227
+ │ │ ├── client.py # 统一客户端
228
+ │ │ ├── config.py # 配置管理
229
+ │ │ └── models.py # 数据模型
230
+ │ ├── providers/ # Provider实现
231
+ │ │ ├── base.py # Provider基类
232
+ │ │ ├── registry.py # 注册表
233
+ │ │ ├── ollama.py # Ollama
234
+ │ │ ├── openai.py # OpenAI
235
+ │ │ ├── qwen.py # 通义千问
236
+ │ │ └── deepseek.py # DeepSeek
237
+ │ ├── session/ # 会话管理
238
+ │ ├── prompt/ # Prompt模板
239
+ │ ├── middleware/ # 中间件
240
+ │ ├── rag/ # RAG模块
241
+ │ ├── agent/ # Agent模块
242
+ │ ├── streaming.py # 流式处理
243
+ │ └── exceptions.py # 异常定义
244
+ ├── examples/ # 示例代码
245
+ ├── config.example.yaml # 配置示例
246
+ ├── .env.example # 环境变量示例
247
+ └── pyproject.toml # 项目配置
248
+ ```
249
+
250
+ ## 配置方式
251
+
252
+ ### 方式1: 代码配置
253
+
254
+ ```python
255
+ ai = ThinkAI(
256
+ provider="ollama",
257
+ model="llama3",
258
+ temperature=0.7,
259
+ max_tokens=2048,
260
+ timeout=60,
261
+ )
262
+ ```
263
+
264
+ ### 方式2: 环境变量
265
+
266
+ ```bash
267
+ export THINKAI_DEFAULT_PROVIDER=ollama
268
+ export THINKAI_DEFAULT_MODEL=llama3
269
+ export OPENAI_API_KEY=your_key_here
270
+ ```
271
+
272
+ ### 方式3: YAML配置文件
273
+
274
+ ```yaml
275
+ default_provider: "ollama"
276
+ default_model: "llama3"
277
+
278
+ providers:
279
+ openai:
280
+ api_key: "${OPENAI_API_KEY}"
281
+ api_base: "https://api.openai.com/v1"
282
+
283
+ models:
284
+ llama3:
285
+ provider: "ollama"
286
+ model: "llama3"
287
+ temperature: 0.7
288
+ ```
289
+
290
+ ```python
291
+ from thinkai.core.config import Settings
292
+
293
+ config = Settings.from_file("config.yaml")
294
+ ai = ThinkAI(config=config)
295
+ ```
296
+
297
+ ## 高级功能
298
+
299
+ ### 中间件系统
300
+
301
+ ```python
302
+ from thinkai.middleware import LoggingMiddleware, RetryMiddleware
303
+
304
+ ai = ThinkAI()
305
+ ai.add_middleware(LoggingMiddleware())
306
+ ai.add_middleware(RetryMiddleware(max_retries=3))
307
+ ```
308
+
309
+ ### Prompt模板
310
+
311
+ ```python
312
+ from thinkai.prompt.template import PromptTemplate, prompt_manager
313
+
314
+ # 使用内置模板
315
+ template = prompt_manager.get("system_code")
316
+ prompt = template.format()
317
+
318
+ # 自定义模板
319
+ custom = PromptTemplate("将以下代码转换为$type: $code")
320
+ result = custom.format(type="Python", code="...")
321
+ ```
322
+
323
+ ### 自定义Provider
324
+
325
+ ```python
326
+ from thinkai.providers.base import BaseProvider
327
+ from thinkai.providers.registry import register_provider
328
+
329
+ @register_provider("custom")
330
+ class CustomProvider(BaseProvider):
331
+ name = "custom"
332
+ default_model = "custom-model"
333
+
334
+ async def chat(self, request):
335
+ # 实现聊天逻辑
336
+ pass
337
+
338
+ async def chat_stream(self, request):
339
+ # 实现流式聊天逻辑
340
+ pass
341
+ ```
342
+
343
+ ## 企业级特性
344
+
345
+ - **异步架构** - 全面使用async/await,高性能
346
+ - **连接池** - HTTP连接复用
347
+ - **自动重试** - 失败自动重试,指数退避
348
+ - **错误处理** - 完善的异常体系
349
+ - **类型安全** - 完整的Type Hints
350
+ - **日志记录** - 结构化日志支持
351
+ - **监控指标** - 集成Prometheus(规划中)
352
+ - **负载均衡** - 多模型路由(规划中)
353
+
354
+ ## 文档
355
+
356
+ 完整文档请访问: [https://thinkai.readthedocs.io](https://thinkai.readthedocs.io)
357
+
358
+ ## 示例
359
+
360
+ 运行示例代码:
361
+
362
+ ```bash
363
+ # 基础使用
364
+ python examples/basic_usage.py
365
+
366
+ # FastAPI集成
367
+ python examples/fastapi_demo.py
368
+
369
+ # RAG示例
370
+ python examples/rag_example.py
371
+
372
+ # Agent示例
373
+ python examples/agent_example.py
374
+ ```
375
+
376
+ ## 贡献
377
+
378
+ 欢迎提交Issue和Pull Request!
379
+
380
+ ## 许可证
381
+
382
+ MIT License