deepanything 0.1.0.tar.gz → 0.1.1.tar.gz
- {deepanything-0.1.0 → deepanything-0.1.1}/PKG-INFO +20 -3
- {deepanything-0.1.0 → deepanything-0.1.1}/README.md +13 -2
- deepanything-0.1.1/deepanything/Server/__init__.py +4 -0
- deepanything-0.1.1/deepanything/__init__.py +3 -0
- {deepanything-0.1.0 → deepanything-0.1.1}/deepanything.egg-info/PKG-INFO +20 -3
- {deepanything-0.1.0 → deepanything-0.1.1}/deepanything.egg-info/SOURCES.txt +6 -1
- deepanything-0.1.1/deepanything.egg-info/requires.txt +6 -0
- deepanything-0.1.1/requirements.txt +6 -0
- {deepanything-0.1.0 → deepanything-0.1.1}/setup.py +6 -2
- deepanything-0.1.1/test/server.py +8 -0
- deepanything-0.1.1/test/think.py +39 -0
- deepanything-0.1.1/test/think_async.py +38 -0
- deepanything-0.1.0/deepanything/Server/__init__.py +0 -1
- deepanything-0.1.0/deepanything/__init__.py +0 -1
- {deepanything-0.1.0 → deepanything-0.1.1}/LICENSE +0 -0
- {deepanything-0.1.0 → deepanything-0.1.1}/deepanything/DeepAnythingClient.py +0 -0
- {deepanything-0.1.0 → deepanything-0.1.1}/deepanything/ReasonClient.py +0 -0
- {deepanything-0.1.0 → deepanything-0.1.1}/deepanything/ResponseClient.py +0 -0
- {deepanything-0.1.0 → deepanything-0.1.1}/deepanything/Server/Server.py +0 -0
- {deepanything-0.1.0 → deepanything-0.1.1}/deepanything/Server/Types.py +0 -0
- {deepanything-0.1.0 → deepanything-0.1.1}/deepanything/Stream.py +0 -0
- {deepanything-0.1.0 → deepanything-0.1.1}/deepanything/Utility.py +0 -0
- {deepanything-0.1.0 → deepanything-0.1.1}/deepanything/__main__.py +0 -0
- {deepanything-0.1.0 → deepanything-0.1.1}/deepanything.egg-info/dependency_links.txt +0 -0
- {deepanything-0.1.0 → deepanything-0.1.1}/deepanything.egg-info/entry_points.txt +0 -0
- {deepanything-0.1.0 → deepanything-0.1.1}/deepanything.egg-info/top_level.txt +0 -0
- {deepanything-0.1.0 → deepanything-0.1.1}/setup.cfg +0 -0
{deepanything-0.1.0 → deepanything-0.1.1}/PKG-INFO

@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: deepanything
-Version: 0.1.0
+Version: 0.1.1
 Summary: DeepAnything is a project that provides DeepSeek R1's deep thinking capabilities for various large language models (LLMs).
 Author: Junity
 Author-email: 1727636624@qq.com
@@ -9,6 +9,12 @@ Classifier: License :: OSI Approved :: MIT License
 Classifier: Operating System :: OS Independent
 Description-Content-Type: text/markdown
 License-File: LICENSE
+Requires-Dist: openai
+Requires-Dist: uvicorn
+Requires-Dist: attrs
+Requires-Dist: fastapi
+Requires-Dist: pydantic
+Requires-Dist: setuptools
 
 # DeepAnything
 
@@ -28,6 +34,7 @@ DeepAnything is a project that provides DeepSeek R1's deep thinking capabilities
 ## Installation Guide
 
 Install via pip:
+
 ```bash
 pip install deepanything
 ```
@@ -35,6 +42,7 @@ pip install deepanything
 ## Quick Start
 
 ### 1. Integrate into Code
+
 #### Chat Completion
 
 ```python
@@ -72,6 +80,7 @@ completions = da_client.chat_completion(
 ```
 
 #### Streaming Call
+
 ```python
 stream = da_client.chat_completion(
     messages=[
@@ -91,6 +100,7 @@ for chunk in stream:
 ```
 
 #### Asynchronous Usage
+
 ```python
 from deepanything.ReasonClient import AsyncDeepseekReasonClient
 from deepanything.ResponseClient import AsyncOpenaiResponseClient
@@ -129,10 +139,13 @@ async def main():
 
 asyncio.run(main())
 ```
+
 ### 2. Use as a Server
+
 ```bash
 python -m deepanything --host host --port port --config config.json
 ```
+
 | Parameter | Description |
 | --- |----------------|
 | --host | Server listening address, will override the setting in config.json |
@@ -140,6 +153,7 @@ asyncio.run(main())
 | --config | Configuration file path |
 
 #### Configuration File Format
+
 Below is an example of a configuration file:
 
 ```json
@@ -176,16 +190,19 @@ Below is an example of a configuration file:
     ]
 }
 ```
-
+
+#### **Detailed Explanation**
 
 - reason_clients: Configuration for thinking models, currently supports deepseek and openai types. When the type is openai, deepanything directly uses the model's output as the thinking content, and it is recommended to use qwq-32b in this case.
 - response_clients: Configuration for response models, currently only supports the openai type.
 - api_keys: API keys for user authentication. When left blank or an empty list, the server does not use API keys for authentication.
 
 ## License
+
 This project is licensed under the [MIT License](LICENSE)
 
 ## Contact Us
+
 Email: 1737636624@qq.com
 
-GitHub Issues: https://github.com/
+GitHub Issues: [https://github.com/junity233/deep-anything/issues](https://github.com/junity233/deep-anything/issues)
{deepanything-0.1.0 → deepanything-0.1.1}/README.md

@@ -16,6 +16,7 @@ DeepAnything is a project that provides DeepSeek R1's deep thinking capabilities
 ## Installation Guide
 
 Install via pip:
+
 ```bash
 pip install deepanything
 ```
@@ -23,6 +24,7 @@ pip install deepanything
 ## Quick Start
 
 ### 1. Integrate into Code
+
 #### Chat Completion
 
 ```python
@@ -60,6 +62,7 @@ completions = da_client.chat_completion(
 ```
 
 #### Streaming Call
+
 ```python
 stream = da_client.chat_completion(
     messages=[
@@ -79,6 +82,7 @@ for chunk in stream:
 ```
 
 #### Asynchronous Usage
+
 ```python
 from deepanything.ReasonClient import AsyncDeepseekReasonClient
 from deepanything.ResponseClient import AsyncOpenaiResponseClient
@@ -117,10 +121,13 @@ async def main():
 
 asyncio.run(main())
 ```
+
 ### 2. Use as a Server
+
 ```bash
 python -m deepanything --host host --port port --config config.json
 ```
+
 | Parameter | Description |
 | --- |----------------|
 | --host | Server listening address, will override the setting in config.json |
@@ -128,6 +135,7 @@ asyncio.run(main())
 | --config | Configuration file path |
 
 #### Configuration File Format
+
 Below is an example of a configuration file:
 
 ```json
@@ -164,16 +172,19 @@ Below is an example of a configuration file:
     ]
 }
 ```
-
+
+#### **Detailed Explanation**
 
 - reason_clients: Configuration for thinking models, currently supports deepseek and openai types. When the type is openai, deepanything directly uses the model's output as the thinking content, and it is recommended to use qwq-32b in this case.
 - response_clients: Configuration for response models, currently only supports the openai type.
 - api_keys: API keys for user authentication. When left blank or an empty list, the server does not use API keys for authentication.
 
 ## License
+
 This project is licensed under the [MIT License](LICENSE)
 
 ## Contact Us
+
 Email: 1737636624@qq.com
 
-GitHub Issues: https://github.com/
+GitHub Issues: [https://github.com/junity233/deep-anything/issues](https://github.com/junity233/deep-anything/issues)
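The hunks above skip over the body of the example config.json, so only its closing brackets appear in the diff. A minimal sketch of a configuration consistent with the Detailed Explanation bullets is shown below; the host/port keys and the per-client fields (name, base_url, api_key) are illustrative assumptions rather than content taken from this diff, and the dashscope base_url is simply borrowed from the test scripts further down.

```json
{
    "host": "0.0.0.0",
    "port": 8080,
    "reason_clients": [
        {
            "name": "deepseek-r1",
            "type": "deepseek",
            "base_url": "https://api.deepseek.com",
            "api_key": "your-reason-model-key"
        }
    ],
    "response_clients": [
        {
            "name": "qwen",
            "type": "openai",
            "base_url": "https://dashscope.aliyuncs.com/compatible-mode/v1",
            "api_key": "your-response-model-key"
        }
    ],
    "api_keys": []
}
```

With api_keys left as an empty list, as in this sketch, the server would not require clients to authenticate.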
{deepanything-0.1.0 → deepanything-0.1.1}/deepanything.egg-info/PKG-INFO

The changes are identical to the top-level PKG-INFO diff above: the version bump to 0.1.1, the six new Requires-Dist lines, and the same README formatting updates.
{deepanything-0.1.0 → deepanything-0.1.1}/deepanything.egg-info/SOURCES.txt

@@ -1,5 +1,6 @@
 LICENSE
 README.md
+requirements.txt
 setup.py
 deepanything/DeepAnythingClient.py
 deepanything/ReasonClient.py
@@ -12,7 +13,11 @@ deepanything.egg-info/PKG-INFO
 deepanything.egg-info/SOURCES.txt
 deepanything.egg-info/dependency_links.txt
 deepanything.egg-info/entry_points.txt
+deepanything.egg-info/requires.txt
 deepanything.egg-info/top_level.txt
 deepanything/Server/Server.py
 deepanything/Server/Types.py
-deepanything/Server/__init__.py
+deepanything/Server/__init__.py
+test/server.py
+test/think.py
+test/think_async.py
{deepanything-0.1.0 → deepanything-0.1.1}/setup.py

@@ -3,9 +3,12 @@ from setuptools import setup, find_packages
 with open("README.md",encoding='utf-8') as f:
     long_description = f.read()
 
+with open("requirements.txt") as f:
+    requirements = f.readlines()
+
 setup(
     name="deepanything",
-    version="0.1.0",
+    version="0.1.1",
     author="Junity",
     author_email="1727636624@qq.com",
     description="DeepAnything is a project that provides DeepSeek R1's deep thinking capabilities for various large language models (LLMs).",
@@ -21,5 +24,6 @@ setup(
         "Programming Language :: Python :: 3",
         "License :: OSI Approved :: MIT License",
         "Operating System :: OS Independent",
-    ]
+    ],
+    install_requires=requirements
 )
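The detailed diff does not show the new requirements.txt itself, but its six lines can be inferred from the Requires-Dist entries added to PKG-INFO above: presumably openai, uvicorn, attrs, fastapi, pydantic, and setuptools, one per line. setup.py now reads that file and passes its lines to install_requires, so 0.1.1 declares its runtime dependencies, which 0.1.0 did not.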
deepanything-0.1.1/test/think.py (new file)

@@ -0,0 +1,39 @@
+from deepanything.ReasonClient import DeepseekReasonClient
+from deepanything.ResponseClient import OpenaiResponseClient
+from deepanything.DeepAnythingClient import DeepAnythingClient
+
+import asyncio
+
+think_client = DeepseekReasonClient(
+    base_url="https://api.siliconflow.cn/v1",
+    api_key="sk-vyxfdewjjxgzctheaquyvelzcpixaapjkonktsnloqlyeelj"
+)
+
+response_client = OpenaiResponseClient(
+    base_url="https://dashscope.aliyuncs.com/compatible-mode/v1",
+    api_key="sk-80d422aac1784a66bdb6e411ce91de53",
+)
+
+da_client = DeepAnythingClient(
+    reason_client=think_client,
+    response_client=response_client,
+    reason_prompt="<Think>{}</Think>"
+)
+def main():
+    stream = da_client.chat_completion_stream(
+        messages=[
+            {
+                "role": "user",
+                "content": "你好"
+            }
+        ],
+        reason_model="Pro/deepseek-ai/DeepSeek-R1",
+        response_model="qwen-max-latest",
+        show_model="R1-qwen-max"
+    )
+
+    for chunk in stream:
+        print(chunk)
+
+
+main()
deepanything-0.1.1/test/think_async.py (new file)

@@ -0,0 +1,38 @@
+from deepanything.ReasonClient import AsyncDeepseekReasonClient
+from deepanything.ResponseClient import AsyncOpenaiResponseClient
+from deepanything.DeepAnythingClient import AsyncDeepAnythingClient
+
+import asyncio
+
+think_client = AsyncDeepseekReasonClient(
+    base_url="https://api.siliconflow.cn/v1",
+    api_key="sk-vyxfdewjjxgzctheaquyvelzcpixaapjkonktsnloqlyeelj"
+)
+
+response_client = AsyncOpenaiResponseClient(
+    base_url="https://dashscope.aliyuncs.com/compatible-mode/v1",
+    api_key="sk-80d422aac1784a66bdb6e411ce91de53",
+)
+
+da_client = AsyncDeepAnythingClient(
+    think_client=think_client,
+    response_client=response_client,
+    think_prompt="<Think>{}</Think>"
+)
+async def main():
+    stream = await da_client.chat_completion_stream(
+        messages=[
+            {
+                "role": "user",
+                "content": "你好"
+            }
+        ],
+        think_model="Pro/deepseek-ai/DeepSeek-R1",
+        response_model="qwen-max-latest",
+        show_model="R1-qwen-max"
+    )
+
+    async for chunk in stream:
+        print(chunk)
+
+asyncio.run(main())
deepanything-0.1.0/deepanything/Server/__init__.py

@@ -1 +0,0 @@
-__all__ = ["Server", "Types"]
deepanything-0.1.0/deepanything/__init__.py

@@ -1 +0,0 @@
-__all__ = ['DeepAnythingClient', 'ReasonClient', 'Utility', 'Stream', 'ResponseClient', 'Server']
The remaining files marked +0 -0 in the list above are unchanged between 0.1.0 and 0.1.1.