@researai/deepscientist 1.5.16 → 1.5.17
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +66 -23
- package/bin/ds.js +550 -19
- package/docs/en/00_QUICK_START.md +65 -5
- package/docs/en/01_SETTINGS_REFERENCE.md +1 -1
- package/docs/en/09_DOCTOR.md +14 -3
- package/docs/en/15_CODEX_PROVIDER_SETUP.md +12 -3
- package/docs/en/21_LOCAL_MODEL_BACKENDS_GUIDE.md +283 -0
- package/docs/en/91_DEVELOPMENT.md +237 -0
- package/docs/en/README.md +7 -3
- package/docs/zh/00_QUICK_START.md +54 -5
- package/docs/zh/01_SETTINGS_REFERENCE.md +1 -1
- package/docs/zh/09_DOCTOR.md +15 -4
- package/docs/zh/15_CODEX_PROVIDER_SETUP.md +12 -3
- package/docs/zh/21_LOCAL_MODEL_BACKENDS_GUIDE.md +281 -0
- package/docs/zh/README.md +7 -3
- package/install.sh +46 -4
- package/package.json +2 -1
- package/pyproject.toml +1 -1
- package/src/deepscientist/__init__.py +1 -1
- package/src/deepscientist/bridges/connectors.py +8 -2
- package/src/deepscientist/codex_cli_compat.py +185 -72
- package/src/deepscientist/config/service.py +154 -6
- package/src/deepscientist/daemon/api/handlers.py +130 -25
- package/src/deepscientist/daemon/api/router.py +5 -0
- package/src/deepscientist/daemon/app.py +446 -22
- package/src/deepscientist/diagnostics/__init__.py +6 -0
- package/src/deepscientist/diagnostics/runner_failures.py +130 -0
- package/src/deepscientist/doctor.py +207 -3
- package/src/deepscientist/prompts/builder.py +22 -4
- package/src/deepscientist/quest/service.py +413 -13
- package/src/deepscientist/runners/codex.py +59 -14
- package/src/deepscientist/shared.py +19 -0
- package/src/prompts/contracts/shared_interaction.md +3 -2
- package/src/prompts/system.md +13 -0
- package/src/prompts/system_copilot.md +13 -0
- package/src/tui/package.json +1 -1
- package/src/ui/dist/assets/{AiManusChatView-COFACy7V.js → AiManusChatView-Bv-Z8YpU.js} +44 -44
- package/src/ui/dist/assets/{AnalysisPlugin-DnSm0GZn.js → AnalysisPlugin-BCKAfjba.js} +1 -1
- package/src/ui/dist/assets/{CliPlugin-CvwCmDQ5.js → CliPlugin-BCKcpc35.js} +4 -4
- package/src/ui/dist/assets/{CodeEditorPlugin-cOqSa0xq.js → CodeEditorPlugin-DbOfSJ8K.js} +1 -1
- package/src/ui/dist/assets/{CodeViewerPlugin-itb0tltR.js → CodeViewerPlugin-CbaFRrUU.js} +3 -3
- package/src/ui/dist/assets/{DocViewerPlugin-DqKkiCI6.js → DocViewerPlugin-DAjLVeQD.js} +3 -3
- package/src/ui/dist/assets/{GitCommitViewerPlugin-DVgNHBCS.js → GitCommitViewerPlugin-CIUqbUDO.js} +1 -1
- package/src/ui/dist/assets/{GitDiffViewerPlugin-DxL2ezFG.js → GitDiffViewerPlugin-CQACjoAA.js} +1 -1
- package/src/ui/dist/assets/{GitSnapshotViewer-B_RQm1YZ.js → GitSnapshotViewer-0r4nLPke.js} +1 -1
- package/src/ui/dist/assets/{ImageViewerPlugin-tHqlXY3n.js → ImageViewerPlugin-nBOmI2v_.js} +3 -3
- package/src/ui/dist/assets/{LabCopilotPanel-ClMbq5Yu.js → LabCopilotPanel-BHxOxF4z.js} +1 -1
- package/src/ui/dist/assets/{LabPlugin-L_SuE8ow.js → LabPlugin-BKoZGs95.js} +1 -1
- package/src/ui/dist/assets/{LatexPlugin-B495DTXC.js → LatexPlugin-ZwtV8pIp.js} +1 -1
- package/src/ui/dist/assets/{MarkdownViewerPlugin-DG28-61B.js → MarkdownViewerPlugin-DKqVfKyW.js} +3 -3
- package/src/ui/dist/assets/{MarketplacePlugin-BiOGT-Kj.js → MarketplacePlugin-BwxStZ9D.js} +1 -1
- package/src/ui/dist/assets/{NotebookEditor-C-4Kt1p9.js → NotebookEditor-BEQhaQbt.js} +1 -1
- package/src/ui/dist/assets/{NotebookEditor-CVsj8h_T.js → NotebookEditor-DB9N_T9q.js} +23 -23
- package/src/ui/dist/assets/{PdfLoader-CASDQmxJ.js → PdfLoader-eWBONbQP.js} +1 -1
- package/src/ui/dist/assets/{PdfMarkdownPlugin-BFhwoKsY.js → PdfMarkdownPlugin-D22YOZL3.js} +1 -1
- package/src/ui/dist/assets/{PdfViewerPlugin-DcOzU9vd.js → PdfViewerPlugin-c-RK9DLM.js} +3 -3
- package/src/ui/dist/assets/{SearchPlugin-CHj7M58O.js → SearchPlugin-CxF9ytAx.js} +1 -1
- package/src/ui/dist/assets/{TextViewerPlugin-CB4DYfWO.js → TextViewerPlugin-C5xqeeUH.js} +2 -2
- package/src/ui/dist/assets/{VNCViewer-CjlbyCB3.js → VNCViewer-BoLGLnHz.js} +1 -1
- package/src/ui/dist/assets/{bot-CFkZY-JP.js → bot-DREQOxzP.js} +1 -1
- package/src/ui/dist/assets/{chevron-up-Dq5ofbht.js → chevron-up-C9Qpx4DE.js} +1 -1
- package/src/ui/dist/assets/{code-DLC6G24T.js → code-WlFHE7z_.js} +1 -1
- package/src/ui/dist/assets/{file-content-Dv4LoZec.js → file-content-BZMz3RYp.js} +1 -1
- package/src/ui/dist/assets/{file-diff-panel-Denq-lC3.js → file-diff-panel-CQhw0jS2.js} +1 -1
- package/src/ui/dist/assets/{file-socket-Cu4Qln7Y.js → file-socket-CfQPKQKj.js} +1 -1
- package/src/ui/dist/assets/{git-commit-horizontal-BUh6G52n.js → git-commit-horizontal-DxZ8DCZh.js} +1 -1
- package/src/ui/dist/assets/{image-B9HUUddG.js → image-Bgl4VIyx.js} +1 -1
- package/src/ui/dist/assets/{index-Cgla8biy.css → index-BpV6lusQ.css} +1 -1
- package/src/ui/dist/assets/{index-Gbl53BNp.js → index-CBNVuWcP.js} +363 -363
- package/src/ui/dist/assets/{index-wQ7RIIRd.js → index-CwNu1aH4.js} +1 -1
- package/src/ui/dist/assets/{index-B2B1sg-M.js → index-DrUnlf6K.js} +1 -1
- package/src/ui/dist/assets/{index-DRyx7vAc.js → index-NW-h8VzN.js} +1 -1
- package/src/ui/dist/assets/{pdf-effect-queue-ZtnHFCAi.js → pdf-effect-queue-J8OnM0jE.js} +1 -1
- package/src/ui/dist/assets/{popover-DL6h35vr.js → popover-CLc0pPP8.js} +1 -1
- package/src/ui/dist/assets/{project-sync-CsX08Qno.js → project-sync-C9IdzdZW.js} +1 -1
- package/src/ui/dist/assets/{select-DvmXt1yY.js → select-Cs2PmzwL.js} +1 -1
- package/src/ui/dist/assets/{sigma-7jpXazui.js → sigma-ClKcHAXm.js} +1 -1
- package/src/ui/dist/assets/{trash-xA7kFt8i.js → trash-DwpbFr3w.js} +1 -1
- package/src/ui/dist/assets/{useCliAccess-DsMwDjOp.js → useCliAccess-NQ8m0Let.js} +1 -1
- package/src/ui/dist/assets/{wrap-text-CwMn-iqb.js → wrap-text-BC-Hltpd.js} +1 -1
- package/src/ui/dist/assets/{zoom-out-R-GWEhzS.js → zoom-out-E_gaeAxL.js} +1 -1
- package/src/ui/dist/index.html +2 -2
|
@@ -0,0 +1,281 @@
|
|
|
1
|
+
# 21 本地模型后端指南:vLLM、Ollama 与 SGLang
|
|
2
|
+
|
|
3
|
+
这篇文档说明如何通过 Codex,把 DeepScientist 接到本地 OpenAI-compatible 模型后端。
|
|
4
|
+
|
|
5
|
+
最关键的一点只有一句话:
|
|
6
|
+
|
|
7
|
+
- 当前 Codex CLI 要求 `wire_api = "responses"`
|
|
8
|
+
- 只有 `/v1/chat/completions` 能工作还不够
|
|
9
|
+
- 在期待 `ds` 或 `ds doctor` 成功之前,必须先验证 `/v1/responses`
|
|
10
|
+
|
|
11
|
+
同时还有一个现实 fallback:
|
|
12
|
+
|
|
13
|
+
- 如果你的后端只有 chat 接口,仍然有机会通过 **Codex CLI `0.57.0`** 跑通
|
|
14
|
+
- 这条旧路径通常需要使用顶层 `model_provider` / `model`,并把 `wire_api` 设为 `chat`
|
|
15
|
+
- DeepScientist 现在会在 Codex 启动探测阶段自动检查这一点;只要发现当前生效 provider 使用的是 `wire_api = "chat"`,就会要求 `codex-cli 0.57.0` 才继续
|
|
16
|
+
|
|
17
|
+
## 1. DeepScientist 实际依赖的是什么
|
|
18
|
+
|
|
19
|
+
DeepScientist 并不会直接和 vLLM、Ollama、SGLang 通信。
|
|
20
|
+
|
|
21
|
+
它真正依赖的是:
|
|
22
|
+
|
|
23
|
+
- `codex`
|
|
24
|
+
- 然后由 `codex` 去调用你在 `~/.codex/config.toml` 里配置的 provider profile
|
|
25
|
+
|
|
26
|
+
所以真实兼容链路是:
|
|
27
|
+
|
|
28
|
+
1. 你的本地模型后端
|
|
29
|
+
2. Codex profile
|
|
30
|
+
3. Codex 启动探测
|
|
31
|
+
4. DeepScientist runner
|
|
32
|
+
|
|
33
|
+
如果第 2 步或第 3 步过不了,DeepScientist 就无法正常启动 Codex runner。
|
|
34
|
+
|
|
35
|
+
## 2. 当前 Codex 必须知道的限制
|
|
36
|
+
|
|
37
|
+
在当前 Codex CLI 中:
|
|
38
|
+
|
|
39
|
+
- 支持 `wire_api = "responses"`
|
|
40
|
+
- 不再接受 `wire_api = "chat"`
|
|
41
|
+
|
|
42
|
+
这意味着:
|
|
43
|
+
|
|
44
|
+
- `vLLM`:如果 OpenAI-compatible server 暴露了 `/v1/responses`,这是当前最推荐的路径
|
|
45
|
+
- `Ollama`:只有在你的版本真的支持 `/v1/responses` 时才建议使用
|
|
46
|
+
- `SGLang`:如果你的部署只有 `/v1/chat/completions` 能工作,那么它和最新版 Codex runner 不兼容
|
|
47
|
+
|
|
48
|
+
## 2.1 支持程度总览
|
|
49
|
+
|
|
50
|
+
| 后端 | `/v1/chat/completions` | `/v1/responses` | 最新版 Codex | `0.57.0` 回退路径 |
|
|
51
|
+
|---|---|---|---|---|
|
|
52
|
+
| vLLM | 支持 | 支持 | 支持 | 通常不需要 |
|
|
53
|
+
| Ollama | 支持 | 取决于版本 | 只有 `/v1/responses` 正常时才支持 | 如果只有 chat,可以尝试 |
|
|
54
|
+
| SGLang | 支持 | 经常缺失或不完整 | chat-only 时不支持 | 可以尝试回退到 `0.57.0` |
|
|
55
|
+
|
|
56
|
+
## 3. 先直接测试后端
|
|
57
|
+
|
|
58
|
+
在动 DeepScientist 之前,先直接验证后端。
|
|
59
|
+
|
|
60
|
+
### 第一步:列模型
|
|
61
|
+
|
|
62
|
+
```bash
|
|
63
|
+
curl http://127.0.0.1:8004/v1/models \
|
|
64
|
+
-H "Authorization: Bearer 1234"
|
|
65
|
+
```
|
|
66
|
+
|
|
67
|
+
你需要从这里拿到一个真实模型名,例如:
|
|
68
|
+
|
|
69
|
+
```text
|
|
70
|
+
/model/gpt-oss-120b
|
|
71
|
+
```
|
|
72
|
+
|
|
73
|
+
### 第二步:测试 chat completions
|
|
74
|
+
|
|
75
|
+
```bash
|
|
76
|
+
curl http://127.0.0.1:8004/v1/chat/completions \
|
|
77
|
+
-H "Content-Type: application/json" \
|
|
78
|
+
-H "Authorization: Bearer 1234" \
|
|
79
|
+
-d '{
|
|
80
|
+
"model": "/model/gpt-oss-120b",
|
|
81
|
+
"messages": [
|
|
82
|
+
{ "role": "user", "content": "Reply with exactly HELLO." }
|
|
83
|
+
]
|
|
84
|
+
}'
|
|
85
|
+
```
|
|
86
|
+
|
|
87
|
+
如果这一步成功,说明后端至少具备 OpenAI chat-compatible 能力。
|
|
88
|
+
|
|
89
|
+
### 第三步:测试 Responses API
|
|
90
|
+
|
|
91
|
+
```bash
|
|
92
|
+
curl http://127.0.0.1:8004/v1/responses \
|
|
93
|
+
-H "Content-Type: application/json" \
|
|
94
|
+
-H "Authorization: Bearer 1234" \
|
|
95
|
+
-d '{
|
|
96
|
+
"model": "/model/gpt-oss-120b",
|
|
97
|
+
"input": "Reply with exactly HELLO."
|
|
98
|
+
}'
|
|
99
|
+
```
|
|
100
|
+
|
|
101
|
+
这一步才是决定性检查。
|
|
102
|
+
|
|
103
|
+
如果 `/v1/responses` 失败,最新版 Codex CLI 就不能正常使用这个后端 profile。
|
|
104
|
+
|
|
105
|
+
## 4. 我们在这台服务器上的实际观察
|
|
106
|
+
|
|
107
|
+
我们已经测试了本机的 `http://127.0.0.1:8004/v1`。
|
|
108
|
+
|
|
109
|
+
结果是:
|
|
110
|
+
|
|
111
|
+
- `GET /v1/models` 成功
|
|
112
|
+
- `POST /v1/chat/completions` 成功
|
|
113
|
+
- `POST /v1/responses` 返回 `500 Internal Server Error`
|
|
114
|
+
- `/v1/models` 返回里显示 `owned_by: "sglang"`
|
|
115
|
+
|
|
116
|
+
所以这条 `8004` 服务当前更像一个 chat-compatible 的 SGLang 风格后端,而不是一个对最新版 Codex 友好的 Responses 后端。
|
|
117
|
+
|
|
118
|
+
这意味着:
|
|
119
|
+
|
|
120
|
+
- 它可以响应原始 chat 请求
|
|
121
|
+
- 但它目前不能直接给最新版 Codex runner 使用
|
|
122
|
+
- 因而 DeepScientist 也不能通过正常 Codex 路径使用它
|
|
123
|
+
|
|
124
|
+
我们还额外做了旧版 Codex 对照测试:
|
|
125
|
+
|
|
126
|
+
- 最新版 Codex + `wire_api = "responses"`:失败
|
|
127
|
+
- Codex `0.57.0` + 顶层 `model_provider` / `model` + `wire_api = "chat"`:成功
|
|
128
|
+
|
|
129
|
+
所以对这台机器上的 `8004` 来说:
|
|
130
|
+
|
|
131
|
+
- **最新版 Codex 路径**:不通
|
|
132
|
+
- **Codex `0.57.0` 回退路径**:可行
|
|
133
|
+
|
|
134
|
+
## 5. 给本地 Responses 后端配置 Codex profile
|
|
135
|
+
|
|
136
|
+
如果你的后端真的支持 `/v1/responses`,可以写成这样:
|
|
137
|
+
|
|
138
|
+
```toml
|
|
139
|
+
[model_providers.local_vllm]
|
|
140
|
+
name = "local_vllm"
|
|
141
|
+
base_url = "http://127.0.0.1:8004/v1"
|
|
142
|
+
env_key = "LOCAL_API_KEY"
|
|
143
|
+
wire_api = "responses"
|
|
144
|
+
requires_openai_auth = false
|
|
145
|
+
|
|
146
|
+
[profiles.local_vllm]
|
|
147
|
+
model = "/model/gpt-oss-120b"
|
|
148
|
+
model_provider = "local_vllm"
|
|
149
|
+
```
|
|
150
|
+
|
|
151
|
+
然后先直接测试 Codex:
|
|
152
|
+
|
|
153
|
+
```bash
|
|
154
|
+
export LOCAL_API_KEY=1234
|
|
155
|
+
codex exec --profile local_vllm --json --cd /tmp --skip-git-repo-check - <<'EOF'
|
|
156
|
+
Reply with exactly HELLO.
|
|
157
|
+
EOF
|
|
158
|
+
```
|
|
159
|
+
|
|
160
|
+
如果这一步过不了,就先不要继续尝试 DeepScientist。
|
|
161
|
+
|
|
162
|
+
## 5.1 只支持 chat 时,回退到 Codex `0.57.0`
|
|
163
|
+
|
|
164
|
+
如果你的后端只有 `/v1/chat/completions`,可以尝试这条回退路径:
|
|
165
|
+
|
|
166
|
+
1. 安装 Codex `0.57.0`
|
|
167
|
+
2. 使用 `wire_api = "chat"`
|
|
168
|
+
3. 把 `model_provider` 和 `model` 写到顶层
|
|
169
|
+
|
|
170
|
+
示例:
|
|
171
|
+
|
|
172
|
+
```toml
|
|
173
|
+
model = "/model/gpt-oss-120b"
|
|
174
|
+
model_provider = "localchat"
|
|
175
|
+
approval_policy = "never"
|
|
176
|
+
sandbox_mode = "workspace-write"
|
|
177
|
+
|
|
178
|
+
[model_providers.localchat]
|
|
179
|
+
name = "localchat"
|
|
180
|
+
base_url = "http://127.0.0.1:8004/v1"
|
|
181
|
+
env_key = "LOCAL_API_KEY"
|
|
182
|
+
wire_api = "chat"
|
|
183
|
+
requires_openai_auth = false
|
|
184
|
+
```
|
|
185
|
+
|
|
186
|
+
然后直接测试:
|
|
187
|
+
|
|
188
|
+
```bash
|
|
189
|
+
export LOCAL_API_KEY=1234
|
|
190
|
+
codex exec --json --cd /tmp --skip-git-repo-check - <<'EOF'
|
|
191
|
+
Reply with exactly HELLO.
|
|
192
|
+
EOF
|
|
193
|
+
```
|
|
194
|
+
|
|
195
|
+
如果这条旧版 Codex 路径能通过,DeepScientist 通常也可以沿用同样的 runner binary 和 provider 思路。
|
|
196
|
+
|
|
197
|
+
## 6. Codex 成功后,再测试 DeepScientist
|
|
198
|
+
|
|
199
|
+
只有当上面的 `codex exec` 能通过时,再继续:
|
|
200
|
+
|
|
201
|
+
```bash
|
|
202
|
+
ds doctor --codex-profile local_vllm
|
|
203
|
+
ds --codex-profile local_vllm
|
|
204
|
+
```
|
|
205
|
+
|
|
206
|
+
这里推荐使用 `ds doctor`。
|
|
207
|
+
|
|
208
|
+
`ds docker` 只是 `ds doctor` 的历史别名,不是 Docker 部署命令。
|
|
209
|
+
|
|
210
|
+
如果你想持久化配置:
|
|
211
|
+
|
|
212
|
+
```yaml
|
|
213
|
+
codex:
|
|
214
|
+
enabled: true
|
|
215
|
+
binary: codex
|
|
216
|
+
config_dir: ~/.codex
|
|
217
|
+
profile: local_vllm
|
|
218
|
+
model: inherit
|
|
219
|
+
model_reasoning_effort: high
|
|
220
|
+
approval_policy: never
|
|
221
|
+
sandbox_mode: danger-full-access
|
|
222
|
+
```
|
|
223
|
+
|
|
224
|
+
## 7. 后端兼容性结论
|
|
225
|
+
|
|
226
|
+
### vLLM
|
|
227
|
+
|
|
228
|
+
推荐。
|
|
229
|
+
|
|
230
|
+
满足下面三个条件时,是当前最稳妥的本地路径:
|
|
231
|
+
|
|
232
|
+
- `/v1/models` 正常
|
|
233
|
+
- `/v1/responses` 正常
|
|
234
|
+
- 模型名稳定可见
|
|
235
|
+
|
|
236
|
+
### Ollama
|
|
237
|
+
|
|
238
|
+
条件支持。
|
|
239
|
+
|
|
240
|
+
只有在下面条件满足时才建议使用:
|
|
241
|
+
|
|
242
|
+
- 当前 Ollama 版本真的暴露了 `/v1/responses`
|
|
243
|
+
- 目标模型可以通过该接口稳定工作
|
|
244
|
+
|
|
245
|
+
如果只有 chat-completions 兼容,不足以支持最新版 Codex,但仍然可以尝试 `0.57.0` 这条旧版 Codex 路径。
|
|
246
|
+
|
|
247
|
+
### SGLang
|
|
248
|
+
|
|
249
|
+
要特别小心。
|
|
250
|
+
|
|
251
|
+
如果你的 SGLang 部署表现是:
|
|
252
|
+
|
|
253
|
+
- `/v1/chat/completions` 正常
|
|
254
|
+
- `/v1/responses` 失败
|
|
255
|
+
|
|
256
|
+
那么它当前就和最新版 Codex runner 不兼容。
|
|
257
|
+
|
|
258
|
+
如果你必须使用这类后端,现实可行的 fallback 就是回退到 Codex `0.57.0` 并使用 `wire_api = "chat"`。
|
|
259
|
+
|
|
260
|
+
## 8. 如果你现在只有 chat-completions
|
|
261
|
+
|
|
262
|
+
如果你的后端只有 `/v1/chat/completions`,当前有四种现实选择:
|
|
263
|
+
|
|
264
|
+
1. 切到支持 Responses 的 vLLM
|
|
265
|
+
2. 升级到真正支持 `/v1/responses` 的 Ollama
|
|
266
|
+
3. 回退到 Codex `0.57.0` 并使用 `wire_api = "chat"`
|
|
267
|
+
4. 在后端前面加一层 Responses-compatible 代理
|
|
268
|
+
|
|
269
|
+
这里的问题本质上是 Codex CLI 的当前要求,不是 DeepScientist 单独某个配置写错了。
|
|
270
|
+
|
|
271
|
+
## 9. 推荐的实际顺序
|
|
272
|
+
|
|
273
|
+
每次都按这个顺序来:
|
|
274
|
+
|
|
275
|
+
1. 先测 `/v1/models`
|
|
276
|
+
2. 再测 `/v1/responses`
|
|
277
|
+
3. 再测 `codex exec --profile <name>`
|
|
278
|
+
4. 再测 `ds doctor --codex-profile <name>`
|
|
279
|
+
5. 最后再启动 `ds --codex-profile <name>`
|
|
280
|
+
|
|
281
|
+
如果第 2 步失败,就先停在那里。不要期待最新版 Codex 路径下的 DeepScientist 可以正常工作。
|
package/docs/zh/README.md
CHANGED
|
@@ -85,7 +85,9 @@ DeepScientist 灵活且易于使用,支持:
|
|
|
85
85
|
- [05 TUI 端到端指南](./05_TUI_GUIDE.md)
|
|
86
86
|
如果你主要在服务器或终端里工作,这篇会带你从 `ds --tui` 一路走到 quest、connector 和跨端协作跑通。
|
|
87
87
|
- [15 Codex Provider 配置](./15_CODEX_PROVIDER_SETUP.md)
|
|
88
|
-
如果你准备通过 MiniMax、GLM
|
|
88
|
+
如果你准备通过 MiniMax、GLM、火山方舟、阿里百炼 Coding Plan 或其他 Codex profile 来运行 DeepScientist,先看这一篇。
|
|
89
|
+
- [21 本地模型后端指南](./21_LOCAL_MODEL_BACKENDS_GUIDE.md)
|
|
90
|
+
如果你想通过 vLLM、Ollama、SGLang 等本地 OpenAI-compatible 后端运行 DeepScientist,先看这一篇。
|
|
89
91
|
- [12 引导式工作流教程](./12_GUIDED_WORKFLOW_TOUR.md)
|
|
90
92
|
按真实产品流程,逐步理解从首页到工作区应该怎么使用。
|
|
91
93
|
- [02 Start Research 参考](./02_START_RESEARCH_GUIDE.md)
|
|
@@ -136,6 +138,8 @@ DeepScientist 灵活且易于使用,支持:
|
|
|
136
138
|
启动诊断、排查常见运行问题,先看这篇。
|
|
137
139
|
- [15 Codex Provider 配置](./15_CODEX_PROVIDER_SETUP.md)
|
|
138
140
|
如果问题更像出在 Codex profile、provider endpoint、API key 或模型配置上,优先看这篇。
|
|
141
|
+
- [21 本地模型后端指南](./21_LOCAL_MODEL_BACKENDS_GUIDE.md)
|
|
142
|
+
如果问题具体出在本地 OpenAI-compatible 后端以及 `/v1/responses` 支持上,优先看这篇。
|
|
139
143
|
- [01 设置参考](./01_SETTINGS_REFERENCE.md)
|
|
140
144
|
如果问题可能和配置、凭据或 connector 有关,再查这篇。
|
|
141
145
|
|
|
@@ -144,12 +148,12 @@ DeepScientist 灵活且易于使用,支持:
|
|
|
144
148
|
- [90 Architecture](../en/90_ARCHITECTURE.md)
|
|
145
149
|
说明系统级约束、核心契约和仓库结构。
|
|
146
150
|
- [91 Development](../en/91_DEVELOPMENT.md)
|
|
147
|
-
|
|
151
|
+
面向维护者的开发工作流、实现说明,以及新增 MCP 工具、skills、connector 的具体清单。
|
|
148
152
|
|
|
149
153
|
## 社群交流
|
|
150
154
|
|
|
151
155
|
欢迎加群讨论。
|
|
152
156
|
|
|
153
157
|
<p align="center">
|
|
154
|
-
<img src="../../assets/readme/
|
|
158
|
+
<img src="../../assets/readme/wechat4.jpg" alt="DeepScientist 微信群" width="360" />
|
|
155
159
|
</p>
|
package/install.sh
CHANGED
|
@@ -112,10 +112,6 @@ else
|
|
|
112
112
|
INSTALL_DIR="$BASE_DIR/cli"
|
|
113
113
|
fi
|
|
114
114
|
|
|
115
|
-
if [ "$DIR_SET" -eq 1 ] && [ "$BIN_DIR_SET" -eq 0 ] && [ -z "$ENV_BIN_DIR" ]; then
|
|
116
|
-
BIN_DIR="$BASE_DIR/bin"
|
|
117
|
-
fi
|
|
118
|
-
|
|
119
115
|
print_step() {
|
|
120
116
|
printf '[install] %s\n' "$1"
|
|
121
117
|
}
|
|
@@ -431,6 +427,47 @@ EOF
|
|
|
431
427
|
chmod +x "$target_path"
|
|
432
428
|
}
|
|
433
429
|
|
|
430
|
+
record_install_index() {
|
|
431
|
+
node - "$BASE_DIR" "$INSTALL_DIR" "$BIN_DIR" <<'NODE'
|
|
432
|
+
const fs = require('node:fs');
|
|
433
|
+
const os = require('node:os');
|
|
434
|
+
const path = require('node:path');
|
|
435
|
+
|
|
436
|
+
const home = path.resolve(process.argv[2] || '');
|
|
437
|
+
const installDir = path.resolve(process.argv[3] || '');
|
|
438
|
+
const binDir = path.resolve(process.argv[4] || '');
|
|
439
|
+
const indexPath = path.join(os.homedir(), '.deepscientist', 'install-index.json');
|
|
440
|
+
const wrapperPaths = ['ds', 'ds-cli', 'research', 'resear'].map((name) => path.join(binDir, name));
|
|
441
|
+
const entry = {
|
|
442
|
+
home,
|
|
443
|
+
install_mode: 'install-local',
|
|
444
|
+
install_dir: installDir,
|
|
445
|
+
package_root: installDir,
|
|
446
|
+
launcher_path: path.join(installDir, 'bin', 'ds.js'),
|
|
447
|
+
wrapper_paths: wrapperPaths,
|
|
448
|
+
updated_at: new Date().toISOString(),
|
|
449
|
+
created_at: new Date().toISOString(),
|
|
450
|
+
};
|
|
451
|
+
let installs = [];
|
|
452
|
+
try {
|
|
453
|
+
const payload = JSON.parse(fs.readFileSync(indexPath, 'utf8'));
|
|
454
|
+
installs = Array.isArray(payload?.installs) ? payload.installs : [];
|
|
455
|
+
} catch {}
|
|
456
|
+
const sameEntry = (item) =>
|
|
457
|
+
String(item?.home || '') === home
|
|
458
|
+
&& String(item?.install_dir || '') === installDir
|
|
459
|
+
&& String(item?.install_mode || '') === 'install-local';
|
|
460
|
+
const existing = installs.find((item) => sameEntry(item));
|
|
461
|
+
if (existing && existing.created_at) {
|
|
462
|
+
entry.created_at = existing.created_at;
|
|
463
|
+
}
|
|
464
|
+
installs = installs.filter((item) => !sameEntry(item));
|
|
465
|
+
installs.push(entry);
|
|
466
|
+
fs.mkdirSync(path.dirname(indexPath), { recursive: true });
|
|
467
|
+
fs.writeFileSync(indexPath, `${JSON.stringify({ installs }, null, 2)}\n`, 'utf8');
|
|
468
|
+
NODE
|
|
469
|
+
}
|
|
470
|
+
|
|
434
471
|
require_command node
|
|
435
472
|
require_command npm
|
|
436
473
|
|
|
@@ -466,6 +503,7 @@ write_global_wrapper "$BIN_DIR/ds" "ds"
|
|
|
466
503
|
write_global_wrapper "$BIN_DIR/ds-cli" "ds-cli"
|
|
467
504
|
write_global_wrapper "$BIN_DIR/research" "research"
|
|
468
505
|
write_global_wrapper "$BIN_DIR/resear" "resear"
|
|
506
|
+
record_install_index
|
|
469
507
|
|
|
470
508
|
print_step "Install complete"
|
|
471
509
|
printf 'Install dir: %s\n' "$INSTALL_DIR"
|
|
@@ -475,6 +513,10 @@ printf 'Start web workspace: %s\n' "$BIN_DIR/ds --web"
|
|
|
475
513
|
printf 'Default start: %s\n' "$BIN_DIR/ds"
|
|
476
514
|
printf 'When `ds` starts, it prints the local Web URL and opens it automatically when supported.\n'
|
|
477
515
|
printf 'If `uv` is missing, the first `ds` start will bootstrap a local copy automatically under the DeepScientist home.\n'
|
|
516
|
+
if [ "$DIR_SET" -eq 1 ] && [ "$BIN_DIR_SET" -eq 0 ] && [ -z "$ENV_BIN_DIR" ]; then
|
|
517
|
+
printf 'Custom install dir detected; launcher wrappers were still refreshed in the default global bin dir: %s\n' "$BIN_DIR"
|
|
518
|
+
printf 'If you prefer install-local wrappers instead, rerun with: --bin-dir %s/bin\n' "$BASE_DIR"
|
|
519
|
+
fi
|
|
478
520
|
if [ "$WITH_TINYTEX" -eq 1 ]; then
|
|
479
521
|
print_step "Installing TinyTeX pdflatex runtime"
|
|
480
522
|
"$INSTALL_DIR/bin/ds" latex install-runtime
|
package/package.json
CHANGED
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
{
|
|
2
2
|
"name": "@researai/deepscientist",
|
|
3
|
-
"version": "1.5.
|
|
3
|
+
"version": "1.5.17",
|
|
4
4
|
"description": "DeepScientist is not just a fully open-source autonomous scientific discovery system. It is also a research map that keeps growing from every round.",
|
|
5
5
|
"license": "Apache-2.0",
|
|
6
6
|
"files": [
|
|
@@ -47,6 +47,7 @@
|
|
|
47
47
|
"ds": "node ./bin/ds.js",
|
|
48
48
|
"start": "node ./bin/ds.js daemon",
|
|
49
49
|
"install:local": "bash ./install.sh",
|
|
50
|
+
"preuninstall": "node ./bin/ds.js uninstall --yes --origin npm",
|
|
50
51
|
"ui:install": "npm --prefix src/ui install",
|
|
51
52
|
"ui:dev": "npm --prefix src/ui run dev",
|
|
52
53
|
"ui:build": "npm --prefix src/ui run build",
|
package/pyproject.toml
CHANGED
|
@@ -1021,8 +1021,13 @@ class WeixinConnectorBridge(BaseConnectorBridge):
|
|
|
1021
1021
|
@classmethod
|
|
1022
1022
|
def _retry_delays_for_item(cls, item: dict[str, Any], exc: Exception) -> tuple[float, ...]:
|
|
1023
1023
|
message = str(exc or "").strip().lower()
|
|
1024
|
-
if "ret=-2" in message
|
|
1024
|
+
if "ret=-2" not in message:
|
|
1025
|
+
return ()
|
|
1026
|
+
item_type = cls._item_type(item)
|
|
1027
|
+
if item_type in {4, 5}:
|
|
1025
1028
|
return cls._MEDIA_SEND_RETRY_DELAYS_SECONDS
|
|
1029
|
+
if item_type == 1:
|
|
1030
|
+
return cls._TEXT_SEND_RETRY_DELAYS_SECONDS
|
|
1026
1031
|
return ()
|
|
1027
1032
|
|
|
1028
1033
|
def _send_items(
|
|
@@ -1044,7 +1049,8 @@ class WeixinConnectorBridge(BaseConnectorBridge):
|
|
|
1044
1049
|
if media_item:
|
|
1045
1050
|
time.sleep(self._MEDIA_SEND_INITIAL_DELAY_SECONDS)
|
|
1046
1051
|
retry_delays: tuple[float, ...] = ()
|
|
1047
|
-
|
|
1052
|
+
max_retries = max(len(self._TEXT_SEND_RETRY_DELAYS_SECONDS), len(self._MEDIA_SEND_RETRY_DELAYS_SECONDS))
|
|
1053
|
+
for attempt in range(1 + max_retries):
|
|
1048
1054
|
client_id = self._next_client_id()
|
|
1049
1055
|
try:
|
|
1050
1056
|
send_weixin_message(
|