@researai/deepscientist 1.5.11 → 1.5.13
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +8 -8
- package/bin/ds.js +375 -61
- package/docs/en/00_QUICK_START.md +55 -4
- package/docs/en/01_SETTINGS_REFERENCE.md +15 -0
- package/docs/en/02_START_RESEARCH_GUIDE.md +68 -4
- package/docs/en/09_DOCTOR.md +48 -4
- package/docs/en/12_GUIDED_WORKFLOW_TOUR.md +21 -2
- package/docs/en/15_CODEX_PROVIDER_SETUP.md +382 -0
- package/docs/en/README.md +4 -0
- package/docs/zh/00_QUICK_START.md +54 -3
- package/docs/zh/01_SETTINGS_REFERENCE.md +15 -0
- package/docs/zh/02_START_RESEARCH_GUIDE.md +69 -3
- package/docs/zh/09_DOCTOR.md +48 -2
- package/docs/zh/12_GUIDED_WORKFLOW_TOUR.md +21 -2
- package/docs/zh/15_CODEX_PROVIDER_SETUP.md +383 -0
- package/docs/zh/README.md +4 -1
- package/package.json +2 -1
- package/pyproject.toml +1 -1
- package/src/deepscientist/__init__.py +1 -1
- package/src/deepscientist/bash_exec/monitor.py +7 -5
- package/src/deepscientist/bash_exec/service.py +84 -21
- package/src/deepscientist/channels/local.py +3 -3
- package/src/deepscientist/channels/qq.py +7 -7
- package/src/deepscientist/channels/relay.py +7 -7
- package/src/deepscientist/channels/weixin_ilink.py +90 -19
- package/src/deepscientist/cli.py +3 -0
- package/src/deepscientist/codex_cli_compat.py +117 -0
- package/src/deepscientist/config/models.py +1 -0
- package/src/deepscientist/config/service.py +173 -25
- package/src/deepscientist/daemon/app.py +314 -6
- package/src/deepscientist/doctor.py +1 -5
- package/src/deepscientist/mcp/server.py +124 -3
- package/src/deepscientist/prompts/builder.py +113 -11
- package/src/deepscientist/quest/service.py +247 -31
- package/src/deepscientist/runners/codex.py +132 -24
- package/src/deepscientist/runners/runtime_overrides.py +9 -0
- package/src/deepscientist/shared.py +33 -14
- package/src/prompts/connectors/qq.md +2 -1
- package/src/prompts/connectors/weixin.md +2 -1
- package/src/prompts/contracts/shared_interaction.md +4 -1
- package/src/prompts/system.md +59 -9
- package/src/skills/analysis-campaign/SKILL.md +46 -6
- package/src/skills/analysis-campaign/references/campaign-plan-template.md +21 -8
- package/src/skills/baseline/SKILL.md +1 -1
- package/src/skills/baseline/references/artifact-payload-examples.md +39 -0
- package/src/skills/decision/SKILL.md +1 -1
- package/src/skills/experiment/SKILL.md +1 -1
- package/src/skills/finalize/SKILL.md +1 -1
- package/src/skills/idea/SKILL.md +1 -1
- package/src/skills/intake-audit/SKILL.md +1 -1
- package/src/skills/rebuttal/SKILL.md +74 -1
- package/src/skills/rebuttal/references/response-letter-template.md +55 -11
- package/src/skills/review/SKILL.md +118 -1
- package/src/skills/review/references/experiment-todo-template.md +23 -0
- package/src/skills/review/references/review-report-template.md +16 -0
- package/src/skills/review/references/revision-log-template.md +4 -0
- package/src/skills/scout/SKILL.md +1 -1
- package/src/skills/write/SKILL.md +168 -7
- package/src/skills/write/references/paper-experiment-matrix-template.md +131 -0
- package/src/tui/dist/lib/connectorConfig.js +90 -0
- package/src/tui/dist/lib/qr.js +21 -0
- package/src/tui/package.json +2 -1
- package/src/ui/dist/assets/{AiManusChatView-D0mTXG4-.js → AiManusChatView-CnJcXynW.js} +12 -12
- package/src/ui/dist/assets/{AnalysisPlugin-Db0cTXxm.js → AnalysisPlugin-DeyzPEhV.js} +1 -1
- package/src/ui/dist/assets/{CliPlugin-DrV8je02.js → CliPlugin-CB1YODQn.js} +9 -9
- package/src/ui/dist/assets/{CodeEditorPlugin-QXMSCH71.js → CodeEditorPlugin-B-xicq1e.js} +8 -8
- package/src/ui/dist/assets/{CodeViewerPlugin-7hhtWj_E.js → CodeViewerPlugin-DT54ysXa.js} +5 -5
- package/src/ui/dist/assets/{DocViewerPlugin-BWMSnRJe.js → DocViewerPlugin-DQtKT-VD.js} +3 -3
- package/src/ui/dist/assets/{GitDiffViewerPlugin-7J9h9Vy_.js → GitDiffViewerPlugin-hqHbCfnv.js} +20 -20
- package/src/ui/dist/assets/{ImageViewerPlugin-CHJl_0lr.js → ImageViewerPlugin-OcVo33jV.js} +5 -5
- package/src/ui/dist/assets/{LabCopilotPanel-1qSow1es.js → LabCopilotPanel-DdGwhEUV.js} +11 -11
- package/src/ui/dist/assets/{LabPlugin-eQpPPCEp.js → LabPlugin-Ciz1gDaX.js} +2 -2
- package/src/ui/dist/assets/{LatexPlugin-BwRfi89Z.js → LatexPlugin-BhmjNQRC.js} +37 -11
- package/src/ui/dist/assets/{MarkdownViewerPlugin-836PVQWV.js → MarkdownViewerPlugin-BzdVH9Bx.js} +4 -4
- package/src/ui/dist/assets/{MarketplacePlugin-C2y_556i.js → MarketplacePlugin-DmyHspXt.js} +3 -3
- package/src/ui/dist/assets/{NotebookEditor-DIX7Mlzu.js → NotebookEditor-BMXKrDRk.js} +1 -1
- package/src/ui/dist/assets/{NotebookEditor-BRzJbGsn.js → NotebookEditor-BTVYRGkm.js} +11 -11
- package/src/ui/dist/assets/{PdfLoader-DzRaTAlq.js → PdfLoader-CvcjJHXv.js} +1 -1
- package/src/ui/dist/assets/{PdfMarkdownPlugin-DZUfIUnp.js → PdfMarkdownPlugin-DW2ej8Vk.js} +2 -2
- package/src/ui/dist/assets/{PdfViewerPlugin-BwtICzue.js → PdfViewerPlugin-CmlDxbhU.js} +10 -10
- package/src/ui/dist/assets/{SearchPlugin-DHeIAMsx.js → SearchPlugin-DAjQZPSv.js} +1 -1
- package/src/ui/dist/assets/{TextViewerPlugin-C3tCmFox.js → TextViewerPlugin-C-nVAZb_.js} +5 -5
- package/src/ui/dist/assets/{VNCViewer-CQsKVm3t.js → VNCViewer-D7-dIYon.js} +10 -10
- package/src/ui/dist/assets/{bot-BEA2vWuK.js → bot-C_G4WtNI.js} +1 -1
- package/src/ui/dist/assets/{code-XfbSR8K2.js → code-Cd7WfiWq.js} +1 -1
- package/src/ui/dist/assets/{file-content-BjxNaIfy.js → file-content-B57zsL9y.js} +1 -1
- package/src/ui/dist/assets/{file-diff-panel-D_lLVQk0.js → file-diff-panel-DVoheLFq.js} +1 -1
- package/src/ui/dist/assets/{file-socket-D9x_5vlY.js → file-socket-B5kXFxZP.js} +1 -1
- package/src/ui/dist/assets/{image-BhWT33W1.js → image-LLOjkMHF.js} +1 -1
- package/src/ui/dist/assets/{index-Dqj-Mjb4.css → index-BQG-1s2o.css} +40 -2
- package/src/ui/dist/assets/{index--c4iXtuy.js → index-C3r2iGrp.js} +12 -12
- package/src/ui/dist/assets/{index-DZTZ8mWP.js → index-CLQauncb.js} +911 -120
- package/src/ui/dist/assets/{index-PJbSbPTy.js → index-Dxa2eYMY.js} +1 -1
- package/src/ui/dist/assets/{index-BDxipwrC.js → index-hOUOWbW2.js} +2 -2
- package/src/ui/dist/assets/{monaco-K8izTGgo.js → monaco-BGGAEii3.js} +1 -1
- package/src/ui/dist/assets/{pdf-effect-queue-DfBors6y.js → pdf-effect-queue-DlEr1_y5.js} +1 -1
- package/src/ui/dist/assets/{popover-yFK1J4fL.js → popover-CWJbJuYY.js} +1 -1
- package/src/ui/dist/assets/{project-sync-PENr2zcz.js → project-sync-CRJiucYO.js} +18 -4
- package/src/ui/dist/assets/{select-CAbJDfYv.js → select-CoHB7pvH.js} +2 -2
- package/src/ui/dist/assets/{sigma-DEuYJqTl.js → sigma-D5aJWR8J.js} +1 -1
- package/src/ui/dist/assets/{square-check-big-omoSUmcd.js → square-check-big-DUK_mnkS.js} +1 -1
- package/src/ui/dist/assets/{trash--F119N47.js → trash-ChU3SEE3.js} +1 -1
- package/src/ui/dist/assets/{useCliAccess-D31UR23I.js → useCliAccess-BrJBV3tY.js} +1 -1
- package/src/ui/dist/assets/{useFileDiffOverlay-BH6KcMzq.js → useFileDiffOverlay-C2OQaVWc.js} +1 -1
- package/src/ui/dist/assets/{wrap-text-CZ613PM5.js → wrap-text-C7Qqh-om.js} +1 -1
- package/src/ui/dist/assets/{zoom-out-BgDLAv3z.js → zoom-out-rtX0FKya.js} +1 -1
- package/src/ui/dist/index.html +2 -2
|
@@ -0,0 +1,382 @@
|
|
|
1
|
+
# 15 Codex Provider Setup
|
|
2
|
+
|
|
3
|
+
DeepScientist does not implement separate provider adapters for MiniMax, GLM, Volcengine Ark, or Alibaba Bailian.
|
|
4
|
+
|
|
5
|
+
Instead, it reuses the same Codex CLI setup that already works in your terminal.
|
|
6
|
+
|
|
7
|
+
The recommended order is always:
|
|
8
|
+
|
|
9
|
+
1. make Codex itself work first
|
|
10
|
+
2. confirm `codex` or `codex --profile <name>` works in a terminal
|
|
11
|
+
3. run `ds doctor`
|
|
12
|
+
4. run `ds` or `ds --codex-profile <name>`
|
|
13
|
+
|
|
14
|
+
## Three supported patterns
|
|
15
|
+
|
|
16
|
+
### 1. Default OpenAI login path
|
|
17
|
+
|
|
18
|
+
Use this when your Codex CLI works through the standard OpenAI login flow.
|
|
19
|
+
|
|
20
|
+
```bash
|
|
21
|
+
codex --login
|
|
22
|
+
ds doctor
|
|
23
|
+
ds
|
|
24
|
+
```
|
|
25
|
+
|
|
26
|
+
### 2. One-off provider profile
|
|
27
|
+
|
|
28
|
+
Use this when you already have a named Codex profile such as `m27`, `glm`, `ark`, or `bailian`.
|
|
29
|
+
|
|
30
|
+
```bash
|
|
31
|
+
codex --profile m27
|
|
32
|
+
ds doctor --codex-profile m27
|
|
33
|
+
ds --codex-profile m27
|
|
34
|
+
```
|
|
35
|
+
|
|
36
|
+
If you need one specific Codex binary for this run, use:
|
|
37
|
+
|
|
38
|
+
```bash
|
|
39
|
+
ds doctor --codex /absolute/path/to/codex --codex-profile m27
|
|
40
|
+
ds --codex /absolute/path/to/codex --codex-profile m27
|
|
41
|
+
```
|
|
42
|
+
|
|
43
|
+
This is the simplest path. You do not need to edit `runners.yaml` just to try one provider-backed session.
|
|
44
|
+
|
|
45
|
+
### 3. Persistent provider profile
|
|
46
|
+
|
|
47
|
+
Use this when you want DeepScientist to keep using the same profile by default.
|
|
48
|
+
|
|
49
|
+
```yaml
|
|
50
|
+
codex:
|
|
51
|
+
enabled: true
|
|
52
|
+
binary: codex
|
|
53
|
+
config_dir: ~/.codex
|
|
54
|
+
profile: minimax
|
|
55
|
+
model: inherit
|
|
56
|
+
model_reasoning_effort: xhigh
|
|
57
|
+
approval_policy: on-request
|
|
58
|
+
sandbox_mode: workspace-write
|
|
59
|
+
```
|
|
60
|
+
|
|
61
|
+
Important:
|
|
62
|
+
|
|
63
|
+
- keep `model: inherit` for provider-backed Codex profiles unless you are certain the provider accepts the explicit model id you plan to send
|
|
64
|
+
- DeepScientist will reuse the same `~/.codex/config.toml` and environment that your terminal Codex already uses
|
|
65
|
+
|
|
66
|
+
## Provider matrix
|
|
67
|
+
|
|
68
|
+
| Provider | Official docs | Codex login needed | What DeepScientist should use |
|
|
69
|
+
|---|---|---|---|
|
|
70
|
+
| OpenAI | use the normal Codex setup | Yes | no profile; run `ds` |
|
|
71
|
+
| MiniMax | [MiniMax Codex CLI](https://platform.minimaxi.com/docs/coding-plan/codex-cli) | No | your Codex profile, for example `ds --codex-profile m27` |
|
|
72
|
+
| GLM | [GLM Coding Plan: Other Tools](https://docs.bigmodel.cn/cn/coding-plan/tool/others) | No | a Codex profile that targets the GLM coding endpoint |
|
|
73
|
+
| Volcengine Ark | [Ark Coding Plan Overview](https://www.volcengine.com/docs/82379/1925114?lang=zh) | No | a Codex profile that targets the Ark coding endpoint |
|
|
74
|
+
| Alibaba Bailian | [Bailian Coding Plan: Other Tools](https://help.aliyun.com/zh/model-studio/other-tools-coding-plan) | No | a Codex profile that targets the Bailian coding endpoint |
|
|
75
|
+
|
|
76
|
+
## OpenAI
|
|
77
|
+
|
|
78
|
+
### What to prepare
|
|
79
|
+
|
|
80
|
+
- a normal Codex CLI install
|
|
81
|
+
- a successful `codex --login` or `codex` interactive first-run setup
|
|
82
|
+
|
|
83
|
+
### DeepScientist commands
|
|
84
|
+
|
|
85
|
+
```bash
|
|
86
|
+
ds doctor
|
|
87
|
+
ds
|
|
88
|
+
```
|
|
89
|
+
|
|
90
|
+
### Persistent runner config
|
|
91
|
+
|
|
92
|
+
```yaml
|
|
93
|
+
codex:
|
|
94
|
+
enabled: true
|
|
95
|
+
binary: codex
|
|
96
|
+
config_dir: ~/.codex
|
|
97
|
+
profile: ""
|
|
98
|
+
model: gpt-5.4
|
|
99
|
+
```
|
|
100
|
+
|
|
101
|
+
## MiniMax
|
|
102
|
+
|
|
103
|
+
MiniMax is the clearest profile-based case. Its official Codex CLI guide configures a custom Codex provider and sets `requires_openai_auth = false`.
|
|
104
|
+
|
|
105
|
+
Official doc:
|
|
106
|
+
|
|
107
|
+
- <https://platform.minimaxi.com/docs/coding-plan/codex-cli>
|
|
108
|
+
|
|
109
|
+
### Verified compatibility note
|
|
110
|
+
|
|
111
|
+
Verified against MiniMax's current Codex CLI doc, together with local compatibility testing, on 2026-03-25:
|
|
112
|
+
|
|
113
|
+
- MiniMax's Codex CLI page currently recommends `@openai/codex@0.57.0`
|
|
114
|
+
- the Coding Plan endpoint to use is `https://api.minimaxi.com/v1`
|
|
115
|
+
- MiniMax's official page uses `m21` as the profile name, but that profile name is only a local alias; this repo uses `m27` consistently in examples
|
|
116
|
+
- the `codex-MiniMax-*` model names shown on MiniMax's page did not pass reliably through Codex CLI in local testing with the provided key
|
|
117
|
+
- the locally verified working path was `MiniMax-M2.7` + `m27` + `model: inherit` + Codex CLI `0.57.0`
|
|
118
|
+
- the latest `@openai/codex` release still does not line up cleanly with MiniMax's current guide
|
|
119
|
+
|
|
120
|
+
If you want the most reproducible DeepScientist + MiniMax path today, use Codex CLI `0.57.0`.
|
|
121
|
+
|
|
122
|
+
### What to prepare
|
|
123
|
+
|
|
124
|
+
- Codex CLI `0.57.0`
|
|
125
|
+
- a MiniMax `Coding Plan Key`
|
|
126
|
+
- `MINIMAX_API_KEY` available in the shell that starts Codex and DeepScientist
|
|
127
|
+
- a shell with `OPENAI_API_KEY` and `OPENAI_BASE_URL` unset
|
|
128
|
+
- a working Codex profile in `~/.codex/config.toml`
|
|
129
|
+
|
|
130
|
+
### Install Codex CLI `0.57.0`
|
|
131
|
+
|
|
132
|
+
The simplest path is to pin the global Codex install:
|
|
133
|
+
|
|
134
|
+
```bash
|
|
135
|
+
npm install -g @openai/codex@0.57.0
|
|
136
|
+
codex --version
|
|
137
|
+
```
|
|
138
|
+
|
|
139
|
+
Expected output:
|
|
140
|
+
|
|
141
|
+
```text
|
|
142
|
+
codex-cli 0.57.0
|
|
143
|
+
```
|
|
144
|
+
|
|
145
|
+
If you want to keep another Codex version elsewhere, create a small wrapper script and point `runners.codex.binary` at that absolute path.
|
|
146
|
+
|
|
147
|
+
### Codex-side setup
|
|
148
|
+
|
|
149
|
+
Use `https://api.minimaxi.com/v1`, not `https://api.minimax.io/v1`.
|
|
150
|
+
|
|
151
|
+
MiniMax's doc requires clearing the OpenAI environment variables first:
|
|
152
|
+
|
|
153
|
+
```bash
|
|
154
|
+
unset OPENAI_API_KEY
|
|
155
|
+
unset OPENAI_BASE_URL
|
|
156
|
+
export MINIMAX_API_KEY="..."
|
|
157
|
+
```
|
|
158
|
+
|
|
159
|
+
MiniMax's official page uses `m21` as the example profile name. Since the profile name is only a local alias, this repo rewrites that example to `m27`.
|
|
160
|
+
|
|
161
|
+
The important difference is the model name:
|
|
162
|
+
|
|
163
|
+
- MiniMax's page currently shows `codex-MiniMax-M2.5`
|
|
164
|
+
- in local testing, direct MiniMax API calls worked with `MiniMax-M2.7`
|
|
165
|
+
- with the same key, `codex-MiniMax-M2.5` and `codex-MiniMax-M2.7` both failed through Codex CLI
|
|
166
|
+
|
|
167
|
+
So the config below is the currently recommended DeepScientist working configuration:
|
|
168
|
+
|
|
169
|
+
```toml
|
|
170
|
+
[model_providers.minimax]
|
|
171
|
+
name = "MiniMax Chat Completions API"
|
|
172
|
+
base_url = "https://api.minimaxi.com/v1"
|
|
173
|
+
env_key = "MINIMAX_API_KEY"
|
|
174
|
+
wire_api = "chat"
|
|
175
|
+
requires_openai_auth = false
|
|
176
|
+
request_max_retries = 4
|
|
177
|
+
stream_max_retries = 10
|
|
178
|
+
stream_idle_timeout_ms = 300000
|
|
179
|
+
|
|
180
|
+
[profiles.m27]
|
|
181
|
+
model = "MiniMax-M2.7"
|
|
182
|
+
model_provider = "minimax"
|
|
183
|
+
```
|
|
184
|
+
|
|
185
|
+
What DeepScientist supports now:
|
|
186
|
+
|
|
187
|
+
- if you use this profile-only MiniMax config with Codex CLI `0.57.0`, DeepScientist automatically promotes the selected profile's `model_provider` and `model` to the top level inside its probe/runtime copy of `.codex/config.toml`
|
|
188
|
+
- this means DeepScientist can start even when plain terminal `codex --profile m27` still fails on that exact profile-only shape
|
|
189
|
+
|
|
190
|
+
If you want plain terminal `codex --profile <name>` to work too, use the explicit top-level compatibility form instead:
|
|
191
|
+
|
|
192
|
+
```toml
|
|
193
|
+
model = "MiniMax-M2.7"
|
|
194
|
+
model_provider = "minimax"
|
|
195
|
+
approval_policy = "never"
|
|
196
|
+
sandbox_mode = "workspace-write"
|
|
197
|
+
|
|
198
|
+
[model_providers.minimax]
|
|
199
|
+
name = "MiniMax Chat Completions API"
|
|
200
|
+
base_url = "https://api.minimaxi.com/v1"
|
|
201
|
+
env_key = "MINIMAX_API_KEY"
|
|
202
|
+
wire_api = "chat"
|
|
203
|
+
requires_openai_auth = false
|
|
204
|
+
request_max_retries = 4
|
|
205
|
+
stream_max_retries = 10
|
|
206
|
+
stream_idle_timeout_ms = 300000
|
|
207
|
+
|
|
208
|
+
[profiles.m27]
|
|
209
|
+
model = "MiniMax-M2.7"
|
|
210
|
+
model_provider = "minimax"
|
|
211
|
+
```
|
|
212
|
+
|
|
213
|
+
Then:
|
|
214
|
+
|
|
215
|
+
```bash
|
|
216
|
+
codex --profile m27
|
|
217
|
+
```
|
|
218
|
+
|
|
219
|
+
### DeepScientist commands
|
|
220
|
+
|
|
221
|
+
```bash
|
|
222
|
+
ds doctor --codex-profile m27
|
|
223
|
+
ds --codex-profile m27
|
|
224
|
+
```
|
|
225
|
+
|
|
226
|
+
### Persistent runner config
|
|
227
|
+
|
|
228
|
+
```yaml
|
|
229
|
+
codex:
|
|
230
|
+
enabled: true
|
|
231
|
+
binary: /tmp/codex057-wrapper
|
|
232
|
+
config_dir: ~/.codex
|
|
233
|
+
profile: m27
|
|
234
|
+
model: inherit
|
|
235
|
+
model_reasoning_effort: high
|
|
236
|
+
```
|
|
237
|
+
|
|
238
|
+
If you already pinned your global `codex` binary to `0.57.0`, you can set `binary: codex` instead. The absolute wrapper path here is only to make the version choice explicit.
|
|
239
|
+
|
|
240
|
+
If you do not want to persist that path in `runners.yaml`, you can keep `binary: codex` there and launch ad hoc with:
|
|
241
|
+
|
|
242
|
+
```bash
|
|
243
|
+
ds --codex /absolute/path/to/codex --codex-profile m27
|
|
244
|
+
```
|
|
245
|
+
|
|
246
|
+
DeepScientist now does two MiniMax-specific compatibility steps for the `0.57.0` path:
|
|
247
|
+
|
|
248
|
+
- it downgrades `xhigh` to `high` automatically when the Codex CLI does not support `xhigh`
|
|
249
|
+
- it auto-adapts MiniMax's profile-only `model_provider` / `model` shape inside the temporary DeepScientist Codex home when needed
|
|
250
|
+
|
|
251
|
+
## GLM
|
|
252
|
+
|
|
253
|
+
GLM documents the Coding Plan as an OpenAI-compatible coding endpoint rather than a dedicated Codex login flow.
|
|
254
|
+
|
|
255
|
+
Official docs:
|
|
256
|
+
|
|
257
|
+
- <https://docs.bigmodel.cn/cn/coding-plan/tool/others>
|
|
258
|
+
- <https://docs.bigmodel.cn/cn/coding-plan/faq>
|
|
259
|
+
|
|
260
|
+
### Official provider values
|
|
261
|
+
|
|
262
|
+
- Base URL: `https://open.bigmodel.cn/api/coding/paas/v4`
|
|
263
|
+
- API key: your GLM Coding Plan key
|
|
264
|
+
- Model: `GLM-4.7` for the documented path, or `GLM-5` where supported
|
|
265
|
+
|
|
266
|
+
### Recommended Codex adaptation
|
|
267
|
+
|
|
268
|
+
GLM does not currently publish a separate Codex CLI page in the same style as MiniMax. The practical DeepScientist path is:
|
|
269
|
+
|
|
270
|
+
1. create a Codex profile in `~/.codex/config.toml` that points to the GLM coding endpoint above
|
|
271
|
+
2. make sure `codex --profile glm` works in a terminal first
|
|
272
|
+
3. run DeepScientist with the same profile
|
|
273
|
+
|
|
274
|
+
### DeepScientist commands
|
|
275
|
+
|
|
276
|
+
```bash
|
|
277
|
+
ds doctor --codex-profile glm
|
|
278
|
+
ds --codex-profile glm
|
|
279
|
+
```
|
|
280
|
+
|
|
281
|
+
### Persistent runner config
|
|
282
|
+
|
|
283
|
+
```yaml
|
|
284
|
+
codex:
|
|
285
|
+
enabled: true
|
|
286
|
+
binary: codex
|
|
287
|
+
config_dir: ~/.codex
|
|
288
|
+
profile: glm
|
|
289
|
+
model: inherit
|
|
290
|
+
```
|
|
291
|
+
|
|
292
|
+
## Volcengine Ark
|
|
293
|
+
|
|
294
|
+
Volcengine Ark explicitly lists Codex CLI as a supported coding tool.
|
|
295
|
+
|
|
296
|
+
Official doc:
|
|
297
|
+
|
|
298
|
+
- <https://www.volcengine.com/docs/82379/1925114?lang=zh>
|
|
299
|
+
|
|
300
|
+
### Official provider values
|
|
301
|
+
|
|
302
|
+
- OpenAI-compatible Base URL: `https://ark.cn-beijing.volces.com/api/coding/v3`
|
|
303
|
+
- Supported coding models: `doubao-seed-code-preview-latest`, `ark-code-latest`
|
|
304
|
+
- Use the Coding Plan key and the exact Coding Plan endpoint
|
|
305
|
+
|
|
306
|
+
### Recommended Codex adaptation
|
|
307
|
+
|
|
308
|
+
Create a Codex profile that targets the Ark coding endpoint and test it directly first:
|
|
309
|
+
|
|
310
|
+
```bash
|
|
311
|
+
codex --profile ark
|
|
312
|
+
```
|
|
313
|
+
|
|
314
|
+
Then start DeepScientist with the same profile:
|
|
315
|
+
|
|
316
|
+
```bash
|
|
317
|
+
ds doctor --codex-profile ark
|
|
318
|
+
ds --codex-profile ark
|
|
319
|
+
```
|
|
320
|
+
|
|
321
|
+
### Persistent runner config
|
|
322
|
+
|
|
323
|
+
```yaml
|
|
324
|
+
codex:
|
|
325
|
+
enabled: true
|
|
326
|
+
binary: codex
|
|
327
|
+
config_dir: ~/.codex
|
|
328
|
+
profile: ark
|
|
329
|
+
model: inherit
|
|
330
|
+
```
|
|
331
|
+
|
|
332
|
+
## Alibaba Bailian
|
|
333
|
+
|
|
334
|
+
Bailian documents the Coding Plan as an OpenAI-compatible coding endpoint. It requires the Coding Plan-specific key and endpoint, not the generic platform endpoint.
|
|
335
|
+
|
|
336
|
+
Official docs:
|
|
337
|
+
|
|
338
|
+
- <https://help.aliyun.com/zh/model-studio/other-tools-coding-plan>
|
|
339
|
+
- <https://help.aliyun.com/zh/model-studio/coding-plan-faq>
|
|
340
|
+
|
|
341
|
+
### Official provider values
|
|
342
|
+
|
|
343
|
+
- OpenAI-compatible Base URL: `https://coding.dashscope.aliyuncs.com/v1`
|
|
344
|
+
- API key: Coding Plan-specific key, typically `sk-sp-...`
|
|
345
|
+
- Model: choose a Coding Plan-supported model from the current Bailian overview
|
|
346
|
+
|
|
347
|
+
### Recommended Codex adaptation
|
|
348
|
+
|
|
349
|
+
Create a Codex profile that points to the Bailian Coding Plan endpoint and test it directly first:
|
|
350
|
+
|
|
351
|
+
```bash
|
|
352
|
+
codex --profile bailian
|
|
353
|
+
```
|
|
354
|
+
|
|
355
|
+
Then start DeepScientist with the same profile:
|
|
356
|
+
|
|
357
|
+
```bash
|
|
358
|
+
ds doctor --codex-profile bailian
|
|
359
|
+
ds --codex-profile bailian
|
|
360
|
+
```
|
|
361
|
+
|
|
362
|
+
### Persistent runner config
|
|
363
|
+
|
|
364
|
+
```yaml
|
|
365
|
+
codex:
|
|
366
|
+
enabled: true
|
|
367
|
+
binary: codex
|
|
368
|
+
config_dir: ~/.codex
|
|
369
|
+
profile: bailian
|
|
370
|
+
model: inherit
|
|
371
|
+
```
|
|
372
|
+
|
|
373
|
+
## Troubleshooting checklist
|
|
374
|
+
|
|
375
|
+
If a provider-backed profile still fails in DeepScientist:
|
|
376
|
+
|
|
377
|
+
1. run `codex --profile <name>` manually first
|
|
378
|
+
2. confirm the provider API key is present in the same shell
|
|
379
|
+
3. confirm the provider-specific Base URL is the Coding Plan endpoint, not the generic API endpoint
|
|
380
|
+
4. keep DeepScientist runner `model: inherit` unless you need an explicit override
|
|
381
|
+
5. run `ds doctor --codex-profile <name>`
|
|
382
|
+
6. only after that run `ds --codex-profile <name>`
|
package/docs/en/README.md
CHANGED
|
@@ -30,6 +30,8 @@ This page is the shortest path to the right document.
|
|
|
30
30
|
|
|
31
31
|
- [00 Quick Start](./00_QUICK_START.md)
|
|
32
32
|
Start here if you want to install DeepScientist, launch it locally, and create your first project.
|
|
33
|
+
- [15 Codex Provider Setup](./15_CODEX_PROVIDER_SETUP.md)
|
|
34
|
+
Read this when you want to run DeepScientist through MiniMax, GLM, Volcengine Ark, Alibaba Bailian, or another Codex profile.
|
|
33
35
|
- [12 Guided Workflow Tour](./12_GUIDED_WORKFLOW_TOUR.md)
|
|
34
36
|
Follow the real product flow from landing page to workspace, step by step.
|
|
35
37
|
- [02 Start Research Guide](./02_START_RESEARCH_GUIDE.md)
|
|
@@ -68,6 +70,8 @@ This page is the shortest path to the right document.
|
|
|
68
70
|
|
|
69
71
|
- [09 Doctor](./09_DOCTOR.md)
|
|
70
72
|
Start here for diagnostics and common runtime problems.
|
|
73
|
+
- [15 Codex Provider Setup](./15_CODEX_PROVIDER_SETUP.md)
|
|
74
|
+
Check this if the problem is likely in your Codex profile, provider endpoint, API key, or model configuration.
|
|
71
75
|
- [01 Settings Reference](./01_SETTINGS_REFERENCE.md)
|
|
72
76
|
Check this if the problem is likely caused by config, credentials, or connector setup.
|
|
73
77
|
|
|
@@ -36,7 +36,9 @@
|
|
|
36
36
|
建议你先准备好这些:
|
|
37
37
|
|
|
38
38
|
- 安装好 Node.js `>=18.18` 和 npm `>=9`;请优先参考官方页面安装:https://nodejs.org/en/download
|
|
39
|
-
-
|
|
39
|
+
- 一条已经可用的 Codex 路径:
|
|
40
|
+
- 默认 OpenAI 登录路径:`codex --login`(或 `codex`)
|
|
41
|
+
- provider-backed 路径:一个已经可用的 Codex profile,例如 `minimax`、`glm`、`ark`、`bailian`
|
|
40
42
|
- 模型或 API 凭证
|
|
41
43
|
- 如果任务比较重,准备好 GPU 或远程服务器
|
|
42
44
|
- 如果你要长期运行,优先准备 Docker 或其他隔离环境,并准备一个非 root 账号专门启动 DeepScientist
|
|
@@ -52,6 +54,10 @@
|
|
|
52
54
|
- 阿里百炼 Coding Plan:https://help.aliyun.com/zh/model-studio/coding-plan
|
|
53
55
|
- 火山引擎 Ark Coding Plan:https://www.volcengine.com/docs/82379/1925115?lang=zh
|
|
54
56
|
|
|
57
|
+
如果你准备使用 provider-backed 的 Codex profile,而不是默认 OpenAI 登录流,请继续看:
|
|
58
|
+
|
|
59
|
+
- [15 Codex Provider 配置](./15_CODEX_PROVIDER_SETUP.md)
|
|
60
|
+
|
|
55
61
|
## 1. 先安装 Node.js,再安装 DeepScientist
|
|
56
62
|
|
|
57
63
|
DeepScientist 目前仅支持 Linux 和 macOS。
|
|
@@ -73,7 +79,7 @@ npm install -g @researai/deepscientist
|
|
|
73
79
|
|
|
74
80
|
这一步会把 `ds` 命令安装到你的机器上。
|
|
75
81
|
|
|
76
|
-
DeepScientist 依赖一个可用的 Codex CLI
|
|
82
|
+
DeepScientist 依赖一个可用的 Codex CLI。它会优先使用你机器上已经可用的 `codex`,只有在本机找不到时才回退到 npm 包内置的依赖。如果安装完成后 `codex` 仍然不可用,请显式修复:
|
|
77
83
|
|
|
78
84
|
```bash
|
|
79
85
|
npm install -g @openai/codex
|
|
@@ -89,6 +95,10 @@ ds latex install-runtime
|
|
|
89
95
|
|
|
90
96
|
## 2. 第一次运行 `ds` 前,先完成 Codex 配置
|
|
91
97
|
|
|
98
|
+
这里有两条路径,二选一即可。
|
|
99
|
+
|
|
100
|
+
### 2.1 默认 OpenAI 登录路径
|
|
101
|
+
|
|
92
102
|
运行:
|
|
93
103
|
|
|
94
104
|
```bash
|
|
@@ -109,7 +119,48 @@ codex
|
|
|
109
119
|
ds doctor
|
|
110
120
|
```
|
|
111
121
|
|
|
112
|
-
|
|
122
|
+
### 2.2 provider-backed 的 Codex profile 路径
|
|
123
|
+
|
|
124
|
+
如果你已经在 MiniMax、GLM、火山方舟、阿里百炼或其他 provider 上配置了一个命名的 Codex profile,请先在终端里确认这个 profile 本身可用:
|
|
125
|
+
|
|
126
|
+
```bash
|
|
127
|
+
codex --profile m27
|
|
128
|
+
```
|
|
129
|
+
|
|
130
|
+
然后用同一个 profile 去跑 DeepScientist:
|
|
131
|
+
|
|
132
|
+
```bash
|
|
133
|
+
ds doctor --codex-profile m27
|
|
134
|
+
```
|
|
135
|
+
|
|
136
|
+
之后启动:
|
|
137
|
+
|
|
138
|
+
```bash
|
|
139
|
+
ds --codex-profile m27
|
|
140
|
+
```
|
|
141
|
+
|
|
142
|
+
如果你这一轮还想强制指定某一个 Codex 可执行文件,也可以一起加上 `--codex`:
|
|
143
|
+
|
|
144
|
+
```bash
|
|
145
|
+
ds doctor --codex /absolute/path/to/codex --codex-profile m27
|
|
146
|
+
ds --codex /absolute/path/to/codex --codex-profile m27
|
|
147
|
+
```
|
|
148
|
+
|
|
149
|
+
这里的 `m27` 是本仓库统一使用的 MiniMax profile 示例名。MiniMax 官方页面当前示例名是 `m21`,但 profile 名只是本地别名;如果你自己用了别的名字,就把命令里的名字一起改掉。
|
|
150
|
+
|
|
151
|
+
DeepScientist 会在启动前强制做一次真实的 Codex hello 探测。默认情况下,`~/DeepScientist/config/runners.yaml` 里的 runner 模型还是 `gpt-5.4`。如果你的 profile 希望模型由 profile 自己决定,请把 `runners.yaml` 里的 `model` 改成 `inherit`;或者直接使用 `--codex-profile <name>`,让这一轮启动自动继承 profile 对应的模型。
|
|
152
|
+
|
|
153
|
+
MiniMax 额外说明:
|
|
154
|
+
|
|
155
|
+
- 如果当前最新版 `@openai/codex` 和 MiniMax 走不通,直接安装 `npm install -g @openai/codex@0.57.0`
|
|
156
|
+
- 先创建 MiniMax `Coding Plan Key`
|
|
157
|
+
- 在当前 shell 里先执行 `unset OPENAI_API_KEY` 和 `unset OPENAI_BASE_URL`
|
|
158
|
+
- 使用 `https://api.minimaxi.com/v1`
|
|
159
|
+
- MiniMax 官方 Codex CLI 页面当前给出的 `codex-MiniMax-*` 模型名,在本地用提供的 key 实测并不能稳定通过 Codex CLI
|
|
160
|
+
- 当前本地实测可用的模型名是 `MiniMax-M2.7`
|
|
161
|
+
- DeepScientist 现在可以在 probe 和运行时自动适配 MiniMax profile-only 的 `model_provider` / `model` 配置形态
|
|
162
|
+
- 如果你还希望终端里的 `codex --profile <name>` 也直接可用,再在 `~/.codex/config.toml` 顶层补上 `model_provider = "minimax"` 和 `model = "MiniMax-M2.7"`
|
|
163
|
+
- 当 DeepScientist 检测到旧版 Codex CLI 不支持 `xhigh` 时,会自动把它降级成 `high`
|
|
113
164
|
|
|
114
165
|
## 3. 启动本地运行时
|
|
115
166
|
|
|
@@ -406,6 +406,7 @@ codex:
|
|
|
406
406
|
enabled: true
|
|
407
407
|
binary: codex
|
|
408
408
|
config_dir: ~/.codex
|
|
409
|
+
profile: ""
|
|
409
410
|
model: gpt-5.4
|
|
410
411
|
model_reasoning_effort: xhigh
|
|
411
412
|
approval_policy: on-request
|
|
@@ -443,6 +444,8 @@ claude:
|
|
|
443
444
|
- 页面标签:`Binary`
|
|
444
445
|
- 作用:启动 runner 时使用的命令名或绝对路径。
|
|
445
446
|
- `Test` 行为:检查该二进制是否在 `PATH` 上。
|
|
447
|
+
- `codex` 的解析顺序:环境变量覆盖、显式路径、本机 `PATH`、最后才是 bundled fallback。
|
|
448
|
+
- 临时使用说明:你也可以直接用 `ds --codex /absolute/path/to/codex` 临时覆盖这里的设置。
|
|
446
449
|
- 首次使用说明:DeepScientist 不会替你完成 Codex 认证。第一次运行 `ds` 前,必须先确保 `codex --login`(或 `codex`)已经成功完成。
|
|
447
450
|
- 修复说明:如果执行 `npm install -g @researai/deepscientist` 之后 bundled Codex 依赖仍然缺失,请显式安装 `npm install -g @openai/codex`。
|
|
448
451
|
|
|
@@ -453,6 +456,16 @@ claude:
|
|
|
453
456
|
- 页面标签:`Config directory`
|
|
454
457
|
- 作用:runner 的全局配置目录,通常存放认证和全局配置。
|
|
455
458
|
|
|
459
|
+
**`profile`**
|
|
460
|
+
|
|
461
|
+
- 类型:`string`
|
|
462
|
+
- 默认值:`""`
|
|
463
|
+
- 页面标签:`Codex profile`
|
|
464
|
+
- 作用:可选的 Codex profile 名称,会直接透传为 `codex --profile <name>`。
|
|
465
|
+
- 当你的 Codex CLI 已经配置成 MiniMax、GLM、火山方舟、阿里百炼或其他 provider-backed 路径时,就在这里填写。
|
|
466
|
+
- 临时使用说明:如果你不想持久化写配置,也可以保持这里为空,直接使用 `ds --codex-profile <name>` 启动。
|
|
467
|
+
- 组合使用说明:如果你还想临时指定 Codex 可执行文件,也可以组合成 `ds --codex /absolute/path/to/codex --codex-profile <name>`。
|
|
468
|
+
|
|
456
469
|
**`model`**
|
|
457
470
|
|
|
458
471
|
- 类型:`string`
|
|
@@ -460,6 +473,7 @@ claude:
|
|
|
460
473
|
- 页面标签:`Default model`
|
|
461
474
|
- 作用:项目和单次请求没有覆盖时的默认模型。
|
|
462
475
|
- 启动说明:DeepScientist 的 Codex 就绪探测会优先使用这里配置的模型。如果你的 Codex 账号无法访问它,DeepScientist 会自动回退到当前 Codex 默认模型,并持久化为 `model: inherit`。
|
|
476
|
+
- provider-profile 说明:当 `profile` 已设置时,通常推荐使用 `model: inherit`,让 Codex profile 自己决定 provider 侧模型。
|
|
463
477
|
|
|
464
478
|
**`model_reasoning_effort`**
|
|
465
479
|
|
|
@@ -469,6 +483,7 @@ claude:
|
|
|
469
483
|
- 允许值:`""`、`minimal`、`low`、`medium`、`high`、`xhigh`
|
|
470
484
|
- 作用:默认推理强度。
|
|
471
485
|
- 推荐:当前仓库的 Codex 默认就是 `xhigh`。
|
|
486
|
+
- 兼容性说明:当 DeepScientist 检测到 Codex CLI 低于 `0.63.0` 时,会在启动探测和实际 runner 命令里自动把 `xhigh` 降级成 `high`。这也覆盖了 MiniMax 当前推荐的 `@openai/codex@0.57.0` 路径。
|
|
472
487
|
|
|
473
488
|
**`approval_policy`**
|
|
474
489
|
|
|
@@ -115,13 +115,21 @@ type StartResearchTemplate = {
|
|
|
115
115
|
baseline_variant_id: string
|
|
116
116
|
baseline_urls: string
|
|
117
117
|
paper_urls: string
|
|
118
|
+
review_materials: string
|
|
118
119
|
runtime_constraints: string
|
|
119
120
|
objectives: string
|
|
120
121
|
need_research_paper: boolean
|
|
121
122
|
research_intensity: 'light' | 'balanced' | 'sprint'
|
|
122
123
|
decision_policy: 'autonomous' | 'user_gated'
|
|
123
124
|
launch_mode: 'standard' | 'custom'
|
|
124
|
-
custom_profile: 'continue_existing_state' | 'revision_rebuttal' | 'freeform'
|
|
125
|
+
custom_profile: 'continue_existing_state' | 'review_audit' | 'revision_rebuttal' | 'freeform'
|
|
126
|
+
review_followup_policy: 'audit_only' | 'auto_execute_followups' | 'user_gated_followups'
|
|
127
|
+
baseline_execution_policy:
|
|
128
|
+
| 'auto'
|
|
129
|
+
| 'must_reproduce_or_verify'
|
|
130
|
+
| 'reuse_existing_only'
|
|
131
|
+
| 'skip_unless_blocking'
|
|
132
|
+
manuscript_edit_mode: 'none' | 'copy_ready_text' | 'latex_required'
|
|
125
133
|
entry_state_summary: string
|
|
126
134
|
review_summary: string
|
|
127
135
|
custom_brief: string
|
|
@@ -187,6 +195,9 @@ type StartResearchContractFields = {
|
|
|
187
195
|
decision_policy,
|
|
188
196
|
launch_mode,
|
|
189
197
|
custom_profile,
|
|
198
|
+
review_followup_policy,
|
|
199
|
+
baseline_execution_policy,
|
|
200
|
+
manuscript_edit_mode,
|
|
190
201
|
scope,
|
|
191
202
|
baseline_mode,
|
|
192
203
|
resource_policy,
|
|
@@ -196,6 +207,7 @@ type StartResearchContractFields = {
|
|
|
196
207
|
objectives: string[],
|
|
197
208
|
baseline_urls: string[],
|
|
198
209
|
paper_urls: string[],
|
|
210
|
+
review_materials: string[],
|
|
199
211
|
entry_state_summary,
|
|
200
212
|
review_summary,
|
|
201
213
|
custom_brief,
|
|
@@ -265,11 +277,19 @@ type StartResearchContractFields = {
|
|
|
265
277
|
**`baseline_urls`**
|
|
266
278
|
|
|
267
279
|
- 当没有 registry baseline 时,作为恢复 baseline 的候选来源。
|
|
280
|
+
- 可以填写网络链接,也可以直接填写绝对本地文件 / 文件夹路径。
|
|
268
281
|
- 提交时转成 `string[]`。
|
|
269
282
|
|
|
270
283
|
**`paper_urls`**
|
|
271
284
|
|
|
272
|
-
- 论文、代码仓库、benchmark、leaderboard
|
|
285
|
+
- 论文、代码仓库、benchmark、leaderboard、manuscript 路径等参考资料。
|
|
286
|
+
- 可以填写网络链接,也可以直接填写绝对本地文件 / 文件夹路径。
|
|
287
|
+
- 提交时转成 `string[]`。
|
|
288
|
+
|
|
289
|
+
**`review_materials`**
|
|
290
|
+
|
|
291
|
+
- 主要用于 `review_audit` 或 `revision_rebuttal`。
|
|
292
|
+
- 每行填写一个 URL,或一个绝对本地文件 / 文件夹路径,用于 reviewer comments、decision letter、meta-review 或 revision packet。
|
|
273
293
|
- 提交时转成 `string[]`。
|
|
274
294
|
|
|
275
295
|
### 约束与目标
|
|
@@ -325,12 +345,49 @@ type StartResearchContractFields = {
|
|
|
325
345
|
- `continue_existing_state`
|
|
326
346
|
- 先审计已有 baseline、结果、草稿或混合资产
|
|
327
347
|
- prompt builder 会显式引导 agent 优先打开 `intake-audit`
|
|
348
|
+
- `review_audit`
|
|
349
|
+
- 这是一个对现有 draft / paper package 做独立 skeptical 审计的任务
|
|
350
|
+
- prompt builder 会显式引导 agent 优先打开 `review`
|
|
328
351
|
- `revision_rebuttal`
|
|
329
352
|
- 这是一个审稿回复、revision、rebuttal 类型任务
|
|
330
353
|
- prompt builder 会显式引导 agent 优先打开 `rebuttal`
|
|
331
354
|
- `freeform`
|
|
355
|
+
- 这是“其它”入口
|
|
332
356
|
- 以自定义 brief 为主,尽量少做额外假设
|
|
333
357
|
|
|
358
|
+
**`baseline_execution_policy`**
|
|
359
|
+
|
|
360
|
+
- 仅在 `launch_mode = custom` 时有意义。
|
|
361
|
+
- `auto`
|
|
362
|
+
- 让启动合同和当前证据自己决定
|
|
363
|
+
- `must_reproduce_or_verify`
|
|
364
|
+
- 在 reviewer-linked 的后续工作之前,先验证或恢复 rebuttal 关键依赖的 baseline / comparator
|
|
365
|
+
- `reuse_existing_only`
|
|
366
|
+
- 默认信任当前 baseline / 结果,除非它们明显不一致或不可用
|
|
367
|
+
- `skip_unless_blocking`
|
|
368
|
+
- 默认跳过 baseline 重跑,只有当某个 review / rebuttal 条目明确依赖缺失 comparator 时才补跑
|
|
369
|
+
|
|
370
|
+
**`review_followup_policy`**
|
|
371
|
+
|
|
372
|
+
- 主要用于 `review_audit`。
|
|
373
|
+
- `audit_only`
|
|
374
|
+
- 只完成审计产物和路由建议
|
|
375
|
+
- `auto_execute_followups`
|
|
376
|
+
- 审计后自动继续进入合理的实验和论文修改
|
|
377
|
+
- `user_gated_followups`
|
|
378
|
+
- 先完成审计,再在昂贵后续动作前等待你的批准
|
|
379
|
+
|
|
380
|
+
**`manuscript_edit_mode`**
|
|
381
|
+
|
|
382
|
+
- 主要用于 `review_audit` 和 `revision_rebuttal`。
|
|
383
|
+
- `none`
|
|
384
|
+
- 只输出规划产物
|
|
385
|
+
- `copy_ready_text`
|
|
386
|
+
- 输出 section-level 的可直接粘贴修改文本
|
|
387
|
+
- `latex_required`
|
|
388
|
+
- 优先把提供的 LaTeX 树当作写作表面,并输出 LaTeX-ready 的替换文本
|
|
389
|
+
- 如果选择这个模式,最好同时通过本地路径 / 文件夹输入提供 LaTeX 源目录
|
|
390
|
+
|
|
334
391
|
**`entry_state_summary`**
|
|
335
392
|
|
|
336
393
|
- 用自然语言概括当前已经存在什么。
|
|
@@ -387,6 +444,9 @@ type StartResearchContractFields = {
|
|
|
387
444
|
- `custom + continue_existing_state`
|
|
388
445
|
- 告诉 agent 先整理和信任排序已有资产
|
|
389
446
|
- 明确优先 `intake-audit`
|
|
447
|
+
- `custom + review_audit`
|
|
448
|
+
- 告诉 agent 当前 draft / paper 状态就是主动合同
|
|
449
|
+
- 明确优先 `review`
|
|
390
450
|
- `custom + revision_rebuttal`
|
|
391
451
|
- 告诉 agent 先理解 reviewer comments 和当前论文状态
|
|
392
452
|
- 明确优先 `rebuttal`
|
|
@@ -486,6 +546,9 @@ type StartResearchContractFields = {
|
|
|
486
546
|
"decision_policy": "user_gated",
|
|
487
547
|
"launch_mode": "custom",
|
|
488
548
|
"custom_profile": "revision_rebuttal",
|
|
549
|
+
"review_followup_policy": "audit_only",
|
|
550
|
+
"baseline_execution_policy": "skip_unless_blocking",
|
|
551
|
+
"manuscript_edit_mode": "latex_required",
|
|
489
552
|
"scope": "baseline_plus_direction",
|
|
490
553
|
"baseline_mode": "restore_from_url",
|
|
491
554
|
"resource_policy": "balanced",
|
|
@@ -499,6 +562,9 @@ type StartResearchContractFields = {
|
|
|
499
562
|
],
|
|
500
563
|
"baseline_urls": [],
|
|
501
564
|
"paper_urls": [],
|
|
565
|
+
"review_materials": [
|
|
566
|
+
"/absolute/path/to/review-comments.md"
|
|
567
|
+
],
|
|
502
568
|
"entry_state_summary": "A draft and previous experiment outputs already exist.",
|
|
503
569
|
"review_summary": "Reviewers asked for one stronger ablation, one extra baseline, and a clearer limitation paragraph.",
|
|
504
570
|
"custom_brief": "Treat the current manuscript and review packet as the active contract."
|
|
@@ -509,7 +575,7 @@ type StartResearchContractFields = {
|
|
|
509
575
|
## 运行时意义
|
|
510
576
|
|
|
511
577
|
- `startup_contract` 是项目的持久状态,不只是 UI 临时字段。
|
|
512
|
-
- 后续 prompt builder 还会继续读取 `launch_mode`、`custom_profile`、`entry_state_summary`、`review_summary`、`custom_brief`。
|
|
578
|
+
- 后续 prompt builder 还会继续读取 `launch_mode`、`custom_profile`、`review_followup_policy`、`baseline_execution_policy`、`manuscript_edit_mode`、`entry_state_summary`、`review_summary`、`review_materials`、`custom_brief`。
|
|
513
579
|
- 所以 `Start Research` 不只影响第一轮,还会影响后续路由判断。
|
|
514
580
|
|
|
515
581
|
## 修改检查清单
|