@researai/deepscientist 1.5.16 → 1.5.17

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (82)
  1. package/README.md +66 -23
  2. package/bin/ds.js +550 -19
  3. package/docs/en/00_QUICK_START.md +65 -5
  4. package/docs/en/01_SETTINGS_REFERENCE.md +1 -1
  5. package/docs/en/09_DOCTOR.md +14 -3
  6. package/docs/en/15_CODEX_PROVIDER_SETUP.md +12 -3
  7. package/docs/en/21_LOCAL_MODEL_BACKENDS_GUIDE.md +283 -0
  8. package/docs/en/91_DEVELOPMENT.md +237 -0
  9. package/docs/en/README.md +7 -3
  10. package/docs/zh/00_QUICK_START.md +54 -5
  11. package/docs/zh/01_SETTINGS_REFERENCE.md +1 -1
  12. package/docs/zh/09_DOCTOR.md +15 -4
  13. package/docs/zh/15_CODEX_PROVIDER_SETUP.md +12 -3
  14. package/docs/zh/21_LOCAL_MODEL_BACKENDS_GUIDE.md +281 -0
  15. package/docs/zh/README.md +7 -3
  16. package/install.sh +46 -4
  17. package/package.json +2 -1
  18. package/pyproject.toml +1 -1
  19. package/src/deepscientist/__init__.py +1 -1
  20. package/src/deepscientist/bridges/connectors.py +8 -2
  21. package/src/deepscientist/codex_cli_compat.py +185 -72
  22. package/src/deepscientist/config/service.py +154 -6
  23. package/src/deepscientist/daemon/api/handlers.py +130 -25
  24. package/src/deepscientist/daemon/api/router.py +5 -0
  25. package/src/deepscientist/daemon/app.py +446 -22
  26. package/src/deepscientist/diagnostics/__init__.py +6 -0
  27. package/src/deepscientist/diagnostics/runner_failures.py +130 -0
  28. package/src/deepscientist/doctor.py +207 -3
  29. package/src/deepscientist/prompts/builder.py +22 -4
  30. package/src/deepscientist/quest/service.py +413 -13
  31. package/src/deepscientist/runners/codex.py +59 -14
  32. package/src/deepscientist/shared.py +19 -0
  33. package/src/prompts/contracts/shared_interaction.md +3 -2
  34. package/src/prompts/system.md +13 -0
  35. package/src/prompts/system_copilot.md +13 -0
  36. package/src/tui/package.json +1 -1
  37. package/src/ui/dist/assets/{AiManusChatView-COFACy7V.js → AiManusChatView-Bv-Z8YpU.js} +44 -44
  38. package/src/ui/dist/assets/{AnalysisPlugin-DnSm0GZn.js → AnalysisPlugin-BCKAfjba.js} +1 -1
  39. package/src/ui/dist/assets/{CliPlugin-CvwCmDQ5.js → CliPlugin-BCKcpc35.js} +4 -4
  40. package/src/ui/dist/assets/{CodeEditorPlugin-cOqSa0xq.js → CodeEditorPlugin-DbOfSJ8K.js} +1 -1
  41. package/src/ui/dist/assets/{CodeViewerPlugin-itb0tltR.js → CodeViewerPlugin-CbaFRrUU.js} +3 -3
  42. package/src/ui/dist/assets/{DocViewerPlugin-DqKkiCI6.js → DocViewerPlugin-DAjLVeQD.js} +3 -3
  43. package/src/ui/dist/assets/{GitCommitViewerPlugin-DVgNHBCS.js → GitCommitViewerPlugin-CIUqbUDO.js} +1 -1
  44. package/src/ui/dist/assets/{GitDiffViewerPlugin-DxL2ezFG.js → GitDiffViewerPlugin-CQACjoAA.js} +1 -1
  45. package/src/ui/dist/assets/{GitSnapshotViewer-B_RQm1YZ.js → GitSnapshotViewer-0r4nLPke.js} +1 -1
  46. package/src/ui/dist/assets/{ImageViewerPlugin-tHqlXY3n.js → ImageViewerPlugin-nBOmI2v_.js} +3 -3
  47. package/src/ui/dist/assets/{LabCopilotPanel-ClMbq5Yu.js → LabCopilotPanel-BHxOxF4z.js} +1 -1
  48. package/src/ui/dist/assets/{LabPlugin-L_SuE8ow.js → LabPlugin-BKoZGs95.js} +1 -1
  49. package/src/ui/dist/assets/{LatexPlugin-B495DTXC.js → LatexPlugin-ZwtV8pIp.js} +1 -1
  50. package/src/ui/dist/assets/{MarkdownViewerPlugin-DG28-61B.js → MarkdownViewerPlugin-DKqVfKyW.js} +3 -3
  51. package/src/ui/dist/assets/{MarketplacePlugin-BiOGT-Kj.js → MarketplacePlugin-BwxStZ9D.js} +1 -1
  52. package/src/ui/dist/assets/{NotebookEditor-C-4Kt1p9.js → NotebookEditor-BEQhaQbt.js} +1 -1
  53. package/src/ui/dist/assets/{NotebookEditor-CVsj8h_T.js → NotebookEditor-DB9N_T9q.js} +23 -23
  54. package/src/ui/dist/assets/{PdfLoader-CASDQmxJ.js → PdfLoader-eWBONbQP.js} +1 -1
  55. package/src/ui/dist/assets/{PdfMarkdownPlugin-BFhwoKsY.js → PdfMarkdownPlugin-D22YOZL3.js} +1 -1
  56. package/src/ui/dist/assets/{PdfViewerPlugin-DcOzU9vd.js → PdfViewerPlugin-c-RK9DLM.js} +3 -3
  57. package/src/ui/dist/assets/{SearchPlugin-CHj7M58O.js → SearchPlugin-CxF9ytAx.js} +1 -1
  58. package/src/ui/dist/assets/{TextViewerPlugin-CB4DYfWO.js → TextViewerPlugin-C5xqeeUH.js} +2 -2
  59. package/src/ui/dist/assets/{VNCViewer-CjlbyCB3.js → VNCViewer-BoLGLnHz.js} +1 -1
  60. package/src/ui/dist/assets/{bot-CFkZY-JP.js → bot-DREQOxzP.js} +1 -1
  61. package/src/ui/dist/assets/{chevron-up-Dq5ofbht.js → chevron-up-C9Qpx4DE.js} +1 -1
  62. package/src/ui/dist/assets/{code-DLC6G24T.js → code-WlFHE7z_.js} +1 -1
  63. package/src/ui/dist/assets/{file-content-Dv4LoZec.js → file-content-BZMz3RYp.js} +1 -1
  64. package/src/ui/dist/assets/{file-diff-panel-Denq-lC3.js → file-diff-panel-CQhw0jS2.js} +1 -1
  65. package/src/ui/dist/assets/{file-socket-Cu4Qln7Y.js → file-socket-CfQPKQKj.js} +1 -1
  66. package/src/ui/dist/assets/{git-commit-horizontal-BUh6G52n.js → git-commit-horizontal-DxZ8DCZh.js} +1 -1
  67. package/src/ui/dist/assets/{image-B9HUUddG.js → image-Bgl4VIyx.js} +1 -1
  68. package/src/ui/dist/assets/{index-Cgla8biy.css → index-BpV6lusQ.css} +1 -1
  69. package/src/ui/dist/assets/{index-Gbl53BNp.js → index-CBNVuWcP.js} +363 -363
  70. package/src/ui/dist/assets/{index-wQ7RIIRd.js → index-CwNu1aH4.js} +1 -1
  71. package/src/ui/dist/assets/{index-B2B1sg-M.js → index-DrUnlf6K.js} +1 -1
  72. package/src/ui/dist/assets/{index-DRyx7vAc.js → index-NW-h8VzN.js} +1 -1
  73. package/src/ui/dist/assets/{pdf-effect-queue-ZtnHFCAi.js → pdf-effect-queue-J8OnM0jE.js} +1 -1
  74. package/src/ui/dist/assets/{popover-DL6h35vr.js → popover-CLc0pPP8.js} +1 -1
  75. package/src/ui/dist/assets/{project-sync-CsX08Qno.js → project-sync-C9IdzdZW.js} +1 -1
  76. package/src/ui/dist/assets/{select-DvmXt1yY.js → select-Cs2PmzwL.js} +1 -1
  77. package/src/ui/dist/assets/{sigma-7jpXazui.js → sigma-ClKcHAXm.js} +1 -1
  78. package/src/ui/dist/assets/{trash-xA7kFt8i.js → trash-DwpbFr3w.js} +1 -1
  79. package/src/ui/dist/assets/{useCliAccess-DsMwDjOp.js → useCliAccess-NQ8m0Let.js} +1 -1
  80. package/src/ui/dist/assets/{wrap-text-CwMn-iqb.js → wrap-text-BC-Hltpd.js} +1 -1
  81. package/src/ui/dist/assets/{zoom-out-R-GWEhzS.js → zoom-out-E_gaeAxL.js} +1 -1
  82. package/src/ui/dist/index.html +2 -2
@@ -1,5 +1,6 @@
1
1
  from __future__ import annotations
2
2
 
3
+ import ipaddress
3
4
  import json
4
5
  import re
5
6
  import shutil
@@ -7,14 +8,22 @@ import subprocess
7
8
  import tomllib
8
9
  from functools import lru_cache
9
10
  from pathlib import Path
11
+ from urllib.parse import urlparse
10
12
 
11
13
  from .shared import ensure_dir, read_text, write_text
12
14
 
13
15
  _MIN_XHIGH_SUPPORTED_VERSION = (0, 63, 0)
16
+ _CHAT_WIRE_COMPAT_VERSION = (0, 57, 0)
14
17
  _CODEX_VERSION_PATTERN = re.compile(r"codex-cli\s+(\d+)\.(\d+)\.(\d+)", re.IGNORECASE)
15
18
  _CODEX_HOME_SYNCED_FILES = ("config.toml", "auth.json")
16
19
  _CODEX_HOME_SYNCED_DIRS = ("skills", "agents", "prompts")
17
20
  _CODEX_HOME_QUEST_OVERLAY_DIRS = ("skills", "prompts")
21
+ _ROOT_TABLE_SECTION_PATTERN = re.compile(r"^\s*\[")
22
+ _ROOT_MODEL_ASSIGNMENT_PATTERN = re.compile(r"^\s*(model_provider|model)\s*=")
23
+ _COMPAT_BEGIN_MARKER = "# BEGIN DEEPSCIENTIST PROFILE COMPAT"
24
+ _COMPAT_END_MARKER = "# END DEEPSCIENTIST PROFILE COMPAT"
25
+ _MISSING_ENV_PATTERN = re.compile(r"Missing environment variable:\s*[`'\"]?([^`'\"\s]+)", re.IGNORECASE)
26
+ _LOCAL_PROVIDER_HOST_ALIASES = {"localhost", "host.docker.internal"}
18
27
 
19
28
 
20
29
  def parse_codex_cli_version(text: str) -> tuple[int, int, int] | None:
@@ -48,6 +57,50 @@ def format_codex_cli_version(version: tuple[int, int, int] | None) -> str:
48
57
  return ".".join(str(part) for part in version)
49
58
 
50
59
 
60
+ def chat_wire_compatible_codex_version() -> tuple[int, int, int]:
61
+ return _CHAT_WIRE_COMPAT_VERSION
62
+
63
+
64
+ def _split_root_table_lines(config_text: str) -> tuple[list[str], list[str]]:
65
+ lines = str(config_text or "").splitlines()
66
+ for index, line in enumerate(lines):
67
+ if _ROOT_TABLE_SECTION_PATTERN.match(line):
68
+ return lines[:index], lines[index:]
69
+ return lines, []
70
+
71
+
72
+ def _strip_root_model_assignments(lines: list[str]) -> list[str]:
73
+ filtered: list[str] = []
74
+ skipping_compat_block = False
75
+ for line in lines:
76
+ stripped = line.strip()
77
+ if stripped == _COMPAT_BEGIN_MARKER:
78
+ skipping_compat_block = True
79
+ continue
80
+ if skipping_compat_block:
81
+ if stripped == _COMPAT_END_MARKER:
82
+ skipping_compat_block = False
83
+ continue
84
+ if _ROOT_MODEL_ASSIGNMENT_PATTERN.match(line):
85
+ continue
86
+ filtered.append(line)
87
+ while filtered and not filtered[0].strip():
88
+ filtered.pop(0)
89
+ while filtered and not filtered[-1].strip():
90
+ filtered.pop()
91
+ return filtered
92
+
93
+
94
+ def _join_field_names(fields: list[str]) -> str:
95
+ if not fields:
96
+ return ""
97
+ if len(fields) == 1:
98
+ return fields[0]
99
+ if len(fields) == 2:
100
+ return f"{fields[0]} and {fields[1]}"
101
+ return ", ".join(fields[:-1]) + f", and {fields[-1]}"
102
+
103
+
51
104
  def normalize_codex_reasoning_effort(
52
105
  reasoning_effort: str | None,
53
106
  *,
@@ -93,33 +146,54 @@ def adapt_profile_only_provider_config(
93
146
  if not isinstance(profile_payload, dict):
94
147
  return config_text, None
95
148
 
96
- prefix_lines: list[str] = []
97
- injected_fields: list[str] = []
98
- if "model_provider" not in parsed:
99
- model_provider = str(profile_payload.get("model_provider") or "").strip()
100
- if model_provider:
101
- prefix_lines.append(f"model_provider = {json.dumps(model_provider, ensure_ascii=False)}")
102
- injected_fields.append("model_provider")
103
- if "model" not in parsed:
104
- model = str(profile_payload.get("model") or "").strip()
105
- if model:
106
- prefix_lines.append(f"model = {json.dumps(model, ensure_ascii=False)}")
107
- injected_fields.append("model")
108
-
109
- if not prefix_lines:
149
+ profile_model_provider = str(profile_payload.get("model_provider") or "").strip()
150
+ profile_model = str(profile_payload.get("model") or "").strip()
151
+ top_level_model_provider = str(parsed.get("model_provider") or "").strip()
152
+ top_level_model = str(parsed.get("model") or "").strip()
153
+
154
+ root_lines: list[str] = []
155
+ changed_fields: list[str] = []
156
+ conflicted_fields: list[str] = []
157
+ if profile_model_provider and top_level_model_provider != profile_model_provider:
158
+ root_lines.append(f"model_provider = {json.dumps(profile_model_provider, ensure_ascii=False)}")
159
+ changed_fields.append("model_provider")
160
+ if top_level_model_provider:
161
+ conflicted_fields.append("model_provider")
162
+ elif profile_model_provider:
163
+ root_lines.append(f"model_provider = {json.dumps(profile_model_provider, ensure_ascii=False)}")
164
+ if profile_model and top_level_model != profile_model:
165
+ root_lines.append(f"model = {json.dumps(profile_model, ensure_ascii=False)}")
166
+ changed_fields.append("model")
167
+ if top_level_model:
168
+ conflicted_fields.append("model")
169
+ elif profile_model:
170
+ root_lines.append(f"model = {json.dumps(profile_model, ensure_ascii=False)}")
171
+
172
+ if not changed_fields:
110
173
  return config_text, None
111
174
 
112
- adapted = (
113
- "# BEGIN DEEPSCIENTIST PROFILE COMPAT\n"
114
- + "\n".join(prefix_lines)
115
- + "\n# END DEEPSCIENTIST PROFILE COMPAT\n\n"
116
- + config_text.lstrip()
117
- )
175
+ root_prefix, body_lines = _split_root_table_lines(config_text)
176
+ cleaned_root = _strip_root_model_assignments(root_prefix)
177
+ adapted_lines: list[str] = [
178
+ _COMPAT_BEGIN_MARKER,
179
+ *root_lines,
180
+ _COMPAT_END_MARKER,
181
+ ]
182
+ if cleaned_root:
183
+ adapted_lines.append("")
184
+ adapted_lines.extend(cleaned_root)
185
+ if body_lines:
186
+ adapted_lines.append("")
187
+ adapted_lines.extend(body_lines)
188
+ adapted = "\n".join(adapted_lines).rstrip() + "\n"
189
+ field_text = _join_field_names(changed_fields)
118
190
  return (
119
191
  adapted,
120
192
  (
121
- f"DeepScientist promoted `{normalized_profile}` profile "
122
- f"{', '.join(injected_fields)} to the top level for Codex compatibility."
193
+ f"DeepScientist overrode conflicting top-level {field_text} with values from profile "
194
+ f"`{normalized_profile}` for Codex compatibility."
195
+ if conflicted_fields
196
+ else f"DeepScientist promoted `{normalized_profile}` profile {field_text} to the top level for Codex compatibility."
123
197
  ),
124
198
  )
125
199
 
@@ -235,61 +309,47 @@ def materialize_codex_runtime_home(
235
309
  return warning
236
310
 
237
311
 
238
- def provider_profile_metadata(
312
+ def _empty_provider_metadata() -> dict[str, str | bool | None]:
313
+ return {
314
+ "provider": None,
315
+ "model": None,
316
+ "env_key": None,
317
+ "base_url": None,
318
+ "wire_api": None,
319
+ "requires_openai_auth": None,
320
+ }
321
+
322
+
323
+ def active_provider_metadata(
239
324
  config_text: str,
240
325
  *,
241
- profile: str,
326
+ profile: str | None = None,
242
327
  ) -> dict[str, str | bool | None]:
243
328
  normalized_profile = str(profile or "").strip()
244
- if not normalized_profile or not str(config_text or "").strip():
245
- return {
246
- "provider": None,
247
- "model": None,
248
- "env_key": None,
249
- "base_url": None,
250
- "wire_api": None,
251
- "requires_openai_auth": None,
252
- }
329
+ if not str(config_text or "").strip():
330
+ return _empty_provider_metadata()
253
331
  try:
254
332
  parsed = tomllib.loads(config_text)
255
333
  except tomllib.TOMLDecodeError:
256
- return {
257
- "provider": None,
258
- "model": None,
259
- "env_key": None,
260
- "base_url": None,
261
- "wire_api": None,
262
- "requires_openai_auth": None,
263
- }
264
-
265
- profiles = parsed.get("profiles")
266
- if not isinstance(profiles, dict):
267
- return {
268
- "provider": None,
269
- "model": None,
270
- "env_key": None,
271
- "base_url": None,
272
- "wire_api": None,
273
- "requires_openai_auth": None,
274
- }
275
- profile_payload = profiles.get(normalized_profile)
276
- if not isinstance(profile_payload, dict):
277
- return {
278
- "provider": None,
279
- "model": None,
280
- "env_key": None,
281
- "base_url": None,
282
- "wire_api": None,
283
- "requires_openai_auth": None,
284
- }
334
+ return _empty_provider_metadata()
335
+
336
+ profile_payload: dict | None = None
337
+ if normalized_profile:
338
+ profiles = parsed.get("profiles")
339
+ if not isinstance(profiles, dict):
340
+ return _empty_provider_metadata()
341
+ candidate_profile = profiles.get(normalized_profile)
342
+ if not isinstance(candidate_profile, dict):
343
+ return _empty_provider_metadata()
344
+ profile_payload = candidate_profile
285
345
 
286
346
  model_provider = str(
287
- profile_payload.get("model_provider")
347
+ (profile_payload or {}).get("model_provider")
288
348
  or parsed.get("model_provider")
289
349
  or ""
290
350
  ).strip() or None
291
351
  model = str(
292
- profile_payload.get("model")
352
+ (profile_payload or {}).get("model")
293
353
  or parsed.get("model")
294
354
  or ""
295
355
  ).strip() or None
@@ -331,6 +391,17 @@ def provider_profile_metadata(
331
391
  }
332
392
 
333
393
 
394
+ def provider_profile_metadata(
395
+ config_text: str,
396
+ *,
397
+ profile: str,
398
+ ) -> dict[str, str | bool | None]:
399
+ normalized_profile = str(profile or "").strip()
400
+ if not normalized_profile:
401
+ return _empty_provider_metadata()
402
+ return active_provider_metadata(config_text, profile=normalized_profile)
403
+
404
+
334
405
  def provider_profile_metadata_from_home(
335
406
  config_home: str | Path,
336
407
  *,
@@ -338,12 +409,54 @@ def provider_profile_metadata_from_home(
338
409
  ) -> dict[str, str | bool | None]:
339
410
  config_path = Path(config_home).expanduser() / "config.toml"
340
411
  if not config_path.exists():
341
- return {
342
- "provider": None,
343
- "model": None,
344
- "env_key": None,
345
- "base_url": None,
346
- "wire_api": None,
347
- "requires_openai_auth": None,
348
- }
412
+ return _empty_provider_metadata()
349
413
  return provider_profile_metadata(config_path.read_text(encoding="utf-8"), profile=profile)
414
+
415
+
416
+ def provider_base_url_looks_local(base_url: str | None) -> bool:
417
+ normalized = str(base_url or "").strip()
418
+ if not normalized:
419
+ return False
420
+ parsed = urlparse(normalized)
421
+ hostname = str(parsed.hostname or "").strip().lower()
422
+ if not hostname:
423
+ return False
424
+ if hostname in _LOCAL_PROVIDER_HOST_ALIASES or hostname.endswith(".local"):
425
+ return True
426
+ try:
427
+ ip = ipaddress.ip_address(hostname)
428
+ except ValueError:
429
+ return False
430
+ return ip.is_loopback or ip.is_private or ip.is_link_local or ip.is_unspecified
431
+
432
+
433
+ def missing_provider_env_key(
434
+ metadata: dict[str, str | bool | None],
435
+ env: dict[str, str] | None,
436
+ ) -> str | None:
437
+ env_key = str((metadata or {}).get("env_key") or "").strip()
438
+ if not env_key:
439
+ return None
440
+ env_value = str((env or {}).get(env_key) or "").strip()
441
+ if env_value:
442
+ return None
443
+ return env_key
444
+
445
+
446
+ def missing_provider_env_key_from_text(*texts: str) -> str | None:
447
+ for text in texts:
448
+ match = _MISSING_ENV_PATTERN.search(str(text or ""))
449
+ if match:
450
+ return str(match.group(1) or "").strip() or None
451
+ return None
452
+
453
+
454
+ def active_provider_metadata_from_home(
455
+ config_home: str | Path,
456
+ *,
457
+ profile: str | None = None,
458
+ ) -> dict[str, str | bool | None]:
459
+ config_path = Path(config_home).expanduser() / "config.toml"
460
+ if not config_path.exists():
461
+ return _empty_provider_metadata()
462
+ return active_provider_metadata(config_path.read_text(encoding="utf-8"), profile=profile)
@@ -10,10 +10,16 @@ from urllib.error import URLError
10
10
  from urllib.request import Request
11
11
 
12
12
  from ..codex_cli_compat import (
13
+ active_provider_metadata_from_home,
13
14
  adapt_profile_only_provider_config,
15
+ chat_wire_compatible_codex_version,
16
+ codex_cli_version,
17
+ format_codex_cli_version,
14
18
  materialize_codex_runtime_home,
19
+ missing_provider_env_key,
20
+ missing_provider_env_key_from_text,
15
21
  normalize_codex_reasoning_effort,
16
- provider_profile_metadata_from_home,
22
+ provider_base_url_looks_local,
17
23
  )
18
24
  from ..connector.connector_profiles import PROFILEABLE_CONNECTOR_NAMES, list_connector_profiles, normalize_connector_config
19
25
  from ..connector_runtime import build_discovered_target, infer_connector_transport
@@ -1267,32 +1273,142 @@ Use **Test** when the file exposes runtime dependencies.
1267
1273
  ]
1268
1274
  )
1269
1275
  else:
1270
- guidance.append("Run `codex --login` (or `codex`) once and finish authentication before starting DeepScientist.")
1276
+ guidance.append("Run `codex login` (or just `codex`) once and finish authentication before starting DeepScientist.")
1271
1277
  guidance.append(
1272
1278
  "If you use a custom Codex path, either set `runners.codex.binary` or launch with `ds --codex /absolute/path/to/codex`."
1273
1279
  )
1274
1280
  return guidance
1275
1281
 
1282
+ @staticmethod
1283
+ def _provider_profile_probe_hints(metadata: dict[str, object]) -> list[str]:
1284
+ base_url = str(metadata.get("base_url") or "").strip().lower()
1285
+ model = str(metadata.get("model") or "").strip().lower()
1286
+ provider = str(metadata.get("provider") or "").strip().lower()
1287
+ if "dashscope.aliyuncs.com" not in base_url and "bailian" not in provider and "qwen" not in model:
1288
+ return []
1289
+ if "coding.dashscope.aliyuncs.com" not in base_url:
1290
+ return [
1291
+ "Alibaba Bailian's generic DashScope / Qwen platform API is not supported by the Codex-backed DeepScientist path.",
1292
+ "If you want to use Qwen here, switch the profile to the Bailian Coding Plan endpoint: `https://coding.dashscope.aliyuncs.com/v1`.",
1293
+ ]
1294
+ return [
1295
+ "For Qwen on Alibaba Bailian, only the Coding Plan endpoint is supported here; do not switch back to the generic Bailian / DashScope Qwen API.",
1296
+ ]
1297
+
1298
+ @staticmethod
1299
+ def _local_provider_probe_hints(metadata: dict[str, object]) -> list[str]:
1300
+ base_url = str(metadata.get("base_url") or "").strip()
1301
+ wire_api = str(metadata.get("wire_api") or "").strip().lower()
1302
+ requires_openai_auth = metadata.get("requires_openai_auth")
1303
+ if not base_url:
1304
+ return []
1305
+ is_local_provider = provider_base_url_looks_local(base_url)
1306
+ if requires_openai_auth is not False and not is_local_provider:
1307
+ return []
1308
+ hints = [
1309
+ f"Verify the local provider directly: `curl {base_url}/models`.",
1310
+ f"Then verify the Responses API explicitly: `curl {base_url}/responses ...`.",
1311
+ "Latest Codex CLI requires `wire_api = \"responses\"`; chat-only provider configs are no longer accepted.",
1312
+ "If `/v1/chat/completions` works but `/v1/responses` fails, that backend is not currently compatible with the latest Codex runner.",
1313
+ "If the backend is chat-only and you still want to test it through Codex, try `@openai/codex@0.57.0` with top-level `model_provider` / `model` plus `wire_api = \"chat\"`.",
1314
+ "For local model backends, vLLM is the safest path. Ollama only works when its `/v1/responses` endpoint works; chat-only SGLang deployments will fail with the latest Codex.",
1315
+ ]
1316
+ if requires_openai_auth is not False:
1317
+ hints.insert(
1318
+ 0,
1319
+ "For local or self-hosted providers, add `requires_openai_auth = false` so DeepScientist can remove conflicting `OPENAI_*` auth variables.",
1320
+ )
1321
+ if not wire_api:
1322
+ hints.insert(0, "Your current provider config does not declare `wire_api`; set `wire_api = \"responses\"` first.")
1323
+ elif wire_api != "responses":
1324
+ hints.insert(0, f"Your current provider config uses `wire_api = \"{wire_api}\"`; switch it to `wire_api = \"responses\"` first.")
1325
+ return hints
1326
+
1327
+ @staticmethod
1328
+ def _missing_provider_env_guidance(
1329
+ *,
1330
+ profile: str,
1331
+ env_key: str,
1332
+ metadata: dict[str, object],
1333
+ ) -> list[str]:
1334
+ guidance = [
1335
+ f"Set `runners.codex.env.{env_key}` in `~/DeepScientist/config/runners.yaml`, or export `{env_key}` before launching `ds`.",
1336
+ ]
1337
+ if provider_base_url_looks_local(str(metadata.get("base_url") or "").strip()):
1338
+ guidance.append(
1339
+ f"If `{env_key}` is only a placeholder for a local OpenAI-compatible backend, any non-empty value such as `1234` is usually enough."
1340
+ )
1341
+ if metadata.get("requires_openai_auth") is not False:
1342
+ guidance.append(
1343
+ "Also add `requires_openai_auth = false` to that local provider profile so DeepScientist can remove conflicting `OPENAI_*` auth variables."
1344
+ )
1345
+ guidance.append(
1346
+ f"Before retrying DeepScientist, run a real request such as `codex exec --profile {profile} --json --cd /tmp --skip-git-repo-check -` and verify it returns `HELLO`."
1347
+ )
1348
+ return guidance
1349
+
1350
+ @staticmethod
1351
+ def _chat_wire_probe_version_block(
1352
+ metadata: dict[str, object],
1353
+ *,
1354
+ resolved_binary: str,
1355
+ ) -> tuple[tuple[int, int, int] | None, dict[str, object] | None]:
1356
+ wire_api = str(metadata.get("wire_api") or "").strip().lower()
1357
+ if wire_api != "chat":
1358
+ return None, None
1359
+ detected_version = codex_cli_version(str(resolved_binary or ""))
1360
+ required_version = chat_wire_compatible_codex_version()
1361
+ if detected_version == required_version:
1362
+ return detected_version, None
1363
+ required_text = format_codex_cli_version(required_version)
1364
+ detected_text = format_codex_cli_version(detected_version)
1365
+ errors = [
1366
+ "This provider uses `wire_api = \"chat\"`, but DeepScientist only probes chat-mode providers with `codex-cli 0.57.0`.",
1367
+ ]
1368
+ if detected_text:
1369
+ errors.append(f"Detected Codex CLI version: `{detected_text}`.")
1370
+ else:
1371
+ errors.append("DeepScientist could not determine the active Codex CLI version from the configured binary.")
1372
+ guidance = [
1373
+ "Install `npm install -g @openai/codex@0.57.0`, or point DeepScientist at a dedicated `0.57.0` binary with `ds --codex /absolute/path/to/codex`.",
1374
+ "If you want to stay on a newer Codex CLI, switch the provider/backend to `wire_api = \"responses\"` instead.",
1375
+ "For chat-mode fallback configs, keep the compatible top-level `model_provider` / `model` entries in `~/.codex/config.toml`.",
1376
+ ]
1377
+ return (
1378
+ detected_version,
1379
+ {
1380
+ "summary": f"Codex startup probe blocked by chat-mode provider compatibility. Required Codex CLI: `{required_text}`.",
1381
+ "errors": errors,
1382
+ "guidance": guidance,
1383
+ },
1384
+ )
1385
+
1276
1386
  def _codex_probe_failure_guidance(self, config: dict) -> tuple[list[str], list[str]]:
1277
1387
  profile = self._codex_profile_name(config)
1388
+ config_dir = str(config.get("config_dir") or "~/.codex").strip()
1389
+ metadata = active_provider_metadata_from_home(config_dir, profile=profile or None) if config_dir else {}
1278
1390
  if profile:
1391
+ provider_hints = self._provider_profile_probe_hints(metadata)
1392
+ local_hints = self._local_provider_probe_hints(metadata)
1279
1393
  return (
1280
1394
  [
1281
1395
  f"Codex profile `{profile}` did not complete the startup hello probe successfully.",
1282
1396
  ],
1283
1397
  [
1284
- f"Run `codex --profile {profile}` in a terminal and confirm that profile can start normally.",
1398
+ f"Run `codex exec --profile {profile} --json --cd /tmp --skip-git-repo-check -` in a terminal and confirm that a real `HELLO` request succeeds.",
1285
1399
  "If the profile uses a custom provider, make sure its API key, Base URL, and model configuration are available to Codex.",
1286
1400
  "If the provider expects the model from the Codex profile itself, set `model: inherit` in `~/DeepScientist/config/runners.yaml`.",
1401
+ *provider_hints,
1402
+ *local_hints,
1287
1403
  "Then run `ds doctor` and start DeepScientist again.",
1288
1404
  ],
1289
1405
  )
1290
1406
  return (
1291
1407
  [
1292
- "Run `codex --login` (or `codex`) once and complete login before starting DeepScientist.",
1408
+ "Run `codex login` (or just `codex`) once and complete login before starting DeepScientist.",
1293
1409
  ],
1294
1410
  [
1295
- "Run `codex --login` (or `codex`) in a terminal and complete login or first-run setup.",
1411
+ "Run `codex login` (or just `codex`) in a terminal and complete login or first-run setup.",
1296
1412
  "If `codex` is missing, install it explicitly with `npm install -g @openai/codex`.",
1297
1413
  "If the configured model is not available to your Codex account, update `~/DeepScientist/config/runners.yaml` and try again.",
1298
1414
  "Then run `ds doctor` and start DeepScientist again.",
@@ -1430,10 +1546,20 @@ Use **Test** when the file exposes runtime dependencies.
1430
1546
  env["CODEX_HOME"] = prepared_home
1431
1547
  if profile_config_warning:
1432
1548
  compatibility_warnings.append(profile_config_warning)
1433
- metadata = provider_profile_metadata_from_home(env.get("CODEX_HOME") or config_dir, profile=profile)
1549
+ metadata = active_provider_metadata_from_home(env.get("CODEX_HOME") or config_dir, profile=profile or None)
1434
1550
  if metadata.get("requires_openai_auth") is False:
1435
1551
  env.pop("OPENAI_API_KEY", None)
1436
1552
  env.pop("OPENAI_BASE_URL", None)
1553
+ configured_provider_env_key = missing_provider_env_key(metadata, env)
1554
+ details["provider_env_key"] = str(metadata.get("env_key") or "").strip() or None
1555
+ details["provider_env_missing"] = bool(configured_provider_env_key)
1556
+ details["provider_wire_api"] = str(metadata.get("wire_api") or "").strip() or None
1557
+ detected_codex_version, chat_wire_block = self._chat_wire_probe_version_block(
1558
+ metadata,
1559
+ resolved_binary=resolved_binary,
1560
+ )
1561
+ if detected_codex_version is not None:
1562
+ details["codex_cli_version"] = format_codex_cli_version(detected_codex_version) or None
1437
1563
  prompt = "Reply with exactly HELLO."
1438
1564
  if reasoning_effort_warning:
1439
1565
  compatibility_warnings.append(reasoning_effort_warning)
@@ -1442,6 +1568,15 @@ Use **Test** when the file exposes runtime dependencies.
1442
1568
  f"Codex profile `{profile}` is provider-backed. DeepScientist is probing it with `model: inherit`."
1443
1569
  )
1444
1570
  base_warnings: list[str] = list(compatibility_warnings)
1571
+ if chat_wire_block is not None:
1572
+ return {
1573
+ "ok": False,
1574
+ "summary": str(chat_wire_block["summary"]),
1575
+ "warnings": base_warnings,
1576
+ "errors": list(chat_wire_block["errors"]),
1577
+ "details": details,
1578
+ "guidance": list(chat_wire_block["guidance"]),
1579
+ }
1445
1580
 
1446
1581
  def run_probe_once(model_for_command: str) -> tuple[list[str], subprocess.CompletedProcess[str] | None, subprocess.TimeoutExpired | None]:
1447
1582
  command = self._build_codex_probe_command(
@@ -1568,7 +1703,20 @@ Use **Test** when the file exposes runtime dependencies.
1568
1703
  if details.get("model_fallback_attempted") and not details.get("model_fallback_used"):
1569
1704
  warnings.append("DeepScientist also tried the current Codex default model, but that fallback probe did not succeed.")
1570
1705
  errors.extend(self._codex_probe_failure_guidance(config)[0])
1706
+ missing_env_key = missing_provider_env_key_from_text(stdout_text, stderr_text) or configured_provider_env_key
1571
1707
  failure_guidance = self._codex_probe_failure_guidance(config)[1]
1708
+ if not ok and missing_env_key and profile:
1709
+ errors.append(
1710
+ f"Codex profile `{profile}` requires environment variable `{missing_env_key}`, but DeepScientist did not receive it."
1711
+ )
1712
+ failure_guidance = [
1713
+ *self._missing_provider_env_guidance(
1714
+ profile=profile,
1715
+ env_key=missing_env_key,
1716
+ metadata=metadata,
1717
+ ),
1718
+ *failure_guidance,
1719
+ ]
1572
1720
  return {
1573
1721
  "ok": ok,
1574
1722
  "summary": "Codex startup probe completed." if ok else "Codex startup probe failed.",