deepresearch-flow 0.2.1__py3-none-any.whl → 0.3.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- deepresearch_flow/cli.py +2 -0
- deepresearch_flow/paper/config.py +15 -0
- deepresearch_flow/paper/db.py +9 -0
- deepresearch_flow/paper/llm.py +2 -0
- deepresearch_flow/paper/web/app.py +413 -20
- deepresearch_flow/recognize/cli.py +157 -3
- deepresearch_flow/recognize/organize.py +58 -0
- deepresearch_flow/translator/__init__.py +1 -0
- deepresearch_flow/translator/cli.py +451 -0
- deepresearch_flow/translator/config.py +19 -0
- deepresearch_flow/translator/engine.py +959 -0
- deepresearch_flow/translator/fixers.py +451 -0
- deepresearch_flow/translator/placeholder.py +62 -0
- deepresearch_flow/translator/prompts.py +116 -0
- deepresearch_flow/translator/protector.py +291 -0
- deepresearch_flow/translator/segment.py +180 -0
- deepresearch_flow-0.3.0.dist-info/METADATA +306 -0
- {deepresearch_flow-0.2.1.dist-info → deepresearch_flow-0.3.0.dist-info}/RECORD +22 -13
- deepresearch_flow-0.2.1.dist-info/METADATA +0 -424
- {deepresearch_flow-0.2.1.dist-info → deepresearch_flow-0.3.0.dist-info}/WHEEL +0 -0
- {deepresearch_flow-0.2.1.dist-info → deepresearch_flow-0.3.0.dist-info}/entry_points.txt +0 -0
- {deepresearch_flow-0.2.1.dist-info → deepresearch_flow-0.3.0.dist-info}/licenses/LICENSE +0 -0
- {deepresearch_flow-0.2.1.dist-info → deepresearch_flow-0.3.0.dist-info}/top_level.txt +0 -0
deepresearch_flow/cli.py
CHANGED
```diff
@@ -4,6 +4,7 @@ import click
 
 from deepresearch_flow.paper.cli import paper
 from deepresearch_flow.recognize.cli import recognize
+from deepresearch_flow.translator.cli import translator
 
 
 @click.group()
@@ -13,6 +14,7 @@ def cli() -> None:
 
 cli.add_command(paper)
 cli.add_command(recognize)
+cli.add_command(translator)
 
 
 def main() -> None:
```
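For orientation, the wiring above is click's standard group composition: the translator command group defined in deepresearch_flow/translator/cli.py is imported and attached to the root group with cli.add_command. A minimal, self-contained sketch of the same pattern (the subcommand below is a hypothetical stand-in, not one of the package's real commands):

```python
import click
from click.testing import CliRunner

@click.group()
def translator() -> None:
    """Stand-in for the group defined in deepresearch_flow/translator/cli.py."""

@translator.command()
def demo() -> None:  # hypothetical subcommand, for illustration only
    click.echo("translator subcommand ran")

@click.group()
def cli() -> None:
    """Mirrors the root group in deepresearch_flow/cli.py."""

cli.add_command(translator)

# Once attached, the group is reachable as a subcommand of the root CLI.
result = CliRunner().invoke(cli, ["translator", "demo"])
assert result.output.strip() == "translator subcommand ran"
```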
deepresearch_flow/paper/config.py
CHANGED
```diff
@@ -40,6 +40,7 @@ class ProviderConfig:
     location: str | None
     credentials_path: str | None
     anthropic_version: str | None
+    max_tokens: int | None
     structured_mode: str
     extra_headers: dict[str, str]
     system_prompt: str | None
```
```diff
@@ -102,6 +103,15 @@ def _as_str(value: Any, default: str | None = None) -> str | None:
     return str(value)
 
 
+def _ensure_http_scheme(base_url: str, *, default_scheme: str = "http://") -> str:
+    normalized = base_url.strip()
+    if normalized.startswith(("http://", "https://")):
+        scheme, rest = normalized.split("://", 1)
+        rest = rest.lstrip("/")
+        return f"{scheme}://{rest}" if rest else f"{scheme}://"
+    return f"{default_scheme}{normalized.lstrip('/')}"
+
+
 def load_config(path: str) -> PaperConfig:
     config_path = Path(path)
     if not config_path.exists():
```
```diff
@@ -158,6 +168,8 @@ def load_config(path: str) -> PaperConfig:
             raise ValueError(f"Provider '{name}' requires base_url")
         elif provider_type == "azure_openai" and endpoint:
             base_url = endpoint
+        if provider_type == "ollama" and base_url:
+            base_url = _ensure_http_scheme(base_url)
 
         api_keys = _as_list(provider.get("api_keys"))
         if not api_keys:
```
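The new _ensure_http_scheme helper, applied above to ollama base URLs, can be exercised on its own; the expected values below follow directly from the function body shown in the previous hunk (a sketch, not part of the package's tests):

```python
# Behavior of _ensure_http_scheme as defined above: a bare host gets the
# default scheme, an existing scheme is preserved, and stray leading slashes
# after the scheme are collapsed.
def _ensure_http_scheme(base_url: str, *, default_scheme: str = "http://") -> str:
    normalized = base_url.strip()
    if normalized.startswith(("http://", "https://")):
        scheme, rest = normalized.split("://", 1)
        rest = rest.lstrip("/")
        return f"{scheme}://{rest}" if rest else f"{scheme}://"
    return f"{default_scheme}{normalized.lstrip('/')}"

assert _ensure_http_scheme("localhost:11434") == "http://localhost:11434"
assert _ensure_http_scheme("//localhost:11434") == "http://localhost:11434"
assert _ensure_http_scheme("https:////example.com/v1") == "https://example.com/v1"
assert _ensure_http_scheme(" http://example.com ") == "http://example.com"
```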
```diff
@@ -188,6 +200,8 @@ def load_config(path: str) -> PaperConfig:
         location = _as_str(provider.get("location"), None)
         credentials_path = _as_str(provider.get("credentials_path"), None)
         anthropic_version = _as_str(provider.get("anthropic_version"), None)
+        max_tokens = provider.get("max_tokens")
+        max_tokens_value = int(max_tokens) if max_tokens is not None else None
 
         if provider_type == "azure_openai":
             if not base_url:
```
```diff
@@ -221,6 +235,7 @@ def load_config(path: str) -> PaperConfig:
             location=location,
             credentials_path=credentials_path,
             anthropic_version=anthropic_version,
+            max_tokens=max_tokens_value,
             structured_mode=structured_mode,
             extra_headers=extra_headers,
             system_prompt=_as_str(provider.get("system_prompt"), None),
```
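Taken together, the max_tokens additions read the raw provider value and coerce it to an int before it is stored on ProviderConfig. A small sketch of that coercion in isolation (the dictionaries below are illustrative; the on-disk config format is not visible in this diff):

```python
def coerce_max_tokens(provider: dict) -> int | None:
    # Same expression as in load_config above: absent/None stays None,
    # anything else (int or numeric string) is coerced with int().
    max_tokens = provider.get("max_tokens")
    return int(max_tokens) if max_tokens is not None else None

assert coerce_max_tokens({"max_tokens": 4096}) == 4096
assert coerce_max_tokens({"max_tokens": "4096"}) == 4096  # string values are accepted
assert coerce_max_tokens({}) is None                      # field remains optional
```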
deepresearch_flow/paper/db.py
CHANGED
```diff
@@ -584,6 +584,13 @@ def register_db_commands(db_group: click.Group) -> None:
         default=(),
         help="Optional markdown root directory (repeatable) for source viewing",
     )
+    @click.option(
+        "--md-translated-root",
+        "md_translated_roots",
+        multiple=True,
+        default=(),
+        help="Optional markdown root directory (repeatable) for translated viewing",
+    )
     @click.option(
         "--pdf-root",
         "pdf_roots",
```
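The new --md-translated-root option mirrors the existing --md-root: it is repeatable, and click collects the values into a tuple. A toy command showing that behavior (the command and paths below are illustrative, not the package's real db command):

```python
import click
from click.testing import CliRunner

@click.command()
@click.option("--md-translated-root", "md_translated_roots", multiple=True, default=())
def show(md_translated_roots: tuple[str, ...]) -> None:
    # Each repeated flag contributes one entry, in command-line order.
    click.echo(",".join(md_translated_roots))

result = CliRunner().invoke(
    show, ["--md-translated-root", "out/zh", "--md-translated-root", "out/ja"]
)
assert result.output.strip() == "out/zh,out/ja"
```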
```diff
@@ -606,6 +613,7 @@ def register_db_commands(db_group: click.Group) -> None:
         input_paths: tuple[str, ...],
         bibtex_path: str | None,
         md_roots: tuple[str, ...],
+        md_translated_roots: tuple[str, ...],
         pdf_roots: tuple[str, ...],
         cache_dir: str | None,
         no_cache: bool,
@@ -623,6 +631,7 @@ def register_db_commands(db_group: click.Group) -> None:
             fallback_language=fallback_language,
             bibtex_path=Path(bibtex_path) if bibtex_path else None,
             md_roots=[Path(root) for root in md_roots],
+            md_translated_roots=[Path(root) for root in md_translated_roots],
             pdf_roots=[Path(root) for root in pdf_roots],
             cache_dir=Path(cache_dir) if cache_dir else None,
             use_cache=not no_cache,
```
deepresearch_flow/paper/llm.py
CHANGED
```diff
@@ -32,6 +32,7 @@ async def call_provider(
     timeout: float,
     structured_mode: str,
     client: httpx.AsyncClient,
+    max_tokens: int | None = None,
 ) -> str:
     headers = dict(provider.extra_headers)
     if api_key and provider.type == "openai_compatible":
```
```diff
@@ -98,6 +99,7 @@ async def call_provider(
         model=model,
         messages=messages,
         anthropic_version=provider.anthropic_version,
+        max_tokens=max_tokens or 2048,
     )
 
     if provider.type == "openai_compatible":
```
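One behavioral note on the request payload above: because the fallback uses or, any falsy value (None, or an explicit 0) resolves to 2048, and only a positive max_tokens overrides the default. Sketched:

```python
def effective_max_tokens(max_tokens: int | None) -> int:
    # Mirrors `max_tokens or 2048` in call_provider above.
    return max_tokens or 2048

assert effective_max_tokens(None) == 2048
assert effective_max_tokens(0) == 2048      # falsy, falls back as well
assert effective_max_tokens(8192) == 8192
```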