scitex 2.17.0__py3-none-any.whl → 2.17.4__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- scitex/_dev/__init__.py +122 -0
- scitex/_dev/_config.py +391 -0
- scitex/_dev/_dashboard/__init__.py +11 -0
- scitex/_dev/_dashboard/_app.py +89 -0
- scitex/_dev/_dashboard/_routes.py +182 -0
- scitex/_dev/_dashboard/_scripts.py +422 -0
- scitex/_dev/_dashboard/_styles.py +295 -0
- scitex/_dev/_dashboard/_templates.py +130 -0
- scitex/_dev/_dashboard/static/version-dashboard-favicon.svg +12 -0
- scitex/_dev/_ecosystem.py +109 -0
- scitex/_dev/_github.py +360 -0
- scitex/_dev/_mcp/__init__.py +11 -0
- scitex/_dev/_mcp/handlers.py +182 -0
- scitex/_dev/_rtd.py +122 -0
- scitex/_dev/_ssh.py +362 -0
- scitex/_dev/_versions.py +272 -0
- scitex/_mcp_tools/__init__.py +2 -0
- scitex/_mcp_tools/dev.py +186 -0
- scitex/audio/_audio_check.py +84 -41
- scitex/cli/capture.py +45 -22
- scitex/cli/dev.py +494 -0
- scitex/cli/main.py +2 -0
- scitex/cli/stats.py +48 -20
- scitex/cli/verify.py +33 -36
- scitex/plt/__init__.py +16 -6
- scitex/scholar/_mcp/crossref_handlers.py +45 -7
- scitex/scholar/_mcp/openalex_handlers.py +45 -7
- scitex/scholar/config/default.yaml +2 -0
- scitex/scholar/local_dbs/__init__.py +5 -1
- scitex/scholar/local_dbs/export.py +93 -0
- scitex/scholar/local_dbs/unified.py +505 -0
- scitex/scholar/metadata_engines/ScholarEngine.py +11 -0
- scitex/scholar/metadata_engines/individual/OpenAlexLocalEngine.py +346 -0
- scitex/scholar/metadata_engines/individual/__init__.py +1 -0
- scitex/template/__init__.py +18 -1
- scitex/template/clone_research_minimal.py +111 -0
- scitex/verify/README.md +0 -12
- scitex/verify/__init__.py +0 -4
- scitex/verify/_visualize.py +0 -4
- scitex/verify/_viz/__init__.py +0 -18
- {scitex-2.17.0.dist-info → scitex-2.17.4.dist-info}/METADATA +2 -1
- {scitex-2.17.0.dist-info → scitex-2.17.4.dist-info}/RECORD +45 -24
- scitex/verify/_viz/_plotly.py +0 -193
- {scitex-2.17.0.dist-info → scitex-2.17.4.dist-info}/WHEEL +0 -0
- {scitex-2.17.0.dist-info → scitex-2.17.4.dist-info}/entry_points.txt +0 -0
- {scitex-2.17.0.dist-info → scitex-2.17.4.dist-info}/licenses/LICENSE +0 -0
scitex/plt/__init__.py
CHANGED
@@ -75,8 +75,8 @@ if _FIGRECIPE_AVAILABLE:
     from figrecipe import (
         compose,
         crop,
-        edit,
         extract_data,
+        gui,
         info,
         list_presets,
         load_style,
@@ -87,11 +87,17 @@ if _FIGRECIPE_AVAILABLE:
         validate,
     )

+    # Backward compatibility alias
+    edit = gui
+
     # Internal imports (not part of figrecipe public API)
     from figrecipe._api._notebook import enable_svg
     from figrecipe._api._seaborn_proxy import sns
     from figrecipe._api._style_manager import STYLE, apply_style
-    from figrecipe._composition import align_panels,
+    from figrecipe._composition import align_panels, align_smart, distribute_panels
+
+    # Backward compatibility alias
+    smart_align = align_smart
     from figrecipe._graph_presets import get_preset as get_graph_preset
     from figrecipe._graph_presets import list_presets as list_graph_presets
     from figrecipe._graph_presets import register_preset as register_graph_preset
@@ -120,11 +126,13 @@ else:
     validate = _not_available
     extract_data = _not_available
     info = _not_available
-
+    gui = _not_available
+    edit = _not_available  # Backward compatibility alias
     compose = _not_available
     align_panels = _not_available
     distribute_panels = _not_available
-
+    align_smart = _not_available
+    smart_align = _not_available  # Backward compatibility alias
     sns = None
     enable_svg = _not_available
     get_graph_preset = _not_available
@@ -438,7 +446,8 @@ __all__ = [
     "validate",
     "extract_data",
     "info",
-    "
+    "gui",
+    "edit",  # Backward compatibility alias for gui
     # Style management
     "STYLE",
     "load_style",
@@ -449,7 +458,8 @@ __all__ = [
     "compose",
     "align_panels",
     "distribute_panels",
-    "
+    "align_smart",
+    "smart_align",  # Backward compatibility alias for align_smart
     # Graph visualization
     "draw_graph",
     "get_graph_preset",
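
These hunks import `gui` (instead of `edit`) and `align_smart` from figrecipe and keep `edit` and `smart_align` as module-level backward-compatibility aliases, so existing call sites keep working. A minimal sketch of what the aliasing means for callers (both assertions follow directly from the assignments shown above; in the fallback branch both names point at the same `_not_available` stub):

    import scitex.plt as splt

    # Old and new names resolve to the same callables after this change
    assert splt.edit is splt.gui
    assert splt.smart_align is splt.align_smart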

scitex/scholar/_mcp/crossref_handlers.py
CHANGED

@@ -40,6 +40,8 @@ async def crossref_search_handler(
     year_min: int | None = None,
     year_max: int | None = None,
     enrich: bool = False,
+    save_path: str | None = None,
+    save_format: str = "json",
 ) -> dict:
     """Search CrossRef database (167M+ papers) via crossref-local.

@@ -50,6 +52,8 @@
         year_min: Minimum publication year filter
         year_max: Maximum publication year filter
         enrich: If True, add citation counts and references
+        save_path: Optional file path to save results (e.g., "results.json", "papers.bib")
+        save_format: Output format for save_path: "text", "json", or "bibtex" (default: "json")
     """
     try:
         crossref = _ensure_crossref()
@@ -91,11 +95,21 @@
                 if len(papers) >= limit:
                     break

-            return papers, results.total
+            return papers, results.total, results

-        papers, total = await loop.run_in_executor(None, do_search)
+        papers, total, search_results = await loop.run_in_executor(None, do_search)

-        return {
+        # Save to file if requested
+        saved_path = None
+        if save_path:
+            try:
+                from crossref_local import save as cr_save
+
+                saved_path = cr_save(search_results, save_path, format=save_format)
+            except Exception as e:
+                return {"success": False, "error": f"Failed to save: {e}"}
+
+        result = {
             "success": True,
             "query": query,
             "total": total,
@@ -107,6 +121,11 @@
             "timestamp": datetime.now().isoformat(),
         }

+        if saved_path:
+            result["saved_to"] = saved_path
+
+        return result
+
     except Exception as e:
         return {"success": False, "error": str(e)}

@@ -115,6 +134,8 @@ async def crossref_get_handler(
     doi: str,
     include_citations: bool = False,
     include_references: bool = False,
+    save_path: str | None = None,
+    save_format: str = "json",
 ) -> dict:
     """Get a paper by DOI from CrossRef database.

@@ -122,6 +143,8 @@
         doi: DOI of the paper
         include_citations: Include list of citing DOIs
         include_references: Include list of referenced DOIs
+        save_path: Optional file path to save result (e.g., "paper.json", "paper.bib")
+        save_format: Output format for save_path: "text", "json", or "bibtex" (default: "json")
     """
     try:
         crossref = _ensure_crossref()
@@ -130,7 +153,7 @@
         def do_get():
             work = crossref.get(doi)
             if not work:
-                return None
+                return None, None

             result = {
                 "doi": work.doi,
@@ -152,9 +175,9 @@
             if include_references:
                 result["referenced_dois"] = crossref.get_cited(doi)

-            return result
+            return result, work

-        result = await loop.run_in_executor(None, do_get)
+        result, work_obj = await loop.run_in_executor(None, do_get)

         if result is None:
             return {
@@ -163,13 +186,28 @@
                 "doi": doi,
             }

-        return {
+        # Save to file if requested
+        saved_path = None
+        if save_path and work_obj:
+            try:
+                from crossref_local import save as cr_save
+
+                saved_path = cr_save(work_obj, save_path, format=save_format)
+            except Exception as e:
+                return {"success": False, "error": f"Failed to save: {e}"}
+
+        response = {
             "success": True,
             "paper": result,
             "source": "crossref_local",
             "timestamp": datetime.now().isoformat(),
         }

+        if saved_path:
+            response["saved_to"] = saved_path
+
+        return response
+
     except Exception as e:
         return {"success": False, "error": str(e)}
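
The hunks above thread optional `save_path`/`save_format` arguments through both CrossRef handlers and report a `saved_to` key when a file is written via `crossref_local.save`. A hedged usage sketch (the `query` and `limit` parameters are inferred from the handler body shown above, not from these hunks, and `crossref-local` with its local database must be available):

    import asyncio
    from scitex.scholar._mcp.crossref_handlers import crossref_search_handler

    result = asyncio.run(
        crossref_search_handler(
            query="phase-amplitude coupling",
            limit=5,
            save_path="papers.bib",
            save_format="bibtex",
        )
    )
    print(result.get("saved_to"))  # path of the exported file when saving succeeds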
scitex/scholar/_mcp/openalex_handlers.py
CHANGED

@@ -38,6 +38,8 @@ async def openalex_search_handler(
     offset: int = 0,
     year_min: int | None = None,
     year_max: int | None = None,
+    save_path: str | None = None,
+    save_format: str = "json",
 ) -> dict:
     """Search OpenAlex database (250M+ papers) via openalex-local.

@@ -47,6 +49,8 @@
         offset: Number of results to skip for pagination
         year_min: Minimum publication year filter
         year_max: Maximum publication year filter
+        save_path: Optional file path to save results (e.g., "results.json", "papers.bib")
+        save_format: Output format for save_path: "text", "json", or "bibtex" (default: "json")
     """
     try:
         openalex = _ensure_openalex()
@@ -86,11 +90,21 @@
                 if len(papers) >= limit:
                     break

-            return papers, getattr(results, "total", len(papers))
+            return papers, getattr(results, "total", len(papers)), results

-        papers, total = await loop.run_in_executor(None, do_search)
+        papers, total, search_results = await loop.run_in_executor(None, do_search)

-        return {
+        # Save to file if requested
+        saved_path = None
+        if save_path:
+            try:
+                from openalex_local import save as oa_save
+
+                saved_path = oa_save(search_results, save_path, format=save_format)
+            except Exception as e:
+                return {"success": False, "error": f"Failed to save: {e}"}
+
+        result = {
             "success": True,
             "query": query,
             "total": total,
@@ -102,6 +116,11 @@
             "timestamp": datetime.now().isoformat(),
         }

+        if saved_path:
+            result["saved_to"] = saved_path
+
+        return result
+
     except Exception as e:
         return {"success": False, "error": str(e)}

@@ -109,12 +128,16 @@ async def openalex_search_handler(
 async def openalex_get_handler(
     doi: str = None,
     openalex_id: str = None,
+    save_path: str | None = None,
+    save_format: str = "json",
 ) -> dict:
     """Get a paper by DOI or OpenAlex ID from OpenAlex database.

     Args:
         doi: DOI of the paper (e.g., '10.1038/nature12373')
         openalex_id: OpenAlex ID (e.g., 'W2100837269')
+        save_path: Optional file path to save result (e.g., "paper.json", "paper.bib")
+        save_format: Output format for save_path: "text", "json", or "bibtex" (default: "json")
     """
     if not doi and not openalex_id:
         return {"success": False, "error": "Must provide either doi or openalex_id"}
@@ -127,7 +150,7 @@ async def openalex_get_handler(
             identifier = doi or openalex_id
             work = openalex.get(identifier)
             if not work:
-                return None
+                return None, None

             result = {
                 "doi": work.doi,
@@ -144,9 +167,9 @@
                 "url": getattr(work, "url", None),
             }

-            return result
+            return result, work

-        result = await loop.run_in_executor(None, do_get)
+        result, work_obj = await loop.run_in_executor(None, do_get)

         if result is None:
             identifier = doi or openalex_id
@@ -156,13 +179,28 @@
             "identifier": identifier,
         }

-        return {
+        # Save to file if requested
+        saved_path = None
+        if save_path and work_obj:
+            try:
+                from openalex_local import save as oa_save
+
+                saved_path = oa_save(work_obj, save_path, format=save_format)
+            except Exception as e:
+                return {"success": False, "error": f"Failed to save: {e}"}
+
+        response = {
             "success": True,
             "paper": result,
             "source": "openalex_local",
             "timestamp": datetime.now().isoformat(),
         }

+        if saved_path:
+            response["saved_to"] = saved_path
+
+        return response
+
     except Exception as e:
         return {"success": False, "error": str(e)}
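
The OpenAlex handlers gain the same save hooks. For a single-DOI lookup the call would look roughly like this (a sketch; the DOI is the example from the docstring above, and `openalex-local` must be installed with a populated local database):

    import asyncio
    from scitex.scholar._mcp.openalex_handlers import openalex_get_handler

    response = asyncio.run(
        openalex_get_handler(
            doi="10.1038/nature12373",
            save_path="paper.json",
            save_format="json",
        )
    )
    print(response.get("saved_to"))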
scitex/scholar/config/default.yaml
CHANGED

@@ -16,6 +16,7 @@ enable_auto_download: ${SCITEX_SCHOLAR_AUTO_DOWNLOAD:-false}
 engines:
   - URL
   - CrossRefLocal
+  - OpenAlexLocal
   - Semantic_Scholar
   - CrossRef
   - OpenAlex
@@ -110,6 +111,7 @@ unpaywall_email: ${SCITEX_SCHOLAR_UNPAYWALL_EMAIL:-"research@scitex.io"}
 semantic_scholar_api_key: ${SCITEX_SCHOLAR_SEMANTIC_SCHOLAR_API_KEY:-null}
 crossref_api_key: ${SCITEX_SCHOLAR_CROSSREF_API_KEY:-null}
 crossref_api_url: ${SCITEX_SCHOLAR_CROSSREF_API_URL:-"http://127.0.0.1:3333"}
+openalex_api_url: ${SCITEX_SCHOLAR_OPENALEX_API_URL:-"http://127.0.0.1:31292"}
 pubmed_api_key: ${SCITEX_SCHOLAR_PUBMED_API_KEY:-null}
 twocaptcha_api_key: ${SCITEX_SCHOLAR_2CAPTCHA_API_KEY:-null}
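
Both additions follow the config's existing `${ENV_VAR:-default}` pattern, so the OpenAlexLocal endpoint should be overridable through the environment before the Scholar config is loaded (a sketch; the exact load order is not shown in this diff, and the port value below is hypothetical):

    import os

    # Point the OpenAlexLocal engine at a non-default server
    os.environ["SCITEX_SCHOLAR_OPENALEX_API_URL"] = "http://127.0.0.1:8080"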
scitex/scholar/local_dbs/__init__.py
CHANGED

@@ -20,11 +20,15 @@ Usage:

 from __future__ import annotations

-from . import crossref_scitex, openalex_scitex
+from . import crossref_scitex, openalex_scitex, unified
+from .export import SUPPORTED_FORMATS, save

 __all__ = [
     "crossref_scitex",
     "openalex_scitex",
+    "unified",
+    "save",
+    "SUPPORTED_FORMATS",
 ]
scitex/scholar/local_dbs/export.py
ADDED

@@ -0,0 +1,93 @@
+#!/usr/bin/env python3
+# Timestamp: 2026-02-04
+# File: src/scitex/scholar/local_dbs/export.py
+"""Export functionality for unified local database results.
+
+Supports multiple output formats:
+- text: Human-readable formatted text
+- json: JSON format with all fields
+- bibtex: BibTeX bibliography format
+"""
+
+from __future__ import annotations
+
+from pathlib import Path
+from typing import TYPE_CHECKING, List, Union
+
+if TYPE_CHECKING:
+    from .unified import UnifiedSearchResult, UnifiedWork
+
+__all__ = [
+    "save",
+    "SUPPORTED_FORMATS",
+]
+
+SUPPORTED_FORMATS = ["text", "json", "bibtex"]
+
+
+def save(
+    data: Union[UnifiedWork, UnifiedSearchResult, List[UnifiedWork]],
+    path: str,
+    format: str = "json",
+) -> str:
+    """Save UnifiedWork(s) or UnifiedSearchResult to a file.
+
+    Args:
+        data: UnifiedWork, UnifiedSearchResult, or list of UnifiedWorks
+        path: Output file path
+        format: Output format ("text", "json", "bibtex")
+
+    Returns
+    -------
+    Path to saved file
+
+    Raises
+    ------
+    ValueError: If format is not supported
+
+    Examples
+    --------
+    >>> from scitex.scholar.local_dbs import search, save
+    >>> results = search("machine learning", limit=10)
+    >>> save(results, "results.json")
+    >>> save(results, "results.bib", format="bibtex")
+    >>> save(results, "results.txt", format="text")
+    """
+    from .unified import UnifiedSearchResult, UnifiedWork, to_bibtex, to_json, to_text
+
+    if format not in SUPPORTED_FORMATS:
+        raise ValueError(
+            f"Unsupported format: {format}. "
+            f"Supported formats: {', '.join(SUPPORTED_FORMATS)}"
+        )
+
+    path = Path(path)
+
+    # Extract works
+    if isinstance(data, UnifiedWork):
+        works = [data]
+    elif isinstance(data, UnifiedSearchResult):
+        works = data.works
+    elif isinstance(data, list):
+        works = data
+    else:
+        raise TypeError(f"Unsupported data type: {type(data)}")
+
+    # Generate content
+    if format == "text":
+        content = to_text(works)
+    elif format == "json":
+        content = to_json(works)
+    elif format == "bibtex":
+        content = to_bibtex(works)
+    else:
+        raise ValueError(f"Unsupported format: {format}")
+
+    # Write to file
+    path.parent.mkdir(parents=True, exist_ok=True)
+    path.write_text(content, encoding="utf-8")
+
+    return str(path)
+
+
+# EOF
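
Because `local_dbs/__init__.py` now re-exports `save` and `SUPPORTED_FORMATS`, the exporter is reachable from the package root. A short sketch (hedged: a `search` entry point is suggested by the Examples in `save()` above but is not part of this diff; accessing it through the `unified` module here is an assumption):

    from scitex.scholar.local_dbs import SUPPORTED_FORMATS, save, unified

    print(SUPPORTED_FORMATS)  # ['text', 'json', 'bibtex']

    # Assumed entry point; see the Examples section of save() above
    results = unified.search("machine learning", limit=10)
    save(results, "results.bib", format="bibtex")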