scitex 2.17.0__py3-none-any.whl → 2.17.4__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (46)
  1. scitex/_dev/__init__.py +122 -0
  2. scitex/_dev/_config.py +391 -0
  3. scitex/_dev/_dashboard/__init__.py +11 -0
  4. scitex/_dev/_dashboard/_app.py +89 -0
  5. scitex/_dev/_dashboard/_routes.py +182 -0
  6. scitex/_dev/_dashboard/_scripts.py +422 -0
  7. scitex/_dev/_dashboard/_styles.py +295 -0
  8. scitex/_dev/_dashboard/_templates.py +130 -0
  9. scitex/_dev/_dashboard/static/version-dashboard-favicon.svg +12 -0
  10. scitex/_dev/_ecosystem.py +109 -0
  11. scitex/_dev/_github.py +360 -0
  12. scitex/_dev/_mcp/__init__.py +11 -0
  13. scitex/_dev/_mcp/handlers.py +182 -0
  14. scitex/_dev/_rtd.py +122 -0
  15. scitex/_dev/_ssh.py +362 -0
  16. scitex/_dev/_versions.py +272 -0
  17. scitex/_mcp_tools/__init__.py +2 -0
  18. scitex/_mcp_tools/dev.py +186 -0
  19. scitex/audio/_audio_check.py +84 -41
  20. scitex/cli/capture.py +45 -22
  21. scitex/cli/dev.py +494 -0
  22. scitex/cli/main.py +2 -0
  23. scitex/cli/stats.py +48 -20
  24. scitex/cli/verify.py +33 -36
  25. scitex/plt/__init__.py +16 -6
  26. scitex/scholar/_mcp/crossref_handlers.py +45 -7
  27. scitex/scholar/_mcp/openalex_handlers.py +45 -7
  28. scitex/scholar/config/default.yaml +2 -0
  29. scitex/scholar/local_dbs/__init__.py +5 -1
  30. scitex/scholar/local_dbs/export.py +93 -0
  31. scitex/scholar/local_dbs/unified.py +505 -0
  32. scitex/scholar/metadata_engines/ScholarEngine.py +11 -0
  33. scitex/scholar/metadata_engines/individual/OpenAlexLocalEngine.py +346 -0
  34. scitex/scholar/metadata_engines/individual/__init__.py +1 -0
  35. scitex/template/__init__.py +18 -1
  36. scitex/template/clone_research_minimal.py +111 -0
  37. scitex/verify/README.md +0 -12
  38. scitex/verify/__init__.py +0 -4
  39. scitex/verify/_visualize.py +0 -4
  40. scitex/verify/_viz/__init__.py +0 -18
  41. {scitex-2.17.0.dist-info → scitex-2.17.4.dist-info}/METADATA +2 -1
  42. {scitex-2.17.0.dist-info → scitex-2.17.4.dist-info}/RECORD +45 -24
  43. scitex/verify/_viz/_plotly.py +0 -193
  44. {scitex-2.17.0.dist-info → scitex-2.17.4.dist-info}/WHEEL +0 -0
  45. {scitex-2.17.0.dist-info → scitex-2.17.4.dist-info}/entry_points.txt +0 -0
  46. {scitex-2.17.0.dist-info → scitex-2.17.4.dist-info}/licenses/LICENSE +0 -0
scitex/scholar/metadata_engines/individual/OpenAlexLocalEngine.py ADDED
@@ -0,0 +1,346 @@
+ #!/usr/bin/env python3
+ # Timestamp: "2026-02-03"
+ # File: src/scitex/scholar/metadata_engines/individual/OpenAlexLocalEngine.py
+ # ----------------------------------------
+ from __future__ import annotations
+
+ import os
+
+ __FILE__ = __file__
+ __DIR__ = os.path.dirname(__FILE__)
+ # ----------------------------------------
+
+ import json
+ from typing import Dict, List, Optional, Union
+
+ from scitex import logging
+
+ from ..utils import standardize_metadata
+ from ._BaseDOIEngine import BaseDOIEngine
+
+ logger = logging.getLogger(__name__)
+
+
+ class OpenAlexLocalEngine(BaseDOIEngine):
+     """OpenAlex Local Engine using local FastAPI or external public API
+
+     Supports both:
+     - Internal API: http://openalex:31292 (Docker network)
+     - External API: https://scitex.ai/scholar/api/openalex (Public internet)
+
+     Automatically detects API format and adjusts endpoints accordingly.
+     """
+
+     def __init__(
+         self,
+         email: str = "research@example.com",
+         api_url: str = "http://127.0.0.1:31292",
+     ):
+         super().__init__(email)
+         self.api_url = api_url.rstrip("/")
+
+         # Detect API type: external (public) vs internal (Docker/local)
+         self._is_external_api = (
+             "/api/openalex" in self.api_url or "scitex.ai" in self.api_url
+         )
+
+     @property
+     def name(self) -> str:
+         return "OpenAlexLocal"
+
+     @property
+     def rate_limit_delay(self) -> float:
+         return 0.01
+
+     def _build_endpoint_url(self, endpoint: str) -> str:
+         """Build the correct endpoint URL based on API type
+
+         Args:
+             endpoint: Endpoint name (e.g., 'works', 'info')
+
+         Returns
+         -------
+         Full URL for the endpoint
+
+         Examples
+         --------
+         Internal: http://openalex:31292/works
+         External: https://scitex.ai/scholar/api/openalex/works
+         """
+         if self._is_external_api:
+             # External API: base URL already includes /scholar/api/openalex
+             return f"{self.api_url}/{endpoint}"
+         else:
+             # Internal API: direct endpoint
+             return f"{self.api_url}/{endpoint}"
+
+     def search(
+         self,
+         title: Optional[str] = None,
+         year: Optional[Union[int, str]] = None,
+         authors: Optional[List[str]] = None,
+         doi: Optional[str] = None,
+         max_results=1,
+         return_as: Optional[str] = "dict",
+         **kwargs,
+     ) -> Optional[Dict]:
+         """Search using local OpenAlex API with all parameters"""
+         if doi:
+             return self._search_by_doi(doi, return_as)
+         else:
+             return self._search_by_metadata(title, year, authors, max_results, return_as)
+
+     def _search_by_doi(self, doi: str, return_as: str) -> Optional[Dict]:
+         """Get work metadata by DOI"""
+         doi = doi.replace("https://doi.org/", "").replace("http://doi.org/", "")
+         url = self._build_endpoint_url(f"works/{doi}")
+
+         try:
+             assert return_as in [
+                 "dict",
+                 "json",
+             ], "return_as must be either 'dict' or 'json'"
+
+             response = self.session.get(url, timeout=10)
+             if response.status_code == 404:
+                 return self._create_minimal_metadata(doi=doi, return_as=return_as)
+             response.raise_for_status()
+             data = response.json()
+
+             return self._extract_metadata_from_work(data, return_as)
+
+         except Exception as e:
+             if "Connection refused" in str(e) or "Max retries exceeded" in str(e):
+                 logger.warning(
+                     f"OpenAlex Local server not available at {self.api_url} (connection refused)"
+                 )
+             else:
+                 logger.warning(f"OpenAlex Local DOI search error: {e}")
+             return self._create_minimal_metadata(doi=doi, return_as=return_as)
+
+     def _search_by_metadata(
+         self,
+         title: Optional[str],
+         year: Optional[Union[int, str]],
+         authors: Optional[List[str]],
+         max_results: int,
+         return_as: str,
+     ) -> Optional[Dict]:
+         """Search by metadata (title, year, authors)"""
+         if not title:
+             return self._create_minimal_metadata(
+                 title=title,
+                 year=year,
+                 authors=authors,
+                 return_as=return_as,
+             )
+
+         # Build search query
+         query = title
+         if year:
+             query = f"{query} {year}"
+         if authors and isinstance(authors, list) and authors:
+             query = f"{query} {authors[0]}"
+
+         url = self._build_endpoint_url("works")
+         params = {"q": query, "limit": max(5, max_results)}
+
+         try:
+             assert return_as in [
+                 "dict",
+                 "json",
+             ], "return_as must be either 'dict' or 'json'"
+
+             response = self.session.get(url, params=params, timeout=10)
+             response.raise_for_status()
+             data = response.json()
+
+             results = data.get("results", [])
+             if not results:
+                 return self._create_minimal_metadata(
+                     title=title,
+                     year=year,
+                     authors=authors,
+                     return_as=return_as,
+                 )
+
+             # Find best matching result
+             for work in results:
+                 work_title = work.get("title", "")
+                 if work_title and work_title.endswith("."):
+                     work_title = work_title[:-1]
+                 if work_title and self._is_title_match(title, work_title):
+                     return self._extract_metadata_from_work(work, return_as)
+
+             # Return first result if no exact match
+             return self._extract_metadata_from_work(results[0], return_as)
+
+         except Exception as e:
+             if "Connection refused" in str(e) or "Max retries exceeded" in str(e):
+                 logger.warning(
+                     f"OpenAlex Local server not available at {self.api_url} (connection refused)"
+                 )
+             else:
+                 logger.warning(f"OpenAlex Local search error: {e}")
+             return self._create_minimal_metadata(
+                 title=title,
+                 year=year,
+                 authors=authors,
+                 return_as=return_as,
+             )
+
+     def _extract_metadata_from_work(self, data: dict, return_as: str) -> Optional[Dict]:
+         """Extract metadata from OpenAlex work data"""
+         if not data:
+             return self._create_minimal_metadata(return_as=return_as)
+
+         # Extract title
+         title = data.get("title")
+         if title and title.endswith("."):
+             title = title[:-1]
+
+         # Extract DOI
+         doi = data.get("doi")
+         if doi:
+             doi = doi.replace("https://doi.org/", "").replace("http://doi.org/", "")
+
+         # Extract authors
+         authors = data.get("authors", [])
+         if not authors:
+             authors = []
+
+         # Extract publication info
+         year = data.get("year")
+         journal = data.get("source")
+         issn = data.get("issn")
+         volume = data.get("volume")
+         issue = data.get("issue")
+         pages = data.get("pages")
+         publisher = None  # Not directly available in response
+
+         # Extract citation count
+         cited_by_count = data.get("cited_by_count")
+
+         # Extract concepts/keywords
+         concepts = data.get("concepts", [])
+         keywords = [c.get("name") for c in concepts if c.get("name")]
+
+         # Extract OA info
+         is_oa = data.get("is_oa", False)
+         oa_url = data.get("oa_url")
+
+         # Extract OpenAlex ID
+         openalex_id = data.get("openalex_id")
+
+         metadata = {
+             "id": {
+                 "doi": doi if doi else None,
+                 "doi_engines": [self.name] if doi else None,
+                 "openalex_id": openalex_id if openalex_id else None,
+                 "openalex_id_engines": [self.name] if openalex_id else None,
+             },
+             "basic": {
+                 "title": title if title else None,
+                 "title_engines": [self.name] if title else None,
+                 "year": year if year else None,
+                 "year_engines": [self.name] if year else None,
+                 "authors": authors if authors else None,
+                 "authors_engines": [self.name] if authors else None,
+                 "keywords": keywords if keywords else None,
+                 "keywords_engines": [self.name] if keywords else None,
+             },
+             "publication": {
+                 "journal": journal if journal else None,
+                 "journal_engines": [self.name] if journal else None,
+                 "issn": issn if issn else None,
+                 "issn_engines": [self.name] if issn else None,
+                 "volume": volume if volume else None,
+                 "volume_engines": [self.name] if volume else None,
+                 "issue": issue if issue else None,
+                 "issue_engines": [self.name] if issue else None,
+                 "pages": pages if pages else None,
+                 "pages_engines": [self.name] if pages else None,
+             },
+             "citation_count": {
+                 "total": cited_by_count if cited_by_count else None,
+                 "total_engines": [self.name] if cited_by_count else None,
+             },
+             "url": {
+                 "doi": f"https://doi.org/{doi}" if doi else None,
+                 "doi_engines": [self.name] if doi else None,
+                 "pdf": oa_url if oa_url else None,
+                 "pdf_engines": [self.name] if oa_url else None,
+             },
+             "open_access": {
+                 "is_oa": is_oa,
+                 "is_oa_engines": [self.name] if is_oa is not None else None,
+                 "oa_url": oa_url if oa_url else None,
+                 "oa_url_engines": [self.name] if oa_url else None,
+             },
+             "system": {
+                 f"searched_by_{self.name}": True,
+             },
+         }
+
+         metadata = standardize_metadata(metadata)
+
+         if return_as == "dict":
+             return metadata
+         if return_as == "json":
+             return json.dumps(metadata, indent=2)
+
+
+ if __name__ == "__main__":
+
+     from scitex.scholar.metadata_engines.individual import OpenAlexLocalEngine
+
+     TITLE = "deep learning"
+     DOI = "10.1038/nature12373"
+
+     # Example 1: Internal API (Docker network or localhost)
+     print("\n" + "=" * 60)
+     print("INTERNAL API EXAMPLE")
+     print("=" * 60)
+     engine_internal = OpenAlexLocalEngine(
+         "test@example.com", api_url="http://openalex:31292"
+     )
+     print(f"API URL: {engine_internal.api_url}")
+     print(f"Is External: {engine_internal._is_external_api}")
+     print(f"Works endpoint: {engine_internal._build_endpoint_url('works')}")
+
+     # Example 2: External API (public internet)
+     print("\n" + "=" * 60)
+     print("EXTERNAL API EXAMPLE")
+     print("=" * 60)
+     engine_external = OpenAlexLocalEngine(
+         "test@example.com", api_url="https://scitex.ai/scholar/api/openalex"
+     )
+     print(f"API URL: {engine_external.api_url}")
+     print(f"Is External: {engine_external._is_external_api}")
+     print(f"Works endpoint: {engine_external._build_endpoint_url('works')}")
+
+     # Test search (use internal for demo)
+     print("\n" + "=" * 60)
+     print("SEARCH TEST")
+     print("=" * 60)
+     engine = OpenAlexLocalEngine("test@example.com")
+     result = engine.search(doi=DOI)
+     if result:
+         print(f"Title: {result.get('basic', {}).get('title')}")
+         print(f"DOI: {result.get('id', {}).get('doi')}")
+         print(f"Year: {result.get('basic', {}).get('year')}")
+     else:
+         print("No results found")
+
+
+ # Usage examples:
+ #
+ # Internal API (from NAS Docker):
+ #   export SCITEX_SCHOLAR_OPENALEX_API_URL=http://openalex:31292
+ #   python -m scitex.scholar.metadata_engines.individual.OpenAlexLocalEngine
+ #
+ # External API (from anywhere):
+ #   export SCITEX_SCHOLAR_OPENALEX_API_URL=https://scitex.ai/scholar/api/openalex
+ #   python -m scitex.scholar.metadata_engines.individual.OpenAlexLocalEngine
+
+ # EOF
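
For orientation, a minimal usage sketch (not part of the diff): it assumes the caller reads the SCITEX_SCHOLAR_OPENALEX_API_URL variable named in the usage comments above and passes it to the constructor; the engine itself only takes api_url explicitly. The constructor and search(doi=...) signatures match the hunk above.

```python
import os

from scitex.scholar.metadata_engines.individual import OpenAlexLocalEngine

# Hypothetical wiring: the env var from the usage comments is read here by
# the caller; the fallback matches the constructor default in this hunk.
api_url = os.environ.get(
    "SCITEX_SCHOLAR_OPENALEX_API_URL", "http://127.0.0.1:31292"
)
engine = OpenAlexLocalEngine("research@example.com", api_url=api_url)

# DOI lookup returns the standardized metadata dict, or a minimal fallback
# when the local server is unreachable or the DOI is unknown.
metadata = engine.search(doi="10.1038/nature12373")
if metadata:
    print(metadata.get("basic", {}).get("title"))
```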
scitex/scholar/metadata_engines/individual/__init__.py CHANGED
@@ -2,6 +2,7 @@ from .ArXivEngine import ArXivEngine
  from .CrossRefEngine import CrossRefEngine
  from .CrossRefLocalEngine import CrossRefLocalEngine
  from .OpenAlexEngine import OpenAlexEngine
+ from .OpenAlexLocalEngine import OpenAlexLocalEngine
  from .PubMedEngine import PubMedEngine
  from .SemanticScholarEngine import SemanticScholarEngine
  from .URLDOIEngine import URLDOIEngine
scitex/template/__init__.py CHANGED
@@ -22,6 +22,7 @@ from .clone_pip_project import TEMPLATE_REPO_URL as PIP_PROJECT_URL
  from .clone_pip_project import clone_pip_project
  from .clone_research import TEMPLATE_REPO_URL as RESEARCH_URL
  from .clone_research import clone_research
+ from .clone_research_minimal import clone_research_minimal
  from .clone_singularity import TEMPLATE_REPO_URL as SINGULARITY_URL
  from .clone_singularity import clone_singularity
  from .clone_writer_directory import (
@@ -54,9 +55,24 @@ def get_available_templates_info():
      ...     print(f"{template['name']}: {template['description']}")
      """
      return [
+         {
+             "id": "research_minimal",
+             "name": "Research Minimal",
+             "description": "Minimal SciTeX structure with writer, scholar, visualizer, and console",
+             "github_url": RESEARCH_URL,
+             "branch": "minimal",
+             "use_case": "Focused research workflow with essential SciTeX modules only",
+             "features": [
+                 "scitex/writer/ - LaTeX manuscript writing",
+                 "scitex/scholar/ - Bibliography management",
+                 "scitex/visualizer/ - Figure creation",
+                 "scitex/console/ - Code execution",
+                 "scitex/management/ - Project management",
+             ],
+         },
          {
              "id": "research",
-             "name": "Research Project",
+             "name": "Research Project (Full)",
              "description": "Full scientific workflow structure for research projects",
              "github_url": RESEARCH_URL,
              "use_case": "Scientific research with data analysis, experiments, and paper writing",
@@ -113,6 +129,7 @@ def get_available_templates_info():
 
  __all__ = [
      "clone_research",
+     "clone_research_minimal",
      "clone_pip_project",
      "clone_singularity",
      "clone_writer_directory",
scitex/template/clone_research_minimal.py ADDED
@@ -0,0 +1,111 @@
+ #!/usr/bin/env python3
+ # File: /home/ywatanabe/proj/scitex-code/src/scitex/template/clone_research_minimal.py
+ # ----------------------------------------
+ from __future__ import annotations
+
+ import os
+
+ __FILE__ = "./src/scitex/template/clone_research_minimal.py"
+ __DIR__ = os.path.dirname(__FILE__)
+ # ----------------------------------------
+
+ """
+ Create a new minimal research project from the scitex_template_research template.
+
+ Uses the 'minimal' branch which contains only the scitex/ directory with:
+ - writer/ - LaTeX manuscript writing
+ - scholar/ - Bibliography management
+ - visualizer/ - Figure creation
+ - console/ - Code execution
+ - management/ - Project management
+ """
+
+ import sys
+ from typing import Optional
+
+ from ._clone_project import clone_project
+
+ TEMPLATE_REPO_URL = "https://github.com/ywatanabe1989/scitex-minimal-template.git"
+
+
+ def clone_research_minimal(
+     project_dir: str,
+     git_strategy: Optional[str] = "child",
+     branch: Optional[str] = None,
+     tag: Optional[str] = None,
+ ) -> bool:
+     """
+     Create a new minimal research project from the scitex-minimal-template.
+
+     This template contains only the essential scitex/ directory structure:
+     - writer/ - Full LaTeX manuscript writing with compilation scripts
+     - scholar/ - Bibliography management
+     - visualizer/ - Figure creation
+     - console/ - Code execution
+
+     Parameters
+     ----------
+     project_dir : str
+         Path to project directory (will be created). Can be a simple name like "my_project"
+         or a full path like "./projects/my_project"
+     git_strategy : str, optional
+         Git initialization strategy ('child', 'parent', None). Default is 'child'.
+     branch : str, optional
+         Specific branch of the template repository to clone.
+     tag : str, optional
+         Specific tag/release of the template repository to clone.
+
+     Returns
+     -------
+     bool
+         True if successful, False otherwise
+
+     Example
+     -------
+     >>> from scitex.template import clone_research_minimal
+     >>> clone_research_minimal("my_research_project")
+     >>> clone_research_minimal("./projects/my_project")
+     """
+     return clone_project(
+         project_dir,
+         TEMPLATE_REPO_URL,
+         "scitex-minimal-template",
+         git_strategy,
+         branch=branch,
+         tag=tag,
+     )
+
+
+ def main(args: list = None) -> None:
+     """
+     Command-line interface for clone_research_minimal.
+
+     Parameters
+     ----------
+     args : list, optional
+         Command-line arguments. If None, uses sys.argv[1:]
+     """
+     if args is None:
+         args = sys.argv[1:]
+
+     if len(args) < 1:
+         print("Usage: python -m scitex clone_research_minimal <project-dir>")
+         print("")
+         print("Arguments:")
+         print(" project-dir Path to project directory (will be created)")
+         print(" Can be a simple name like 'my_project' or a full path")
+         print("")
+         print("Example:")
+         print(" python -m scitex clone_research_minimal my_research_project")
+         sys.exit(1)
+
+     project_dir = args[0]
+
+     success = clone_research_minimal(project_dir)
+     sys.exit(0 if success else 1)
+
+
+ if __name__ == "__main__":
+     main()
+
+ # EOF
scitex/verify/README.md CHANGED
@@ -34,7 +34,6 @@ scitex/verify/
  ├── _visualize.py        # Re-exports from _viz/
  └── _viz/
      ├── _mermaid.py      # Mermaid DAG generation
-     ├── _plotly.py       # Interactive Plotly DAGs
      ├── _json.py         # JSON DAG export
      ├── _format.py       # Terminal output formatting
      ├── _colors.py       # Color constants
@@ -199,17 +198,6 @@ render_dag("dag.mmd", target_file="output.csv") # Mermaid code
  render_dag("dag.json", target_file="output.csv") # Graph structure
  ```
 
- ### Interactive Plotly
-
- ```python
- from scitex.verify import generate_plotly_dag, render_plotly_dag
-
- fig = generate_plotly_dag(target_file="output.csv")
- fig.show() # Opens browser
-
- render_plotly_dag("dag_plotly.html", target_file="output.csv")
- ```
-
  ## Integration Hooks (`_integration.py`)
 
  Automatically called by `@stx.session` and `stx.io`:
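
For code that imported the removed Plotly helpers, the backends that remain exported (Mermaid, HTML, JSON, terminal) still cover DAG rendering; a minimal fallback sketch reusing the render_dag calls shown in this README section:

```python
from scitex.verify import render_dag

# Mermaid and JSON outputs remain available after the Plotly backend removal.
render_dag("dag.mmd", target_file="output.csv")   # Mermaid code
render_dag("dag.json", target_file="output.csv")  # Graph structure
```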
scitex/verify/__init__.py CHANGED
@@ -109,10 +109,8 @@ from ._visualize import (
      format_status,
      generate_html_dag,
      generate_mermaid_dag,
-     generate_plotly_dag,
      print_verification_summary,
      render_dag,
-     render_plotly_dag,
  )
 
 
@@ -191,9 +189,7 @@ __all__ = [
      "format_list",
      "generate_mermaid_dag",
      "generate_html_dag",
-     "generate_plotly_dag",
      "render_dag",
-     "render_plotly_dag",
      "print_verification_summary",
      # Convenience functions
      "list_runs",
scitex/verify/_visualize.py CHANGED
@@ -22,10 +22,8 @@ from ._viz import (
      format_status,
      generate_html_dag,
      generate_mermaid_dag,
-     generate_plotly_dag,
      print_verification_summary,
      render_dag,
-     render_plotly_dag,
  )
 
  __all__ = [
@@ -38,9 +36,7 @@ __all__ = [
      "format_list",
      "generate_mermaid_dag",
      "generate_html_dag",
-     "generate_plotly_dag",
      "render_dag",
-     "render_plotly_dag",
      "print_verification_summary",
  ]
 
scitex/verify/_viz/__init__.py CHANGED
@@ -7,7 +7,6 @@ Provides multiple visualization backends:
  - Terminal: Colored text output with status icons
  - Mermaid: Text-based diagrams for docs/GitHub
  - HTML: Interactive web visualization
- - Plotly: Interactive Python-based visualization (optional)
  """
 
  from ._colors import Colors, VerificationLevel
@@ -21,21 +20,6 @@ from ._format import (
  from ._mermaid import generate_html_dag, generate_mermaid_dag, render_dag
  from ._utils import print_verification_summary
 
- # Optional Plotly support
- try:
-     from ._plotly import generate_plotly_dag, render_plotly_dag
-
-     _HAS_PLOTLY = True
- except ImportError:
-     _HAS_PLOTLY = False
-
-     def generate_plotly_dag(*args, **kwargs):
-         raise ImportError("plotly required: pip install plotly")
-
-     def render_plotly_dag(*args, **kwargs):
-         raise ImportError("plotly required: pip install plotly")
-
-
  __all__ = [
      "Colors",
      "VerificationLevel",
@@ -48,8 +32,6 @@ __all__ = [
      "generate_html_dag",
      "render_dag",
      "print_verification_summary",
-     "generate_plotly_dag",
-     "render_plotly_dag",
  ]
 
 
{scitex-2.17.0.dist-info → scitex-2.17.4.dist-info}/METADATA CHANGED
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: scitex
- Version: 2.17.0
+ Version: 2.17.4
  Summary: A comprehensive Python library for scientific computing and data analysis
  Project-URL: Homepage, https://github.com/ywatanabe1989/scitex-python
  Project-URL: Documentation, https://scitex.readthedocs.io
@@ -442,6 +442,7 @@ Requires-Dist: matplotlib; extra == 'utils'
  Requires-Dist: natsort; extra == 'utils'
  Requires-Dist: tqdm; extra == 'utils'
  Requires-Dist: xarray; extra == 'utils'
+ Provides-Extra: verify
  Provides-Extra: web
  Requires-Dist: aiohttp; extra == 'web'
  Requires-Dist: anthropic; extra == 'web'