tooluniverse-1.0.10-py3-none-any.whl → tooluniverse-1.0.11-py3-none-any.whl

This diff compares the contents of two publicly released versions of the package as published to a supported registry. It is provided for informational purposes only and reflects the changes between those versions as they appear in the public registry.

Potentially problematic release.

This version of tooluniverse might be problematic.

Files changed (150)
  1. tooluniverse/__init__.py +57 -1
  2. tooluniverse/blast_tool.py +132 -0
  3. tooluniverse/boltz_tool.py +2 -2
  4. tooluniverse/cbioportal_tool.py +42 -0
  5. tooluniverse/clinvar_tool.py +268 -74
  6. tooluniverse/compose_scripts/tool_discover.py +1941 -443
  7. tooluniverse/data/agentic_tools.json +0 -370
  8. tooluniverse/data/alphafold_tools.json +6 -6
  9. tooluniverse/data/blast_tools.json +112 -0
  10. tooluniverse/data/cbioportal_tools.json +87 -0
  11. tooluniverse/data/clinvar_tools.json +235 -0
  12. tooluniverse/data/compose_tools.json +0 -89
  13. tooluniverse/data/dbsnp_tools.json +275 -0
  14. tooluniverse/data/emdb_tools.json +61 -0
  15. tooluniverse/data/ensembl_tools.json +259 -0
  16. tooluniverse/data/file_download_tools.json +275 -0
  17. tooluniverse/data/geo_tools.json +200 -48
  18. tooluniverse/data/gnomad_tools.json +109 -0
  19. tooluniverse/data/gtopdb_tools.json +68 -0
  20. tooluniverse/data/gwas_tools.json +32 -0
  21. tooluniverse/data/interpro_tools.json +199 -0
  22. tooluniverse/data/jaspar_tools.json +70 -0
  23. tooluniverse/data/kegg_tools.json +356 -0
  24. tooluniverse/data/mpd_tools.json +87 -0
  25. tooluniverse/data/ols_tools.json +314 -0
  26. tooluniverse/data/package_discovery_tools.json +64 -0
  27. tooluniverse/data/packages/categorized_tools.txt +0 -1
  28. tooluniverse/data/packages/machine_learning_tools.json +0 -47
  29. tooluniverse/data/paleobiology_tools.json +91 -0
  30. tooluniverse/data/pride_tools.json +62 -0
  31. tooluniverse/data/pypi_package_inspector_tools.json +158 -0
  32. tooluniverse/data/python_executor_tools.json +341 -0
  33. tooluniverse/data/regulomedb_tools.json +50 -0
  34. tooluniverse/data/remap_tools.json +89 -0
  35. tooluniverse/data/screen_tools.json +89 -0
  36. tooluniverse/data/tool_discovery_agents.json +428 -0
  37. tooluniverse/data/tool_discovery_agents.json.backup +1343 -0
  38. tooluniverse/data/uniprot_tools.json +77 -0
  39. tooluniverse/data/web_search_tools.json +250 -0
  40. tooluniverse/data/worms_tools.json +55 -0
  41. tooluniverse/dbsnp_tool.py +196 -58
  42. tooluniverse/default_config.py +35 -2
  43. tooluniverse/emdb_tool.py +30 -0
  44. tooluniverse/ensembl_tool.py +140 -47
  45. tooluniverse/execute_function.py +74 -14
  46. tooluniverse/file_download_tool.py +269 -0
  47. tooluniverse/geo_tool.py +81 -28
  48. tooluniverse/gnomad_tool.py +100 -52
  49. tooluniverse/gtopdb_tool.py +41 -0
  50. tooluniverse/interpro_tool.py +72 -0
  51. tooluniverse/jaspar_tool.py +30 -0
  52. tooluniverse/kegg_tool.py +230 -0
  53. tooluniverse/mpd_tool.py +42 -0
  54. tooluniverse/ncbi_eutils_tool.py +96 -0
  55. tooluniverse/ols_tool.py +435 -0
  56. tooluniverse/package_discovery_tool.py +217 -0
  57. tooluniverse/paleobiology_tool.py +30 -0
  58. tooluniverse/pride_tool.py +30 -0
  59. tooluniverse/pypi_package_inspector_tool.py +593 -0
  60. tooluniverse/python_executor_tool.py +711 -0
  61. tooluniverse/regulomedb_tool.py +30 -0
  62. tooluniverse/remap_tool.py +44 -0
  63. tooluniverse/remote/depmap_24q2/depmap_24q2_mcp_tool.py +1 -1
  64. tooluniverse/screen_tool.py +44 -0
  65. tooluniverse/smcp_server.py +3 -3
  66. tooluniverse/tool_finder_embedding.py +3 -1
  67. tooluniverse/tool_finder_keyword.py +3 -1
  68. tooluniverse/tool_finder_llm.py +6 -2
  69. tooluniverse/tools/{UCSC_get_genes_by_region.py → BLAST_nucleotide_search.py} +22 -26
  70. tooluniverse/tools/BLAST_protein_search.py +63 -0
  71. tooluniverse/tools/ClinVar_search_variants.py +26 -15
  72. tooluniverse/tools/CodeQualityAnalyzer.py +3 -3
  73. tooluniverse/tools/EMDB_get_structure.py +46 -0
  74. tooluniverse/tools/GtoPdb_get_targets.py +52 -0
  75. tooluniverse/tools/InterPro_get_domain_details.py +46 -0
  76. tooluniverse/tools/InterPro_get_protein_domains.py +49 -0
  77. tooluniverse/tools/InterPro_search_domains.py +52 -0
  78. tooluniverse/tools/JASPAR_get_transcription_factors.py +52 -0
  79. tooluniverse/tools/MPD_get_phenotype_data.py +59 -0
  80. tooluniverse/tools/PRIDE_search_proteomics.py +52 -0
  81. tooluniverse/tools/PackageAnalyzer.py +55 -0
  82. tooluniverse/tools/Paleobiology_get_fossils.py +52 -0
  83. tooluniverse/tools/PyPIPackageInspector.py +59 -0
  84. tooluniverse/tools/ReMap_get_transcription_factor_binding.py +59 -0
  85. tooluniverse/tools/ReferenceInfoAnalyzer.py +55 -0
  86. tooluniverse/tools/RegulomeDB_query_variant.py +46 -0
  87. tooluniverse/tools/SCREEN_get_regulatory_elements.py +59 -0
  88. tooluniverse/tools/{ArgumentDescriptionOptimizer.py → TestResultsAnalyzer.py} +13 -13
  89. tooluniverse/tools/ToolDiscover.py +11 -11
  90. tooluniverse/tools/UniProt_id_mapping.py +63 -0
  91. tooluniverse/tools/UniProt_search.py +63 -0
  92. tooluniverse/tools/UnifiedToolGenerator.py +59 -0
  93. tooluniverse/tools/WoRMS_search_species.py +49 -0
  94. tooluniverse/tools/XMLToolOptimizer.py +55 -0
  95. tooluniverse/tools/__init__.py +119 -29
  96. tooluniverse/tools/alphafold_get_annotations.py +3 -3
  97. tooluniverse/tools/alphafold_get_prediction.py +3 -3
  98. tooluniverse/tools/alphafold_get_summary.py +3 -3
  99. tooluniverse/tools/cBioPortal_get_cancer_studies.py +46 -0
  100. tooluniverse/tools/cBioPortal_get_mutations.py +52 -0
  101. tooluniverse/tools/{gnomAD_query_variant.py → clinvar_get_clinical_significance.py} +8 -11
  102. tooluniverse/tools/clinvar_get_variant_details.py +49 -0
  103. tooluniverse/tools/dbSNP_get_variant_by_rsid.py +7 -7
  104. tooluniverse/tools/dbsnp_get_frequencies.py +46 -0
  105. tooluniverse/tools/dbsnp_search_by_gene.py +52 -0
  106. tooluniverse/tools/download_binary_file.py +66 -0
  107. tooluniverse/tools/download_file.py +71 -0
  108. tooluniverse/tools/download_text_content.py +55 -0
  109. tooluniverse/tools/dynamic_package_discovery.py +59 -0
  110. tooluniverse/tools/ensembl_get_sequence.py +52 -0
  111. tooluniverse/tools/{Ensembl_lookup_gene_by_symbol.py → ensembl_get_variants.py} +11 -11
  112. tooluniverse/tools/ensembl_lookup_gene.py +46 -0
  113. tooluniverse/tools/geo_get_dataset_info.py +46 -0
  114. tooluniverse/tools/geo_get_sample_info.py +46 -0
  115. tooluniverse/tools/geo_search_datasets.py +67 -0
  116. tooluniverse/tools/gnomad_get_gene_constraints.py +49 -0
  117. tooluniverse/tools/kegg_find_genes.py +52 -0
  118. tooluniverse/tools/kegg_get_gene_info.py +46 -0
  119. tooluniverse/tools/kegg_get_pathway_info.py +46 -0
  120. tooluniverse/tools/kegg_list_organisms.py +44 -0
  121. tooluniverse/tools/kegg_search_pathway.py +46 -0
  122. tooluniverse/tools/ols_find_similar_terms.py +63 -0
  123. tooluniverse/tools/{get_hyperopt_info.py → ols_get_ontology_info.py} +13 -10
  124. tooluniverse/tools/ols_get_term_ancestors.py +67 -0
  125. tooluniverse/tools/ols_get_term_children.py +67 -0
  126. tooluniverse/tools/{TestCaseGenerator.py → ols_get_term_info.py} +12 -9
  127. tooluniverse/tools/{CodeOptimizer.py → ols_search_ontologies.py} +22 -14
  128. tooluniverse/tools/ols_search_terms.py +71 -0
  129. tooluniverse/tools/python_code_executor.py +79 -0
  130. tooluniverse/tools/python_script_runner.py +79 -0
  131. tooluniverse/tools/web_api_documentation_search.py +63 -0
  132. tooluniverse/tools/web_search.py +71 -0
  133. tooluniverse/uniprot_tool.py +219 -16
  134. tooluniverse/url_tool.py +18 -0
  135. tooluniverse/utils.py +2 -2
  136. tooluniverse/web_search_tool.py +229 -0
  137. tooluniverse/worms_tool.py +64 -0
  138. {tooluniverse-1.0.10.dist-info → tooluniverse-1.0.11.dist-info}/METADATA +3 -2
  139. {tooluniverse-1.0.10.dist-info → tooluniverse-1.0.11.dist-info}/RECORD +143 -54
  140. tooluniverse/data/genomics_tools.json +0 -174
  141. tooluniverse/tools/ToolDescriptionOptimizer.py +0 -67
  142. tooluniverse/tools/ToolImplementationGenerator.py +0 -67
  143. tooluniverse/tools/ToolOptimizer.py +0 -59
  144. tooluniverse/tools/ToolSpecificationGenerator.py +0 -67
  145. tooluniverse/tools/ToolSpecificationOptimizer.py +0 -63
  146. tooluniverse/ucsc_tool.py +0 -60
  147. {tooluniverse-1.0.10.dist-info → tooluniverse-1.0.11.dist-info}/WHEEL +0 -0
  148. {tooluniverse-1.0.10.dist-info → tooluniverse-1.0.11.dist-info}/entry_points.txt +0 -0
  149. {tooluniverse-1.0.10.dist-info → tooluniverse-1.0.11.dist-info}/licenses/LICENSE +0 -0
  150. {tooluniverse-1.0.10.dist-info → tooluniverse-1.0.11.dist-info}/top_level.txt +0 -0

tooluniverse/pride_tool.py
@@ -0,0 +1,30 @@
+ import requests
+ from typing import Any, Dict
+ from .base_tool import BaseTool
+ from .tool_registry import register_tool
+
+
+ @register_tool("PRIDERESTTool")
+ class PRIDERESTTool(BaseTool):
+     def __init__(self, tool_config: Dict):
+         super().__init__(tool_config)
+         self.base_url = "https://www.ebi.ac.uk/pride/ws/archive/v2"
+         self.session = requests.Session()
+         self.session.headers.update({"Accept": "application/json"})
+         self.timeout = 30
+
+     def _build_url(self, args: Dict[str, Any]) -> str:
+         url = self.tool_config["fields"]["endpoint"]
+         for k, v in args.items():
+             url = url.replace(f"{{{k}}}", str(v))
+         return url
+
+     def run(self, arguments: Dict[str, Any]) -> Dict[str, Any]:
+         try:
+             url = self._build_url(arguments)
+             response = self.session.get(url, timeout=self.timeout)
+             response.raise_for_status()
+             data = response.json()
+             return {"status": "success", "data": data, "url": url}
+         except Exception as e:
+             return {"status": "error", "error": f"PRIDE API error: {str(e)}"}

tooluniverse/pypi_package_inspector_tool.py
@@ -0,0 +1,593 @@
+ """PyPI Package Inspector - Comprehensive package information extraction"""
+
+ import requests
+ import time
+ from datetime import datetime, timedelta
+ from typing import Dict, Any
+ from .tool_registry import register_tool
+
+
+ @register_tool(
+     "PyPIPackageInspector",
+     config={
+         "name": "PyPIPackageInspector",
+         "type": "PyPIPackageInspector",
+         "description": (
+             "Extracts comprehensive package information from PyPI and GitHub "
+             "for quality evaluation. Provides detailed metrics on popularity, "
+             "maintenance, security, and compatibility."
+         ),
+         "parameter": {
+             "type": "object",
+             "properties": {
+                 "package_name": {
+                     "type": "string",
+                     "description": "Name of the Python package to inspect",
+                 },
+                 "include_github": {
+                     "type": "boolean",
+                     "description": "Whether to fetch GitHub statistics",
+                     "default": True,
+                 },
+                 "include_downloads": {
+                     "type": "boolean",
+                     "description": "Whether to fetch download statistics",
+                     "default": True,
+                 },
+             },
+             "required": ["package_name"],
+         },
+     },
+ )
+ class PyPIPackageInspector:
+     """
+     Extracts comprehensive package information from PyPI and GitHub.
+     Provides detailed metrics on popularity, maintenance, security,
+     and compatibility.
+     """
+
+     def __init__(self, tool_config: Dict[str, Any] = None):
+         self.tool_config = tool_config or {}
+         self.pypi_api_url = "https://pypi.org/pypi/{package}/json"
+         self.pypistats_api_url = "https://pypistats.org/api/packages/{package}/recent"
+         self.github_api_url = "https://api.github.com/repos/{owner}/{repo}"
+         self.session = requests.Session()
+         self.session.headers.update(
+             {
+                 "User-Agent": "ToolUniverse-PyPIInspector/1.0",
+                 "Accept": "application/vnd.github.v3+json",
+             }
+         )
+
+         # GitHub token if available
+         github_token = self.tool_config.get("github_token")
+         if github_token:
+             self.session.headers["Authorization"] = f"token {github_token}"
+
+     def _get_pypi_metadata(self, package_name: str) -> Dict[str, Any]:
+         """Fetch comprehensive metadata from PyPI"""
+         try:
+             response = self.session.get(
+                 self.pypi_api_url.format(package=package_name), timeout=10
+             )
+
+             if response.status_code == 404:
+                 return {"error": "Package not found on PyPI"}
+
+             response.raise_for_status()
+             data = response.json()
+
+             info = data.get("info", {})
+             releases = data.get("releases", {})
+             urls = data.get("urls", [])
+
+             # Parse project URLs to find GitHub repo
+             project_urls = info.get("project_urls", {})
+             github_url = None
+             for _key, url in project_urls.items():
+                 if url and "github.com" in url.lower():
+                     github_url = url
+                     break
+
+             # If no project URLs, check home_page
+             if not github_url and info.get("home_page"):
+                 if "github.com" in info.get("home_page", "").lower():
+                     github_url = info["home_page"]
+
+             # Get release history
+             release_dates = []
+             for _version, files in releases.items():
+                 if files:
+                     upload_time = files[0].get("upload_time_iso_8601")
+                     if upload_time:
+                         try:
+                             date = datetime.fromisoformat(
+                                 upload_time.replace("Z", "+00:00")
+                             )
+                             release_dates.append(date)
+                         except (ValueError, AttributeError):
+                             pass
+
+             release_dates.sort(reverse=True)
+
+             # Calculate maintenance metrics
+             latest_release = release_dates[0] if release_dates else None
+             days_since_last_release = None
+             if latest_release:
+                 time_diff = datetime.now(latest_release.tzinfo) - latest_release
+                 days_since_last_release = time_diff.days
+
+             # Count recent releases (last year)
+             one_year_ago = datetime.now() - timedelta(days=365)
+             recent_releases = sum(
+                 1 for date in release_dates if date.replace(tzinfo=None) > one_year_ago
+             )
+
+             return {
+                 "name": info.get("name"),
+                 "version": info.get("version"),
+                 "summary": info.get("summary", ""),
+                 "description_length": len(info.get("description", "")),
+                 "author": info.get("author", ""),
+                 "author_email": info.get("author_email", ""),
+                 "maintainer": info.get("maintainer", ""),
+                 "license": info.get("license", ""),
+                 "requires_python": info.get("requires_python", ""),
+                 "requires_dist": info.get("requires_dist", []),
+                 "classifiers": info.get("classifiers", []),
+                 "keywords": info.get("keywords", ""),
+                 "project_urls": project_urls,
+                 "github_url": github_url,
+                 "home_page": info.get("home_page", ""),
+                 "package_url": info.get("package_url", ""),
+                 "release_url": info.get("release_url", ""),
+                 "docs_url": info.get("docs_url", ""),
+                 # Release metrics
+                 "total_releases": len(releases),
+                 "latest_release_date": (
+                     latest_release.isoformat() if latest_release else None
+                 ),
+                 "days_since_last_release": days_since_last_release,
+                 "releases_last_year": recent_releases,
+                 # File metrics
+                 "has_wheel": any(
+                     url.get("packagetype") == "bdist_wheel" for url in urls
+                 ),
+                 "has_source": any(url.get("packagetype") == "sdist" for url in urls),
+             }
+
+         except requests.exceptions.RequestException as e:
+             return {"error": f"PyPI API error: {str(e)}"}
+         except Exception as e:
+             return {"error": f"Unexpected error: {str(e)}"}
+
+     def _get_download_stats(self, package_name: str) -> Dict[str, Any]:
+         """Fetch download statistics from pypistats.org"""
+         try:
+             response = self.session.get(
+                 self.pypistats_api_url.format(package=package_name), timeout=10
+             )
+
+             if response.status_code == 200:
+                 data = response.json()
+                 return {
+                     "downloads_last_day": (data.get("data", {}).get("last_day", 0)),
+                     "downloads_last_week": (data.get("data", {}).get("last_week", 0)),
+                     "downloads_last_month": (data.get("data", {}).get("last_month", 0)),
+                 }
+             else:
+                 return {
+                     "downloads_last_day": 0,
+                     "downloads_last_week": 0,
+                     "downloads_last_month": 0,
+                     "note": "Download stats unavailable",
+                 }
+
+         except Exception as e:
+             return {
+                 "downloads_last_day": 0,
+                 "downloads_last_week": 0,
+                 "downloads_last_month": 0,
+                 "error": str(e),
+             }
+
+     def _get_github_stats(self, github_url: str) -> Dict[str, Any]:
+         """Fetch repository statistics from GitHub"""
+         try:
+             # Parse owner and repo from URL
+             # Expected format: https://github.com/owner/repo
+             parts = github_url.rstrip("/").split("/")
+             if len(parts) < 2:
+                 return {"error": "Invalid GitHub URL format"}
+
+             repo_name = parts[-1]
+             owner = parts[-2]
+
+             # Remove .git suffix if present
+             if repo_name.endswith(".git"):
+                 repo_name = repo_name[:-4]
+
+             response = self.session.get(
+                 self.github_api_url.format(owner=owner, repo=repo_name), timeout=10
+             )
+
+             if response.status_code == 404:
+                 return {"error": "GitHub repository not found"}
+
+             response.raise_for_status()
+             data = response.json()
+
+             # Calculate activity metrics
+             pushed_at = data.get("pushed_at")
+             days_since_last_push = None
+             if pushed_at:
+                 try:
+                     last_push = datetime.fromisoformat(pushed_at.replace("Z", "+00:00"))
+                     time_diff = datetime.now(last_push.tzinfo) - last_push
+                     days_since_last_push = time_diff.days
+                 except (ValueError, AttributeError):
+                     pass
+
+             return {
+                 "stars": data.get("stargazers_count", 0),
+                 "watchers": data.get("subscribers_count", 0),
+                 "forks": data.get("forks_count", 0),
+                 "open_issues": data.get("open_issues_count", 0),
+                 "created_at": data.get("created_at"),
+                 "updated_at": data.get("updated_at"),
+                 "pushed_at": pushed_at,
+                 "days_since_last_push": days_since_last_push,
+                 "default_branch": data.get("default_branch", "main"),
+                 "language": data.get("language"),
+                 "has_issues": data.get("has_issues", False),
+                 "has_wiki": data.get("has_wiki", False),
+                 "has_pages": data.get("has_pages", False),
+                 "archived": data.get("archived", False),
+                 "disabled": data.get("disabled", False),
+                 "license": (
+                     data.get("license", {}).get("name") if data.get("license") else None
+                 ),
+                 "topics": data.get("topics", []),
+                 "description": data.get("description", ""),
+             }
+
+         except requests.exceptions.RequestException as e:
+             return {"error": f"GitHub API error: {str(e)}"}
+         except Exception as e:
+             return {"error": f"Unexpected error: {str(e)}"}
+
+     def _calculate_quality_scores(
+         self, pypi_data: Dict, downloads: Dict, github_data: Dict
+     ) -> Dict[str, Any]:
+         """Calculate quality scores based on collected metrics"""
+
+         scores = {
+             "popularity_score": 0,
+             "maintenance_score": 0,
+             "documentation_score": 0,
+             "compatibility_score": 0,
+             "security_score": 0,
+             "overall_score": 0,
+         }
+
+         # === POPULARITY SCORE ===
+         popularity = 0
+
+         # Downloads (40 points max)
+         downloads_last_month = downloads.get("downloads_last_month", 0)
+         if downloads_last_month > 1000000:
+             popularity += 40
+         elif downloads_last_month > 100000:
+             popularity += 30
+         elif downloads_last_month > 10000:
+             popularity += 20
+         elif downloads_last_month > 1000:
+             popularity += 10
+         elif downloads_last_month > 100:
+             popularity += 5
+
+         # GitHub stars (30 points max)
+         stars = github_data.get("stars", 0)
+         if stars > 10000:
+             popularity += 30
+         elif stars > 5000:
+             popularity += 25
+         elif stars > 1000:
+             popularity += 20
+         elif stars > 500:
+             popularity += 15
+         elif stars > 100:
+             popularity += 10
+         elif stars > 10:
+             popularity += 5
+
+         # Forks (15 points max)
+         forks = github_data.get("forks", 0)
+         if forks > 1000:
+             popularity += 15
+         elif forks > 500:
+             popularity += 12
+         elif forks > 100:
+             popularity += 9
+         elif forks > 50:
+             popularity += 6
+         elif forks > 10:
+             popularity += 3
+
+         # Total releases (15 points max)
+         total_releases = pypi_data.get("total_releases", 0)
+         if total_releases > 100:
+             popularity += 15
+         elif total_releases > 50:
+             popularity += 12
+         elif total_releases > 20:
+             popularity += 9
+         elif total_releases > 10:
+             popularity += 6
+         elif total_releases > 5:
+             popularity += 3
+
+         scores["popularity_score"] = min(popularity, 100)
+
+         # === MAINTENANCE SCORE ===
+         maintenance = 0
+
+         # Recent release (40 points max)
+         days_since_release = pypi_data.get("days_since_last_release")
+         if days_since_release is not None:
+             if days_since_release <= 30:
+                 maintenance += 40
+             elif days_since_release <= 90:
+                 maintenance += 30
+             elif days_since_release <= 180:
+                 maintenance += 20
+             elif days_since_release <= 365:
+                 maintenance += 10
+             elif days_since_release <= 730:
+                 maintenance += 5
+
+         # Recent GitHub activity (30 points max)
+         days_since_push = github_data.get("days_since_last_push")
+         if days_since_push is not None:
+             if days_since_push <= 7:
+                 maintenance += 30
+             elif days_since_push <= 30:
+                 maintenance += 25
+             elif days_since_push <= 90:
+                 maintenance += 20
+             elif days_since_push <= 180:
+                 maintenance += 10
+             elif days_since_push <= 365:
+                 maintenance += 5
+
+         # Release frequency (30 points max)
+         releases_last_year = pypi_data.get("releases_last_year", 0)
+         if releases_last_year >= 12:
+             maintenance += 30
+         elif releases_last_year >= 6:
+             maintenance += 25
+         elif releases_last_year >= 4:
+             maintenance += 20
+         elif releases_last_year >= 2:
+             maintenance += 15
+         elif releases_last_year >= 1:
+             maintenance += 10
+
+         scores["maintenance_score"] = min(maintenance, 100)
+
+         # === DOCUMENTATION SCORE ===
+         documentation = 0
+
+         # Has documentation URL (30 points)
+         if pypi_data.get("project_urls", {}).get("Documentation") or pypi_data.get(
+             "docs_url"
+         ):
+             documentation += 30
+
+         # Description length (30 points)
+         desc_length = pypi_data.get("description_length", 0)
+         if desc_length > 5000:
+             documentation += 30
+         elif desc_length > 2000:
+             documentation += 20
+         elif desc_length > 500:
+             documentation += 10
+         elif desc_length > 100:
+             documentation += 5
+
+         # Has README/wiki (20 points)
+         if github_data.get("has_wiki"):
+             documentation += 10
+         if github_data.get("has_pages"):
+             documentation += 10
+
+         # Keywords (10 points)
+         if pypi_data.get("keywords"):
+             documentation += 10
+
+         # Classifiers (10 points)
+         if len(pypi_data.get("classifiers", [])) > 5:
+             documentation += 10
+         elif len(pypi_data.get("classifiers", [])) > 0:
+             documentation += 5
+
+         scores["documentation_score"] = min(documentation, 100)
+
+         # === COMPATIBILITY SCORE ===
+         compatibility = 0
+
+         # Has wheel distribution (40 points)
+         if pypi_data.get("has_wheel"):
+             compatibility += 40
+
+         # Has source distribution (20 points)
+         if pypi_data.get("has_source"):
+             compatibility += 20
+
+         # Python version support (40 points)
+         requires_python = pypi_data.get("requires_python", "")
+         if requires_python:
+             compatibility += 20
+             # Check for broad compatibility
+             if "3.6" in requires_python or "3.7" in requires_python:
+                 compatibility += 20
+
+         scores["compatibility_score"] = min(compatibility, 100)
+
+         # === SECURITY SCORE ===
+         security = 0
+
+         # Has license (30 points)
+         if pypi_data.get("license") or github_data.get("license"):
+             security += 30
+
+         # Not archived or disabled (30 points)
+         if not github_data.get("archived", False) and not github_data.get(
+             "disabled", False
+         ):
+             security += 30
+
+         # Active issue management (20 points)
+         open_issues = github_data.get("open_issues", 0)
+         if github_data.get("has_issues"):
+             if open_issues < 10:
+                 security += 20
+             elif open_issues < 50:
+                 security += 15
+             elif open_issues < 100:
+                 security += 10
+             else:
+                 security += 5
+
+         # Has maintainer (20 points)
+         if pypi_data.get("maintainer") or pypi_data.get("author"):
+             security += 20
+
+         scores["security_score"] = min(security, 100)
+
+         # === OVERALL SCORE ===
+         # Weighted average
+         scores["overall_score"] = int(
+             scores["popularity_score"] * 0.25
+             + scores["maintenance_score"] * 0.30
+             + scores["documentation_score"] * 0.20
+             + scores["compatibility_score"] * 0.15
+             + scores["security_score"] * 0.10
+         )
+
+         return scores
+
+     def _generate_recommendation(
+         self, scores: Dict, pypi_data: Dict, github_data: Dict
+     ) -> str:
+         """Generate a human-readable recommendation based on scores"""
+         overall = scores["overall_score"]
+
+         if overall >= 80:
+             recommendation = (
+                 "✅ HIGHLY RECOMMENDED - Excellent package with "
+                 "strong community support"
+             )
+         elif overall >= 60:
+             recommendation = "👍 RECOMMENDED - Good package with acceptable quality"
+         elif overall >= 40:
+             recommendation = "⚠️ USE WITH CAUTION - Package has some concerns"
+         else:
+             recommendation = "❌ NOT RECOMMENDED - Consider alternatives"
+
+         # Add specific concerns
+         concerns = []
+         if scores["maintenance_score"] < 40:
+             concerns.append("Poor maintenance")
+         if scores["popularity_score"] < 30:
+             concerns.append("Low popularity")
+         if scores["documentation_score"] < 40:
+             concerns.append("Insufficient documentation")
+         if github_data.get("archived"):
+             concerns.append("Repository is archived")
+
+         if concerns:
+             recommendation += f" ({', '.join(concerns)})"
+
+         return recommendation
+
+     def run(self, arguments: Dict[str, Any]) -> Dict[str, Any]:
+         """
+         Inspect a package and return comprehensive quality metrics
+
+         Args:
+             package_name: Name of the package to inspect
+             include_github: Whether to fetch GitHub stats (default: True)
+             include_downloads: Whether to fetch download stats (default: True)
+
+         Returns:
+             Dict with package metadata, statistics, and quality scores
+         """
+         try:
+             package_name = arguments.get("package_name", "").strip()
+             if not package_name:
+                 return {"status": "error", "error": "package_name is required"}
+
+             include_github = arguments.get("include_github", True)
+             include_downloads = arguments.get("include_downloads", True)
+
+             print(f"🔍 Inspecting package: {package_name}")
+
+             # Step 1: Get PyPI metadata
+             print(" 📦 Fetching PyPI metadata...")
+             pypi_data = self._get_pypi_metadata(package_name)
+
+             if "error" in pypi_data:
+                 return {
+                     "status": "error",
+                     "error": pypi_data["error"],
+                     "package_name": package_name,
+                 }
+
+             # Step 2: Get download statistics
+             downloads = {}
+             if include_downloads:
+                 print(" 📊 Fetching download statistics...")
+                 downloads = self._get_download_stats(package_name)
+                 time.sleep(0.5)  # Rate limiting
+
+             # Step 3: Get GitHub statistics
+             github_data = {}
+             if include_github and pypi_data.get("github_url"):
+                 print(
+                     f" 🐙 Fetching GitHub statistics from "
+                     f"{pypi_data['github_url']}..."
+                 )
+                 github_data = self._get_github_stats(pypi_data["github_url"])
+                 time.sleep(0.5)  # Rate limiting
+
+             # Step 4: Calculate quality scores
+             print(" 🎯 Calculating quality scores...")
+             scores = self._calculate_quality_scores(pypi_data, downloads, github_data)
+
+             # Compile comprehensive report
+             result = {
+                 "status": "success",
+                 "package_name": package_name,
+                 "pypi_metadata": pypi_data,
+                 "download_stats": downloads,
+                 "github_stats": github_data,
+                 "quality_scores": scores,
+                 "recommendation": self._generate_recommendation(
+                     scores, pypi_data, github_data
+                 ),
+             }
+
+             print(
+                 f"✅ Inspection complete - Overall score: "
+                 f"{scores['overall_score']}/100"
+             )
+
+             return result
+
+         except Exception as e:
+             return {
+                 "status": "error",
+                 "error": str(e),
+                 "package_name": arguments.get("package_name", ""),
+             }
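
For reference, the overall score at the end of _calculate_quality_scores is a weighted average of the five sub-scores (popularity 25%, maintenance 30%, documentation 20%, compatibility 15%, security 10%), truncated to an integer. A quick worked example with assumed sub-scores, independent of the class itself:

# Assumed sub-scores for illustration only; real values come from the PyPI/GitHub lookups above.
scores = {
    "popularity_score": 70,
    "maintenance_score": 90,
    "documentation_score": 60,
    "compatibility_score": 80,
    "security_score": 100,
}
overall = int(
    scores["popularity_score"] * 0.25       # 17.5
    + scores["maintenance_score"] * 0.30    # 27.0
    + scores["documentation_score"] * 0.20  # 12.0
    + scores["compatibility_score"] * 0.15  # 12.0
    + scores["security_score"] * 0.10       # 10.0
)
print(overall)  # 78 -> falls in the 60-79 "RECOMMENDED" band of _generate_recommendation

Called directly, PyPIPackageInspector().run({"package_name": "requests"}) returns such a scores dict under quality_scores alongside the PyPI metadata, download stats, GitHub stats, and recommendation string; within the package the class is exposed through the register_tool registry shown in the decorator.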