tooluniverse 1.0.5__py3-none-any.whl → 1.0.7__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of tooluniverse might be problematic.
- tooluniverse/__init__.py +70 -0
- tooluniverse/agentic_tool.py +121 -15
- tooluniverse/arxiv_tool.py +109 -0
- tooluniverse/base_tool.py +174 -25
- tooluniverse/biorxiv_tool.py +116 -0
- tooluniverse/cellosaurus_tool.py +1332 -0
- tooluniverse/compose_scripts/enhanced_multi_agent_literature_search.py +310 -0
- tooluniverse/compose_scripts/multi_agent_literature_search.py +794 -0
- tooluniverse/compose_scripts/tool_graph_generation.py +68 -35
- tooluniverse/compose_scripts/tool_metadata_generator.py +205 -105
- tooluniverse/compose_tool.py +93 -8
- tooluniverse/core_tool.py +155 -0
- tooluniverse/crossref_tool.py +158 -0
- tooluniverse/data/agentic_tools.json +1271 -1179
- tooluniverse/data/alphafold_tools.json +356 -105
- tooluniverse/data/arxiv_tools.json +94 -0
- tooluniverse/data/biorxiv_tools.json +75 -0
- tooluniverse/data/cellosaurus_tools.json +260 -0
- tooluniverse/data/chembl_tools.json +27 -12
- tooluniverse/data/clinicaltrials_gov_tools.json +377 -302
- tooluniverse/data/compose_tools.json +123 -16
- tooluniverse/data/core_tools.json +113 -0
- tooluniverse/data/crossref_tools.json +138 -0
- tooluniverse/data/dailymed_tools.json +17 -3
- tooluniverse/data/dataset_tools.json +1031 -588
- tooluniverse/data/dblp_tools.json +144 -0
- tooluniverse/data/disease_target_score_tools.json +20 -10
- tooluniverse/data/doaj_tools.json +140 -0
- tooluniverse/data/embedding_tools.json +362 -299
- tooluniverse/data/enrichr_tools.json +34 -27
- tooluniverse/data/europe_pmc_tools.json +108 -16
- tooluniverse/data/fatcat_tools.json +77 -0
- tooluniverse/data/fda_drug_adverse_event_tools.json +1061 -445
- tooluniverse/data/fda_drug_labeling_tools.json +6858 -6901
- tooluniverse/data/finder_tools.json +32 -37
- tooluniverse/data/gene_ontology_tools.json +19 -7
- tooluniverse/data/gwas_tools.json +1720 -959
- tooluniverse/data/hal_tools.json +75 -0
- tooluniverse/data/hpa_tools.json +53 -14
- tooluniverse/data/humanbase_tools.json +51 -43
- tooluniverse/data/idmap_tools.json +76 -70
- tooluniverse/data/literature_search_tools.json +306 -0
- tooluniverse/data/mcp_client_tools_example.json +122 -107
- tooluniverse/data/medlineplus_tools.json +50 -10
- tooluniverse/data/medrxiv_tools.json +75 -0
- tooluniverse/data/molecule_2d_tools.json +134 -0
- tooluniverse/data/molecule_3d_tools.json +164 -0
- tooluniverse/data/monarch_tools.json +112 -110
- tooluniverse/data/odphp_tools.json +389 -119
- tooluniverse/data/openaire_tools.json +95 -0
- tooluniverse/data/openalex_tools.json +100 -31
- tooluniverse/data/opentarget_tools.json +1457 -1372
- tooluniverse/data/osf_preprints_tools.json +81 -0
- tooluniverse/data/packages/bioinformatics_core_tools.json +40 -10
- tooluniverse/data/packages/cheminformatics_tools.json +20 -5
- tooluniverse/data/packages/genomics_tools.json +36 -9
- tooluniverse/data/packages/machine_learning_tools.json +36 -9
- tooluniverse/data/packages/scientific_computing_tools.json +20 -5
- tooluniverse/data/packages/single_cell_tools.json +20 -5
- tooluniverse/data/packages/structural_biology_tools.json +16 -4
- tooluniverse/data/packages/visualization_tools.json +20 -5
- tooluniverse/data/pmc_tools.json +117 -0
- tooluniverse/data/protein_structure_3d_tools.json +138 -0
- tooluniverse/data/pubchem_tools.json +37 -12
- tooluniverse/data/pubmed_tools.json +133 -0
- tooluniverse/data/pubtator_tools.json +68 -60
- tooluniverse/data/rcsb_pdb_tools.json +1532 -1221
- tooluniverse/data/semantic_scholar_tools.json +55 -22
- tooluniverse/data/special_tools.json +8 -6
- tooluniverse/data/tool_composition_tools.json +112 -82
- tooluniverse/data/unified_guideline_tools.json +707 -0
- tooluniverse/data/unpaywall_tools.json +86 -0
- tooluniverse/data/url_fetch_tools.json +102 -82
- tooluniverse/data/uspto_tools.json +49 -30
- tooluniverse/data/wikidata_sparql_tools.json +45 -0
- tooluniverse/data/xml_tools.json +3274 -3113
- tooluniverse/data/zenodo_tools.json +90 -0
- tooluniverse/dblp_tool.py +132 -0
- tooluniverse/default_config.py +30 -0
- tooluniverse/doaj_tool.py +183 -0
- tooluniverse/doctor.py +48 -0
- tooluniverse/europe_pmc_tool.py +132 -17
- tooluniverse/exceptions.py +170 -0
- tooluniverse/execute_function.py +825 -342
- tooluniverse/fatcat_tool.py +65 -0
- tooluniverse/generate_tools.py +198 -0
- tooluniverse/hal_tool.py +77 -0
- tooluniverse/llm_clients.py +283 -20
- tooluniverse/mcp_tool_registry.py +4 -1
- tooluniverse/medrxiv_tool.py +116 -0
- tooluniverse/memory_manager.py +166 -0
- tooluniverse/molecule_2d_tool.py +274 -0
- tooluniverse/molecule_3d_tool.py +441 -0
- tooluniverse/odphp_tool.py +49 -14
- tooluniverse/openaire_tool.py +130 -0
- tooluniverse/openalex_tool.py +34 -0
- tooluniverse/osf_preprints_tool.py +67 -0
- tooluniverse/pmc_tool.py +179 -0
- tooluniverse/protein_structure_3d_tool.py +295 -0
- tooluniverse/pubmed_tool.py +173 -0
- tooluniverse/remote/boltz/boltz_mcp_server.py +3 -1
- tooluniverse/remote/uspto_downloader/uspto_downloader_mcp_server.py +3 -1
- tooluniverse/semantic_scholar_tool.py +40 -10
- tooluniverse/smcp.py +228 -263
- tooluniverse/smcp_server.py +97 -55
- tooluniverse/tool_registry.py +35 -3
- tooluniverse/tools/ADMETAI_predict_BBB_penetrance.py +46 -0
- tooluniverse/tools/ADMETAI_predict_CYP_interactions.py +46 -0
- tooluniverse/tools/ADMETAI_predict_bioavailability.py +46 -0
- tooluniverse/tools/ADMETAI_predict_clearance_distribution.py +49 -0
- tooluniverse/tools/ADMETAI_predict_nuclear_receptor_activity.py +49 -0
- tooluniverse/tools/ADMETAI_predict_physicochemical_properties.py +49 -0
- tooluniverse/tools/ADMETAI_predict_solubility_lipophilicity_hydration.py +49 -0
- tooluniverse/tools/ADMETAI_predict_stress_response.py +46 -0
- tooluniverse/tools/ADMETAI_predict_toxicity.py +46 -0
- tooluniverse/tools/AdvancedCodeQualityAnalyzer.py +63 -0
- tooluniverse/tools/AdverseEventICDMapper.py +46 -0
- tooluniverse/tools/AdverseEventPredictionQuestionGenerator.py +52 -0
- tooluniverse/tools/AdverseEventPredictionQuestionGeneratorWithContext.py +59 -0
- tooluniverse/tools/ArXiv_search_papers.py +63 -0
- tooluniverse/tools/ArgumentDescriptionOptimizer.py +55 -0
- tooluniverse/tools/BioRxiv_search_preprints.py +52 -0
- tooluniverse/tools/BiomarkerDiscoveryWorkflow.py +55 -0
- tooluniverse/tools/CORE_search_papers.py +67 -0
- tooluniverse/tools/CallAgent.py +46 -0
- tooluniverse/tools/ChEMBL_search_similar_molecules.py +59 -0
- tooluniverse/tools/CodeOptimizer.py +55 -0
- tooluniverse/tools/CodeQualityAnalyzer.py +71 -0
- tooluniverse/tools/ComprehensiveDrugDiscoveryPipeline.py +49 -0
- tooluniverse/tools/Crossref_search_works.py +55 -0
- tooluniverse/tools/DBLP_search_publications.py +52 -0
- tooluniverse/tools/DOAJ_search_articles.py +55 -0
- tooluniverse/tools/DailyMed_get_spl_by_setid.py +52 -0
- tooluniverse/tools/DailyMed_search_spls.py +79 -0
- tooluniverse/tools/DataAnalysisValidityReviewer.py +49 -0
- tooluniverse/tools/DescriptionAnalyzer.py +55 -0
- tooluniverse/tools/DescriptionQualityEvaluator.py +59 -0
- tooluniverse/tools/DomainExpertValidator.py +63 -0
- tooluniverse/tools/DrugSafetyAnalyzer.py +59 -0
- tooluniverse/tools/EthicalComplianceReviewer.py +49 -0
- tooluniverse/tools/EuropePMC_Guidelines_Search.py +52 -0
- tooluniverse/tools/EuropePMC_search_articles.py +52 -0
- tooluniverse/tools/ExperimentalDesignScorer.py +55 -0
- tooluniverse/tools/FAERS_count_additive_administration_routes.py +52 -0
- tooluniverse/tools/FAERS_count_additive_adverse_reactions.py +71 -0
- tooluniverse/tools/FAERS_count_additive_event_reports_by_country.py +63 -0
- tooluniverse/tools/FAERS_count_additive_reaction_outcomes.py +63 -0
- tooluniverse/tools/FAERS_count_additive_reports_by_reporter_country.py +63 -0
- tooluniverse/tools/FAERS_count_additive_seriousness_classification.py +63 -0
- tooluniverse/tools/FAERS_count_country_by_drug_event.py +63 -0
- tooluniverse/tools/FAERS_count_death_related_by_drug.py +49 -0
- tooluniverse/tools/FAERS_count_drug_routes_by_event.py +52 -0
- tooluniverse/tools/FAERS_count_drugs_by_drug_event.py +63 -0
- tooluniverse/tools/FAERS_count_outcomes_by_drug_event.py +63 -0
- tooluniverse/tools/FAERS_count_patient_age_distribution.py +49 -0
- tooluniverse/tools/FAERS_count_reactions_by_drug_event.py +71 -0
- tooluniverse/tools/FAERS_count_reportercountry_by_drug_event.py +63 -0
- tooluniverse/tools/FAERS_count_seriousness_by_drug_event.py +63 -0
- tooluniverse/tools/FDA_get_abuse_dependence_info_by_drug_name.py +55 -0
- tooluniverse/tools/FDA_get_abuse_info_by_drug_name.py +55 -0
- tooluniverse/tools/FDA_get_accessories_info_by_drug_name.py +55 -0
- tooluniverse/tools/FDA_get_active_ingredient_info_by_drug_name.py +55 -0
- tooluniverse/tools/FDA_get_adverse_reactions_by_drug_name.py +55 -0
- tooluniverse/tools/FDA_get_alarms_by_drug_name.py +55 -0
- tooluniverse/tools/FDA_get_animal_pharmacology_info_by_drug_name.py +55 -0
- tooluniverse/tools/FDA_get_assembly_installation_info_by_drug_name.py +55 -0
- tooluniverse/tools/FDA_get_boxed_warning_info_by_drug_name.py +55 -0
- tooluniverse/tools/FDA_get_brand_name_generic_name.py +52 -0
- tooluniverse/tools/FDA_get_calibration_instructions_by_drug_name.py +55 -0
- tooluniverse/tools/FDA_get_carcinogenic_mutagenic_fertility_by_drug_name.py +55 -0
- tooluniverse/tools/FDA_get_child_safety_info_by_drug_name.py +55 -0
- tooluniverse/tools/FDA_get_clinical_pharmacology_by_drug_name.py +55 -0
- tooluniverse/tools/FDA_get_clinical_studies_info_by_drug_name.py +55 -0
- tooluniverse/tools/FDA_get_contact_for_questions_info_by_drug_name.py +55 -0
- tooluniverse/tools/FDA_get_contraindications_by_drug_name.py +55 -0
- tooluniverse/tools/FDA_get_controlled_substance_DEA_schedule_info_by_drug_name.py +55 -0
- tooluniverse/tools/FDA_get_dear_health_care_provider_letter_info_by_drug_name.py +55 -0
- tooluniverse/tools/FDA_get_dependence_info_by_drug_name.py +55 -0
- tooluniverse/tools/FDA_get_disposal_info_by_drug_name.py +55 -0
- tooluniverse/tools/FDA_get_do_not_use_info_by_drug_name.py +55 -0
- tooluniverse/tools/FDA_get_document_id_by_drug_name.py +55 -0
- tooluniverse/tools/FDA_get_dosage_and_storage_information_by_drug_name.py +55 -0
- tooluniverse/tools/FDA_get_dosage_forms_and_strengths_by_drug_name.py +55 -0
- tooluniverse/tools/FDA_get_drug_generic_name.py +46 -0
- tooluniverse/tools/FDA_get_drug_interactions_by_drug_name.py +55 -0
- tooluniverse/tools/FDA_get_drug_name_by_SPL_ID.py +55 -0
- tooluniverse/tools/FDA_get_drug_name_by_adverse_reaction.py +59 -0
- tooluniverse/tools/FDA_get_drug_name_by_calibration_instructions.py +59 -0
- tooluniverse/tools/FDA_get_drug_name_by_dependence_info.py +59 -0
- tooluniverse/tools/FDA_get_drug_name_by_document_id.py +55 -0
- tooluniverse/tools/FDA_get_drug_name_by_dosage_info.py +55 -0
- tooluniverse/tools/FDA_get_drug_name_by_environmental_warning.py +59 -0
- tooluniverse/tools/FDA_get_drug_name_by_inactive_ingredient.py +59 -0
- tooluniverse/tools/FDA_get_drug_name_by_info_on_conditions_for_doctor_consultation.py +55 -0
- tooluniverse/tools/FDA_get_drug_name_by_labor_and_delivery_info.py +59 -0
- tooluniverse/tools/FDA_get_drug_name_by_microbiology.py +59 -0
- tooluniverse/tools/FDA_get_drug_name_by_other_safety_info.py +55 -0
- tooluniverse/tools/FDA_get_drug_name_by_pharmacodynamics.py +59 -0
- tooluniverse/tools/FDA_get_drug_name_by_pharmacogenomics.py +59 -0
- tooluniverse/tools/FDA_get_drug_name_by_precautions.py +55 -0
- tooluniverse/tools/FDA_get_drug_name_by_pregnancy_or_breastfeeding_info.py +59 -0
- tooluniverse/tools/FDA_get_drug_name_by_principal_display_panel.py +59 -0
- tooluniverse/tools/FDA_get_drug_name_by_reference.py +55 -0
- tooluniverse/tools/FDA_get_drug_name_by_set_id.py +55 -0
- tooluniverse/tools/FDA_get_drug_name_by_stop_use_info.py +55 -0
- tooluniverse/tools/FDA_get_drug_name_by_storage_and_handling_info.py +55 -0
- tooluniverse/tools/FDA_get_drug_name_by_warnings.py +55 -0
- tooluniverse/tools/FDA_get_drug_name_from_patient_package_insert.py +59 -0
- tooluniverse/tools/FDA_get_drug_names_by_abuse_dependence_info.py +55 -0
- tooluniverse/tools/FDA_get_drug_names_by_abuse_info.py +63 -0
- tooluniverse/tools/FDA_get_drug_names_by_accessories.py +63 -0
- tooluniverse/tools/FDA_get_drug_names_by_active_ingredient.py +63 -0
- tooluniverse/tools/FDA_get_drug_names_by_alarm.py +63 -0
- tooluniverse/tools/FDA_get_drug_names_by_animal_pharmacology_info.py +63 -0
- tooluniverse/tools/FDA_get_drug_names_by_application_number_NDC_number.py +59 -0
- tooluniverse/tools/FDA_get_drug_names_by_assembly_installation_info.py +63 -0
- tooluniverse/tools/FDA_get_drug_names_by_boxed_warning.py +63 -0
- tooluniverse/tools/FDA_get_drug_names_by_child_safety_info.py +63 -0
- tooluniverse/tools/FDA_get_drug_names_by_clinical_pharmacology.py +63 -0
- tooluniverse/tools/FDA_get_drug_names_by_clinical_studies.py +63 -0
- tooluniverse/tools/FDA_get_drug_names_by_consulting_doctor_pharmacist_info.py +63 -0
- tooluniverse/tools/FDA_get_drug_names_by_contraindications.py +63 -0
- tooluniverse/tools/FDA_get_drug_names_by_controlled_substance_DEA_schedule.py +63 -0
- tooluniverse/tools/FDA_get_drug_names_by_dear_health_care_provider_letter_info.py +63 -0
- tooluniverse/tools/FDA_get_drug_names_by_disposal_info.py +63 -0
- tooluniverse/tools/FDA_get_drug_names_by_dosage_forms_and_strengths_info.py +63 -0
- tooluniverse/tools/FDA_get_drug_names_by_drug_interactions.py +63 -0
- tooluniverse/tools/FDA_get_drug_names_by_effective_time.py +63 -0
- tooluniverse/tools/FDA_get_drug_names_by_food_safety_warnings.py +63 -0
- tooluniverse/tools/FDA_get_drug_names_by_general_precautions.py +63 -0
- tooluniverse/tools/FDA_get_drug_names_by_geriatric_use.py +63 -0
- tooluniverse/tools/FDA_get_drug_names_by_health_claim.py +63 -0
- tooluniverse/tools/FDA_get_drug_names_by_indication.py +55 -0
- tooluniverse/tools/FDA_get_drug_names_by_info_for_nursing_mothers.py +63 -0
- tooluniverse/tools/FDA_get_drug_names_by_information_for_owners_or_caregivers.py +63 -0
- tooluniverse/tools/FDA_get_drug_names_by_ingredient.py +63 -0
- tooluniverse/tools/FDA_get_drug_names_by_instructions_for_use.py +63 -0
- tooluniverse/tools/FDA_get_drug_names_by_lab_test_interference.py +63 -0
- tooluniverse/tools/FDA_get_drug_names_by_lab_tests.py +63 -0
- tooluniverse/tools/FDA_get_drug_names_by_mechanism_of_action.py +63 -0
- tooluniverse/tools/FDA_get_drug_names_by_medication_guide.py +63 -0
- tooluniverse/tools/FDA_get_drug_names_by_nonclinical_toxicology_info.py +63 -0
- tooluniverse/tools/FDA_get_drug_names_by_nonteratogenic_effects.py +63 -0
- tooluniverse/tools/FDA_get_drug_names_by_overdosage_info.py +63 -0
- tooluniverse/tools/FDA_get_drug_names_by_pediatric_use.py +63 -0
- tooluniverse/tools/FDA_get_drug_names_by_pharmacokinetics.py +63 -0
- tooluniverse/tools/FDA_get_drug_names_by_population_use.py +63 -0
- tooluniverse/tools/FDA_get_drug_names_by_pregnancy_effects_info.py +63 -0
- tooluniverse/tools/FDA_get_drug_names_by_residue_warning.py +63 -0
- tooluniverse/tools/FDA_get_drug_names_by_risk.py +63 -0
- tooluniverse/tools/FDA_get_drug_names_by_route.py +63 -0
- tooluniverse/tools/FDA_get_drug_names_by_safe_handling_warning.py +63 -0
- tooluniverse/tools/FDA_get_drug_names_by_safety_summary.py +63 -0
- tooluniverse/tools/FDA_get_drug_names_by_spl_indexing_data_elements.py +63 -0
- tooluniverse/tools/FDA_get_drug_names_by_teratogenic_effects.py +63 -0
- tooluniverse/tools/FDA_get_drug_names_by_user_safety_warning.py +63 -0
- tooluniverse/tools/FDA_get_drug_names_by_warnings_and_cautions.py +63 -0
- tooluniverse/tools/FDA_get_drugs_by_carcinogenic_mutagenic_fertility.py +63 -0
- tooluniverse/tools/FDA_get_effective_time_by_drug_name.py +55 -0
- tooluniverse/tools/FDA_get_environmental_warning_by_drug_name.py +55 -0
- tooluniverse/tools/FDA_get_general_precautions_by_drug_name.py +55 -0
- tooluniverse/tools/FDA_get_geriatric_use_info_by_drug_name.py +55 -0
- tooluniverse/tools/FDA_get_health_claims_by_drug_name.py +55 -0
- tooluniverse/tools/FDA_get_inactive_ingredient_info_by_drug_name.py +55 -0
- tooluniverse/tools/FDA_get_indications_by_drug_name.py +55 -0
- tooluniverse/tools/FDA_get_info_for_nursing_mothers_by_drug_name.py +55 -0
- tooluniverse/tools/FDA_get_info_for_patients_by_drug_name.py +55 -0
- tooluniverse/tools/FDA_get_info_on_conditions_for_doctor_consultation_by_drug_name.py +55 -0
- tooluniverse/tools/FDA_get_info_on_consulting_doctor_pharmacist_by_drug_name.py +55 -0
- tooluniverse/tools/FDA_get_information_for_owners_or_caregivers_by_drug_name.py +55 -0
- tooluniverse/tools/FDA_get_ingredients_by_drug_name.py +55 -0
- tooluniverse/tools/FDA_get_instructions_for_use_by_drug_name.py +55 -0
- tooluniverse/tools/FDA_get_lab_test_interference_info_by_drug_name.py +55 -0
- tooluniverse/tools/FDA_get_lab_tests_by_drug_name.py +55 -0
- tooluniverse/tools/FDA_get_labor_and_delivery_info_by_drug_name.py +55 -0
- tooluniverse/tools/FDA_get_manufacturer_name_NDC_number_by_drug_name.py +55 -0
- tooluniverse/tools/FDA_get_mechanism_of_action_by_drug_name.py +55 -0
- tooluniverse/tools/FDA_get_medication_guide_info_by_drug_name.py +55 -0
- tooluniverse/tools/FDA_get_microbiology_info_by_drug_name.py +55 -0
- tooluniverse/tools/FDA_get_nonclinical_toxicology_info_by_drug_name.py +55 -0
- tooluniverse/tools/FDA_get_nonteratogenic_effects_by_drug_name.py +55 -0
- tooluniverse/tools/FDA_get_other_safety_info_by_drug_name.py +55 -0
- tooluniverse/tools/FDA_get_overdosage_info_by_drug_name.py +55 -0
- tooluniverse/tools/FDA_get_patient_package_insert_from_drug_name.py +55 -0
- tooluniverse/tools/FDA_get_pediatric_use_info_by_drug_name.py +55 -0
- tooluniverse/tools/FDA_get_pharmacodynamics_by_drug_name.py +55 -0
- tooluniverse/tools/FDA_get_pharmacogenomics_info_by_drug_name.py +55 -0
- tooluniverse/tools/FDA_get_pharmacokinetics_by_drug_name.py +55 -0
- tooluniverse/tools/FDA_get_population_use_info_by_drug_name.py +55 -0
- tooluniverse/tools/FDA_get_precautions_by_drug_name.py +55 -0
- tooluniverse/tools/FDA_get_pregnancy_effects_info_by_drug_name.py +55 -0
- tooluniverse/tools/FDA_get_pregnancy_or_breastfeeding_info_by_drug_name.py +55 -0
- tooluniverse/tools/FDA_get_principal_display_panel_by_drug_name.py +55 -0
- tooluniverse/tools/FDA_get_purpose_info_by_drug_name.py +55 -0
- tooluniverse/tools/FDA_get_recent_changes_by_drug_name.py +55 -0
- tooluniverse/tools/FDA_get_reference_info_by_drug_name.py +55 -0
- tooluniverse/tools/FDA_get_residue_warning_by_drug_name.py +55 -0
- tooluniverse/tools/FDA_get_risk_info_by_drug_name.py +55 -0
- tooluniverse/tools/FDA_get_route_info_by_drug_name.py +55 -0
- tooluniverse/tools/FDA_get_safe_handling_warnings_by_drug_name.py +55 -0
- tooluniverse/tools/FDA_get_safety_summary_by_drug_name.py +55 -0
- tooluniverse/tools/FDA_get_spl_indexing_data_elements_by_drug_name.py +55 -0
- tooluniverse/tools/FDA_get_spl_unclassified_section_by_drug_name.py +55 -0
- tooluniverse/tools/FDA_get_stop_use_info_by_drug_name.py +55 -0
- tooluniverse/tools/FDA_get_storage_and_handling_info_by_drug_name.py +55 -0
- tooluniverse/tools/FDA_get_teratogenic_effects_by_drug_name.py +55 -0
- tooluniverse/tools/FDA_get_user_safety_warning_by_drug_names.py +55 -0
- tooluniverse/tools/FDA_get_warnings_and_cautions_by_drug_name.py +55 -0
- tooluniverse/tools/FDA_get_warnings_by_drug_name.py +55 -0
- tooluniverse/tools/FDA_get_when_using_info.py +55 -0
- tooluniverse/tools/FDA_retrieve_device_use_by_drug_name.py +55 -0
- tooluniverse/tools/FDA_retrieve_drug_name_by_device_use.py +59 -0
- tooluniverse/tools/FDA_retrieve_drug_names_by_patient_medication_info.py +55 -0
- tooluniverse/tools/FDA_retrieve_patient_medication_info_by_drug_name.py +55 -0
- tooluniverse/tools/Fatcat_search_scholar.py +52 -0
- tooluniverse/tools/Finish.py +44 -0
- tooluniverse/tools/GO_get_annotations_for_gene.py +46 -0
- tooluniverse/tools/GO_get_genes_for_term.py +55 -0
- tooluniverse/tools/GO_get_term_by_id.py +46 -0
- tooluniverse/tools/GO_get_term_details.py +46 -0
- tooluniverse/tools/GO_search_terms.py +46 -0
- tooluniverse/tools/HAL_search_archive.py +52 -0
- tooluniverse/tools/HPA_get_biological_processes_by_gene.py +52 -0
- tooluniverse/tools/HPA_get_cancer_prognostics_by_gene.py +49 -0
- tooluniverse/tools/HPA_get_comparative_expression_by_gene_and_cellline.py +52 -0
- tooluniverse/tools/HPA_get_comprehensive_gene_details_by_ensembl_id.py +63 -0
- tooluniverse/tools/HPA_get_contextual_biological_process_analysis.py +52 -0
- tooluniverse/tools/HPA_get_disease_expression_by_gene_tissue_disease.py +59 -0
- tooluniverse/tools/HPA_get_gene_basic_info_by_ensembl_id.py +49 -0
- tooluniverse/tools/HPA_get_gene_tsv_data_by_ensembl_id.py +49 -0
- tooluniverse/tools/HPA_get_protein_interactions_by_gene.py +49 -0
- tooluniverse/tools/HPA_get_rna_expression_by_source.py +59 -0
- tooluniverse/tools/HPA_get_rna_expression_in_specific_tissues.py +52 -0
- tooluniverse/tools/HPA_get_subcellular_location.py +46 -0
- tooluniverse/tools/HPA_search_genes_by_query.py +49 -0
- tooluniverse/tools/HypothesisGenerator.py +63 -0
- tooluniverse/tools/LabelGenerator.py +67 -0
- tooluniverse/tools/LiteratureContextReviewer.py +55 -0
- tooluniverse/tools/LiteratureSearchTool.py +49 -0
- tooluniverse/tools/MedRxiv_search_preprints.py +52 -0
- tooluniverse/tools/MedicalLiteratureReviewer.py +71 -0
- tooluniverse/tools/MedicalTermNormalizer.py +46 -0
- tooluniverse/tools/MedlinePlus_connect_lookup_by_code.py +67 -0
- tooluniverse/tools/MedlinePlus_get_genetics_condition_by_name.py +52 -0
- tooluniverse/tools/MedlinePlus_get_genetics_gene_by_name.py +52 -0
- tooluniverse/tools/MedlinePlus_get_genetics_index.py +44 -0
- tooluniverse/tools/MedlinePlus_search_topics_by_keyword.py +55 -0
- tooluniverse/tools/MethodologyRigorReviewer.py +49 -0
- tooluniverse/tools/MultiAgentLiteratureSearch.py +59 -0
- tooluniverse/tools/NICE_Clinical_Guidelines_Search.py +52 -0
- tooluniverse/tools/NICE_Guideline_Full_Text.py +46 -0
- tooluniverse/tools/NoveltySignificanceReviewer.py +59 -0
- tooluniverse/tools/OSF_search_preprints.py +59 -0
- tooluniverse/tools/OSL_get_efo_id_by_disease_name.py +46 -0
- tooluniverse/tools/OpenAIRE_search_publications.py +55 -0
- tooluniverse/tools/OpenAlex_Guidelines_Search.py +63 -0
- tooluniverse/tools/OpenTargets_drug_pharmacogenomics_data.py +52 -0
- tooluniverse/tools/OpenTargets_get_approved_indications_by_drug_chemblId.py +49 -0
- tooluniverse/tools/OpenTargets_get_associated_diseases_by_drug_chemblId.py +49 -0
- tooluniverse/tools/OpenTargets_get_associated_drugs_by_disease_efoId.py +52 -0
- tooluniverse/tools/OpenTargets_get_associated_drugs_by_target_ensemblID.py +55 -0
- tooluniverse/tools/OpenTargets_get_associated_phenotypes_by_disease_efoId.py +49 -0
- tooluniverse/tools/OpenTargets_get_associated_targets_by_disease_efoId.py +49 -0
- tooluniverse/tools/OpenTargets_get_associated_targets_by_drug_chemblId.py +49 -0
- tooluniverse/tools/OpenTargets_get_biological_mouse_models_by_ensemblID.py +49 -0
- tooluniverse/tools/OpenTargets_get_chemical_probes_by_target_ensemblID.py +49 -0
- tooluniverse/tools/OpenTargets_get_disease_ancestors_parents_by_efoId.py +49 -0
- tooluniverse/tools/OpenTargets_get_disease_descendants_children_by_efoId.py +49 -0
- tooluniverse/tools/OpenTargets_get_disease_description_by_efoId.py +49 -0
- tooluniverse/tools/OpenTargets_get_disease_id_description_by_name.py +49 -0
- tooluniverse/tools/OpenTargets_get_disease_ids_by_efoId.py +46 -0
- tooluniverse/tools/OpenTargets_get_disease_ids_by_name.py +46 -0
- tooluniverse/tools/OpenTargets_get_disease_locations_by_efoId.py +49 -0
- tooluniverse/tools/OpenTargets_get_disease_synonyms_by_efoId.py +49 -0
- tooluniverse/tools/OpenTargets_get_disease_therapeutic_areas_by_efoId.py +49 -0
- tooluniverse/tools/OpenTargets_get_diseases_phenotypes_by_target_ensembl.py +49 -0
- tooluniverse/tools/OpenTargets_get_drug_adverse_events_by_chemblId.py +52 -0
- tooluniverse/tools/OpenTargets_get_drug_approval_status_by_chemblId.py +49 -0
- tooluniverse/tools/OpenTargets_get_drug_chembId_by_generic_name.py +49 -0
- tooluniverse/tools/OpenTargets_get_drug_description_by_chemblId.py +49 -0
- tooluniverse/tools/OpenTargets_get_drug_id_description_by_name.py +49 -0
- tooluniverse/tools/OpenTargets_get_drug_indications_by_chemblId.py +49 -0
- tooluniverse/tools/OpenTargets_get_drug_mechanisms_of_action_by_chemblId.py +49 -0
- tooluniverse/tools/OpenTargets_get_drug_synonyms_by_chemblId.py +49 -0
- tooluniverse/tools/OpenTargets_get_drug_trade_names_by_chemblId.py +49 -0
- tooluniverse/tools/OpenTargets_get_drug_warnings_by_chemblId.py +49 -0
- tooluniverse/tools/OpenTargets_get_drug_withdrawn_blackbox_status_by_chemblId.py +49 -0
- tooluniverse/tools/OpenTargets_get_gene_ontology_terms_by_goID.py +49 -0
- tooluniverse/tools/OpenTargets_get_known_drugs_by_drug_chemblId.py +49 -0
- tooluniverse/tools/OpenTargets_get_parent_child_molecules_by_drug_chembl_ID.py +49 -0
- tooluniverse/tools/OpenTargets_get_publications_by_disease_efoId.py +71 -0
- tooluniverse/tools/OpenTargets_get_publications_by_drug_chemblId.py +71 -0
- tooluniverse/tools/OpenTargets_get_publications_by_target_ensemblID.py +71 -0
- tooluniverse/tools/OpenTargets_get_similar_entities_by_disease_efoId.py +55 -0
- tooluniverse/tools/OpenTargets_get_similar_entities_by_drug_chemblId.py +55 -0
- tooluniverse/tools/OpenTargets_get_similar_entities_by_target_ensemblID.py +55 -0
- tooluniverse/tools/OpenTargets_get_target_classes_by_ensemblID.py +49 -0
- tooluniverse/tools/OpenTargets_get_target_constraint_info_by_ensemblID.py +49 -0
- tooluniverse/tools/OpenTargets_get_target_enabling_packages_by_ensemblID.py +49 -0
- tooluniverse/tools/OpenTargets_get_target_gene_ontology_by_ensemblID.py +49 -0
- tooluniverse/tools/OpenTargets_get_target_genomic_location_by_ensemblID.py +49 -0
- tooluniverse/tools/OpenTargets_get_target_homologues_by_ensemblID.py +49 -0
- tooluniverse/tools/OpenTargets_get_target_id_description_by_name.py +49 -0
- tooluniverse/tools/OpenTargets_get_target_interactions_by_ensemblID.py +52 -0
- tooluniverse/tools/OpenTargets_get_target_safety_profile_by_ensemblID.py +49 -0
- tooluniverse/tools/OpenTargets_get_target_subcellular_locations_by_ensemblID.py +49 -0
- tooluniverse/tools/OpenTargets_get_target_synonyms_by_ensemblID.py +49 -0
- tooluniverse/tools/OpenTargets_get_target_tractability_by_ensemblID.py +49 -0
- tooluniverse/tools/OpenTargets_map_any_disease_id_to_all_other_ids.py +49 -0
- tooluniverse/tools/OpenTargets_multi_entity_search_by_query_string.py +59 -0
- tooluniverse/tools/OpenTargets_search_category_counts_by_query_string.py +49 -0
- tooluniverse/tools/OpenTargets_target_disease_evidence.py +52 -0
- tooluniverse/tools/OutputSummarizationComposer.py +71 -0
- tooluniverse/tools/PMC_search_papers.py +67 -0
- tooluniverse/tools/ProtocolOptimizer.py +49 -0
- tooluniverse/tools/PubChem_get_CID_by_SMILES.py +46 -0
- tooluniverse/tools/PubChem_get_CID_by_compound_name.py +46 -0
- tooluniverse/tools/PubChem_get_associated_patents_by_CID.py +46 -0
- tooluniverse/tools/PubChem_get_compound_2D_image_by_CID.py +52 -0
- tooluniverse/tools/PubChem_get_compound_properties_by_CID.py +46 -0
- tooluniverse/tools/PubChem_get_compound_synonyms_by_CID.py +46 -0
- tooluniverse/tools/PubChem_get_compound_xrefs_by_CID.py +52 -0
- tooluniverse/tools/PubChem_search_compounds_by_similarity.py +52 -0
- tooluniverse/tools/PubChem_search_compounds_by_substructure.py +49 -0
- tooluniverse/tools/PubMed_Guidelines_Search.py +55 -0
- tooluniverse/tools/PubMed_search_articles.py +55 -0
- tooluniverse/tools/PubTator3_EntityAutocomplete.py +59 -0
- tooluniverse/tools/PubTator3_LiteratureSearch.py +55 -0
- tooluniverse/tools/QuestionRephraser.py +52 -0
- tooluniverse/tools/Reactome_get_pathway_reactions.py +46 -0
- tooluniverse/tools/ReproducibilityTransparencyReviewer.py +49 -0
- tooluniverse/tools/ResultsInterpretationReviewer.py +55 -0
- tooluniverse/tools/ScientificTextSummarizer.py +59 -0
- tooluniverse/tools/SemanticScholar_search_papers.py +55 -0
- tooluniverse/tools/TRIP_Database_Guidelines_Search.py +55 -0
- tooluniverse/tools/TestCaseGenerator.py +46 -0
- tooluniverse/tools/ToolCompatibilityAnalyzer.py +59 -0
- tooluniverse/tools/ToolDescriptionOptimizer.py +67 -0
- tooluniverse/tools/ToolDiscover.py +63 -0
- tooluniverse/tools/ToolGraphComposer.py +71 -0
- tooluniverse/tools/ToolGraphGenerationPipeline.py +63 -0
- tooluniverse/tools/ToolImplementationGenerator.py +67 -0
- tooluniverse/tools/ToolMetadataGenerationPipeline.py +63 -0
- tooluniverse/tools/ToolMetadataGenerator.py +55 -0
- tooluniverse/tools/ToolMetadataStandardizer.py +52 -0
- tooluniverse/tools/ToolOptimizer.py +59 -0
- tooluniverse/tools/ToolOutputSummarizer.py +67 -0
- tooluniverse/tools/ToolQualityEvaluator.py +59 -0
- tooluniverse/tools/ToolRelationshipDetector.py +52 -0
- tooluniverse/tools/ToolSpecificationGenerator.py +67 -0
- tooluniverse/tools/ToolSpecificationOptimizer.py +63 -0
- tooluniverse/tools/Tool_Finder.py +67 -0
- tooluniverse/tools/Tool_Finder_Keyword.py +67 -0
- tooluniverse/tools/Tool_Finder_LLM.py +67 -0
- tooluniverse/tools/Tool_RAG.py +49 -0
- tooluniverse/tools/UniProt_get_alternative_names_by_accession.py +49 -0
- tooluniverse/tools/UniProt_get_disease_variants_by_accession.py +49 -0
- tooluniverse/tools/UniProt_get_entry_by_accession.py +49 -0
- tooluniverse/tools/UniProt_get_function_by_accession.py +49 -0
- tooluniverse/tools/UniProt_get_isoform_ids_by_accession.py +49 -0
- tooluniverse/tools/UniProt_get_organism_by_accession.py +49 -0
- tooluniverse/tools/UniProt_get_ptm_processing_by_accession.py +49 -0
- tooluniverse/tools/UniProt_get_recommended_name_by_accession.py +49 -0
- tooluniverse/tools/UniProt_get_sequence_by_accession.py +49 -0
- tooluniverse/tools/UniProt_get_subcellular_location_by_accession.py +49 -0
- tooluniverse/tools/Unpaywall_check_oa_status.py +52 -0
- tooluniverse/tools/WHO_Guideline_Full_Text.py +46 -0
- tooluniverse/tools/WHO_Guidelines_Search.py +52 -0
- tooluniverse/tools/Wikidata_SPARQL_query.py +52 -0
- tooluniverse/tools/WritingPresentationReviewer.py +49 -0
- tooluniverse/tools/Zenodo_search_records.py +59 -0
- tooluniverse/tools/__init__.py +1738 -0
- tooluniverse/tools/_shared_client.py +138 -0
- tooluniverse/tools/alphafold_get_annotations.py +52 -0
- tooluniverse/tools/alphafold_get_prediction.py +55 -0
- tooluniverse/tools/alphafold_get_summary.py +46 -0
- tooluniverse/tools/call_agentic_human.py +46 -0
- tooluniverse/tools/cancer_biomarkers_disease_target_score.py +52 -0
- tooluniverse/tools/cancer_gene_census_disease_target_score.py +52 -0
- tooluniverse/tools/cellosaurus_get_cell_line_info.py +55 -0
- tooluniverse/tools/cellosaurus_query_converter.py +52 -0
- tooluniverse/tools/cellosaurus_search_cell_lines.py +55 -0
- tooluniverse/tools/chembl_disease_target_score.py +52 -0
- tooluniverse/tools/dict_search.py +67 -0
- tooluniverse/tools/dili_search.py +67 -0
- tooluniverse/tools/diqt_search.py +67 -0
- tooluniverse/tools/disease_target_score.py +59 -0
- tooluniverse/tools/drugbank_filter_drugs_by_name.py +55 -0
- tooluniverse/tools/drugbank_full_search.py +67 -0
- tooluniverse/tools/drugbank_get_drug_basic_info_by_drug_name_or_drugbank_id.py +63 -0
- tooluniverse/tools/drugbank_get_drug_chemistry_by_drug_name_or_drugbank_id.py +63 -0
- tooluniverse/tools/drugbank_get_drug_interactions_by_drug_name_or_drugbank_id.py +63 -0
- tooluniverse/tools/drugbank_get_drug_name_and_description_by_indication.py +63 -0
- tooluniverse/tools/drugbank_get_drug_name_and_description_by_pathway_name.py +63 -0
- tooluniverse/tools/drugbank_get_drug_name_and_description_by_target_name.py +63 -0
- tooluniverse/tools/drugbank_get_drug_name_description_pharmacology_by_mechanism_of_action.py +63 -0
- tooluniverse/tools/drugbank_get_drug_pathways_and_reactions_by_drug_name_or_drugbank_id.py +63 -0
- tooluniverse/tools/drugbank_get_drug_products_by_name_or_drugbank_id.py +63 -0
- tooluniverse/tools/drugbank_get_drug_references_by_drug_name_or_drugbank_id.py +63 -0
- tooluniverse/tools/drugbank_get_indications_by_drug_name_or_drugbank_id.py +63 -0
- tooluniverse/tools/drugbank_get_pharmacology_by_drug_name_or_drugbank_id.py +63 -0
- tooluniverse/tools/drugbank_get_safety_by_drug_name_or_drugbank_id.py +63 -0
- tooluniverse/tools/drugbank_get_targets_by_drug_name_or_drugbank_id.py +63 -0
- tooluniverse/tools/drugbank_links_search.py +67 -0
- tooluniverse/tools/drugbank_vocab_filter.py +63 -0
- tooluniverse/tools/drugbank_vocab_search.py +67 -0
- tooluniverse/tools/embedding_database_add.py +63 -0
- tooluniverse/tools/embedding_database_create.py +71 -0
- tooluniverse/tools/embedding_database_load.py +63 -0
- tooluniverse/tools/embedding_database_search.py +67 -0
- tooluniverse/tools/embedding_sync_download.py +63 -0
- tooluniverse/tools/embedding_sync_upload.py +71 -0
- tooluniverse/tools/enrichr_gene_enrichment_analysis.py +52 -0
- tooluniverse/tools/europepmc_disease_target_score.py +52 -0
- tooluniverse/tools/eva_disease_target_score.py +52 -0
- tooluniverse/tools/eva_somatic_disease_target_score.py +52 -0
- tooluniverse/tools/expression_atlas_disease_target_score.py +52 -0
- tooluniverse/tools/extract_clinical_trial_adverse_events.py +59 -0
- tooluniverse/tools/extract_clinical_trial_outcomes.py +52 -0
- tooluniverse/tools/genomics_england_disease_target_score.py +52 -0
- tooluniverse/tools/get_HPO_ID_by_phenotype.py +55 -0
- tooluniverse/tools/get_albumentations_info.py +44 -0
- tooluniverse/tools/get_altair_info.py +44 -0
- tooluniverse/tools/get_anndata_info.py +49 -0
- tooluniverse/tools/get_arboreto_info.py +46 -0
- tooluniverse/tools/get_arxiv_info.py +46 -0
- tooluniverse/tools/get_ase_info.py +46 -0
- tooluniverse/tools/get_assembly_info_by_pdb_id.py +46 -0
- tooluniverse/tools/get_assembly_summary.py +46 -0
- tooluniverse/tools/get_astropy_info.py +44 -0
- tooluniverse/tools/get_binding_affinity_by_pdb_id.py +46 -0
- tooluniverse/tools/get_biopandas_info.py +49 -0
- tooluniverse/tools/get_biopython_info.py +49 -0
- tooluniverse/tools/get_bioservices_info.py +44 -0
- tooluniverse/tools/get_biotite_info.py +49 -0
- tooluniverse/tools/get_bokeh_info.py +44 -0
- tooluniverse/tools/get_brian2_info.py +44 -0
- tooluniverse/tools/get_cartopy_info.py +44 -0
- tooluniverse/tools/get_catboost_info.py +44 -0
- tooluniverse/tools/get_cellpose_info.py +49 -0
- tooluniverse/tools/get_cellrank_info.py +44 -0
- tooluniverse/tools/get_cellxgene_census_info.py +46 -0
- tooluniverse/tools/get_cftime_info.py +44 -0
- tooluniverse/tools/get_chem_comp_audit_info.py +46 -0
- tooluniverse/tools/get_chem_comp_charge_and_ambiguity.py +46 -0
- tooluniverse/tools/get_chembl_webresource_client_info.py +44 -0
- tooluniverse/tools/get_citation_info_by_pdb_id.py +46 -0
- tooluniverse/tools/get_clair3_info.py +46 -0
- tooluniverse/tools/get_clinical_trial_conditions_and_interventions.py +55 -0
- tooluniverse/tools/get_clinical_trial_descriptions.py +52 -0
- tooluniverse/tools/get_clinical_trial_eligibility_criteria.py +55 -0
- tooluniverse/tools/get_clinical_trial_locations.py +52 -0
- tooluniverse/tools/get_clinical_trial_outcome_measures.py +52 -0
- tooluniverse/tools/get_clinical_trial_references.py +52 -0
- tooluniverse/tools/get_clinical_trial_status_and_dates.py +52 -0
- tooluniverse/tools/get_cobra_info.py +46 -0
- tooluniverse/tools/get_cobrapy_info.py +46 -0
- tooluniverse/tools/get_cooler_info.py +49 -0
- tooluniverse/tools/get_core_refinement_statistics.py +46 -0
- tooluniverse/tools/get_cryosparc_tools_info.py +46 -0
- tooluniverse/tools/get_crystal_growth_conditions_by_pdb_id.py +49 -0
- tooluniverse/tools/get_crystallization_ph_by_pdb_id.py +46 -0
- tooluniverse/tools/get_crystallographic_properties_by_pdb_id.py +49 -0
- tooluniverse/tools/get_cupy_info.py +44 -0
- tooluniverse/tools/get_cyvcf2_info.py +49 -0
- tooluniverse/tools/get_dask_info.py +44 -0
- tooluniverse/tools/get_datamol_info.py +44 -0
- tooluniverse/tools/get_datashader_info.py +44 -0
- tooluniverse/tools/get_deepchem_info.py +49 -0
- tooluniverse/tools/get_deeppurpose_info.py +46 -0
- tooluniverse/tools/get_deeptools_info.py +46 -0
- tooluniverse/tools/get_deepxde_info.py +49 -0
- tooluniverse/tools/get_dendropy_info.py +44 -0
- tooluniverse/tools/get_descriptastorus_info.py +46 -0
- tooluniverse/tools/get_diffdock_info.py +46 -0
- tooluniverse/tools/get_dscribe_info.py +49 -0
- tooluniverse/tools/get_ec_number_by_entity_id.py +46 -0
- tooluniverse/tools/get_elephant_info.py +44 -0
- tooluniverse/tools/get_em_3d_fitting_and_reconstruction_details.py +49 -0
- tooluniverse/tools/get_emdb_ids_by_pdb_id.py +46 -0
- tooluniverse/tools/get_episcanpy_info.py +44 -0
- tooluniverse/tools/get_ete3_info.py +44 -0
- tooluniverse/tools/get_faiss_info.py +46 -0
- tooluniverse/tools/get_fanc_info.py +46 -0
- tooluniverse/tools/get_flask_info.py +46 -0
- tooluniverse/tools/get_flowio_info.py +46 -0
- tooluniverse/tools/get_flowkit_info.py +46 -0
- tooluniverse/tools/get_flowutils_info.py +46 -0
- tooluniverse/tools/get_freesasa_info.py +44 -0
- tooluniverse/tools/get_galpy_info.py +44 -0
- tooluniverse/tools/get_gene_name_by_entity_id.py +46 -0
- tooluniverse/tools/get_geopandas_info.py +44 -0
- tooluniverse/tools/get_gget_info.py +46 -0
- tooluniverse/tools/get_googlesearch_python_info.py +46 -0
- tooluniverse/tools/get_gseapy_info.py +49 -0
- tooluniverse/tools/get_h5py_info.py +46 -0
- tooluniverse/tools/get_harmony_pytorch_info.py +46 -0
- tooluniverse/tools/get_hmmlearn_info.py +46 -0
- tooluniverse/tools/get_holoviews_info.py +44 -0
- tooluniverse/tools/get_host_organism_by_pdb_id.py +46 -0
- tooluniverse/tools/get_htmd_info.py +44 -0
- tooluniverse/tools/get_hyperopt_info.py +49 -0
- tooluniverse/tools/get_igraph_info.py +49 -0
- tooluniverse/tools/get_imageio_info.py +44 -0
- tooluniverse/tools/get_imbalanced_learn_info.py +44 -0
- tooluniverse/tools/get_jcvi_info.py +46 -0
- tooluniverse/tools/get_joblib_info.py +44 -0
- tooluniverse/tools/get_joint_associated_diseases_by_HPO_ID_list.py +55 -0
- tooluniverse/tools/get_khmer_info.py +46 -0
- tooluniverse/tools/get_kipoiseq_info.py +44 -0
- tooluniverse/tools/get_lifelines_info.py +49 -0
- tooluniverse/tools/get_ligand_bond_count_by_pdb_id.py +46 -0
- tooluniverse/tools/get_ligand_smiles_by_chem_comp_id.py +49 -0
- tooluniverse/tools/get_lightgbm_info.py +44 -0
- tooluniverse/tools/get_loompy_info.py +46 -0
- tooluniverse/tools/get_mageck_info.py +46 -0
- tooluniverse/tools/get_matplotlib_info.py +49 -0
- tooluniverse/tools/get_mdanalysis_info.py +46 -0
- tooluniverse/tools/get_mdtraj_info.py +44 -0
- tooluniverse/tools/get_mne_info.py +44 -0
- tooluniverse/tools/get_molfeat_info.py +44 -0
- tooluniverse/tools/get_molvs_info.py +44 -0
- tooluniverse/tools/get_mordred_info.py +44 -0
- tooluniverse/tools/get_msprime_info.py +49 -0
- tooluniverse/tools/get_mudata_info.py +49 -0
- tooluniverse/tools/get_mutation_annotations_by_pdb_id.py +46 -0
- tooluniverse/tools/get_neo_info.py +44 -0
- tooluniverse/tools/get_netcdf4_info.py +44 -0
- tooluniverse/tools/get_networkx_info.py +46 -0
- tooluniverse/tools/get_nglview_info.py +44 -0
- tooluniverse/tools/get_nilearn_info.py +44 -0
- tooluniverse/tools/get_numba_info.py +46 -0
- tooluniverse/tools/get_numpy_info.py +46 -0
- tooluniverse/tools/get_oligosaccharide_descriptors_by_entity_id.py +49 -0
- tooluniverse/tools/get_openbabel_info.py +49 -0
- tooluniverse/tools/get_openchem_info.py +46 -0
- tooluniverse/tools/get_opencv_info.py +49 -0
- tooluniverse/tools/get_openmm_info.py +49 -0
- tooluniverse/tools/get_optlang_info.py +46 -0
- tooluniverse/tools/get_optuna_info.py +44 -0
- tooluniverse/tools/get_palantir_info.py +44 -0
- tooluniverse/tools/get_pandas_info.py +49 -0
- tooluniverse/tools/get_patsy_info.py +44 -0
- tooluniverse/tools/get_pdbfixer_info.py +46 -0
- tooluniverse/tools/get_phenotype_by_HPO_ID.py +46 -0
- tooluniverse/tools/get_pillow_info.py +44 -0
- tooluniverse/tools/get_plantcv_info.py +46 -0
- tooluniverse/tools/get_plip_info.py +46 -0
- tooluniverse/tools/get_plotly_info.py +44 -0
- tooluniverse/tools/get_poliastro_info.py +46 -0
- tooluniverse/tools/get_polymer_entity_annotations.py +49 -0
- tooluniverse/tools/get_polymer_entity_count_by_pdb_id.py +46 -0
- tooluniverse/tools/get_polymer_entity_ids_by_pdb_id.py +46 -0
- tooluniverse/tools/get_polymer_entity_type_by_entity_id.py +49 -0
- tooluniverse/tools/get_polymer_molecular_weight_by_entity_id.py +49 -0
- tooluniverse/tools/get_poretools_info.py +44 -0
- tooluniverse/tools/get_prody_info.py +46 -0
- tooluniverse/tools/get_protein_classification_by_pdb_id.py +49 -0
- tooluniverse/tools/get_protein_metadata_by_pdb_id.py +46 -0
- tooluniverse/tools/get_pubchempy_info.py +44 -0
- tooluniverse/tools/get_pybedtools_info.py +49 -0
- tooluniverse/tools/get_pybigwig_info.py +46 -0
- tooluniverse/tools/get_pydeseq2_info.py +46 -0
- tooluniverse/tools/get_pyensembl_info.py +44 -0
- tooluniverse/tools/get_pyephem_info.py +44 -0
- tooluniverse/tools/get_pyfaidx_info.py +49 -0
- tooluniverse/tools/get_pyfasta_info.py +44 -0
- tooluniverse/tools/get_pykalman_info.py +46 -0
- tooluniverse/tools/get_pyliftover_info.py +49 -0
- tooluniverse/tools/get_pymassspec_info.py +46 -0
- tooluniverse/tools/get_pymed_info.py +46 -0
- tooluniverse/tools/get_pymzml_info.py +46 -0
- tooluniverse/tools/get_pypdf2_info.py +46 -0
- tooluniverse/tools/get_pyranges_info.py +49 -0
- tooluniverse/tools/get_pyrosetta_info.py +44 -0
- tooluniverse/tools/get_pysam_info.py +46 -0
- tooluniverse/tools/get_pyscenic_info.py +46 -0
- tooluniverse/tools/get_pyscf_info.py +46 -0
- tooluniverse/tools/get_pyscreener_info.py +46 -0
- tooluniverse/tools/get_pytdc_info.py +46 -0
- tooluniverse/tools/get_python_libsbml_info.py +46 -0
- tooluniverse/tools/get_pytorch_info.py +49 -0
- tooluniverse/tools/get_pyvcf_info.py +44 -0
- tooluniverse/tools/get_pyvis_info.py +44 -0
- tooluniverse/tools/get_qutip_info.py +44 -0
- tooluniverse/tools/get_rasterio_info.py +44 -0
- tooluniverse/tools/get_rdkit_info.py +46 -0
- tooluniverse/tools/get_refinement_resolution_by_pdb_id.py +49 -0
- tooluniverse/tools/get_release_deposit_dates_by_pdb_id.py +49 -0
- tooluniverse/tools/get_reportlab_info.py +49 -0
- tooluniverse/tools/get_requests_info.py +49 -0
- tooluniverse/tools/get_ruptures_info.py +46 -0
- tooluniverse/tools/get_scanorama_info.py +44 -0
- tooluniverse/tools/get_scanpy_info.py +49 -0
- tooluniverse/tools/get_schnetpack_info.py +49 -0
- tooluniverse/tools/get_scholarly_info.py +46 -0
- tooluniverse/tools/get_scikit_bio_info.py +49 -0
- tooluniverse/tools/get_scikit_image_info.py +46 -0
- tooluniverse/tools/get_scikit_learn_info.py +49 -0
- tooluniverse/tools/get_scipy_info.py +46 -0
- tooluniverse/tools/get_scrublet_info.py +49 -0
- tooluniverse/tools/get_scvelo_info.py +49 -0
- tooluniverse/tools/get_scvi_tools_info.py +44 -0
- tooluniverse/tools/get_seaborn_info.py +49 -0
- tooluniverse/tools/get_sequence_by_pdb_id.py +46 -0
- tooluniverse/tools/get_sequence_lengths_by_pdb_id.py +46 -0
- tooluniverse/tools/get_sequence_positional_features_by_instance_id.py +49 -0
- tooluniverse/tools/get_skopt_info.py +44 -0
- tooluniverse/tools/get_souporcell_info.py +46 -0
- tooluniverse/tools/get_source_organism_by_pdb_id.py +46 -0
- tooluniverse/tools/get_space_group_by_pdb_id.py +46 -0
- tooluniverse/tools/get_statsmodels_info.py +49 -0
- tooluniverse/tools/get_structure_determination_software_by_pdb_id.py +49 -0
- tooluniverse/tools/get_structure_title_by_pdb_id.py +46 -0
- tooluniverse/tools/get_structure_validation_metrics_by_pdb_id.py +49 -0
- tooluniverse/tools/get_sunpy_info.py +44 -0
- tooluniverse/tools/get_sympy_info.py +46 -0
- tooluniverse/tools/get_target_cofactor_info.py +46 -0
- tooluniverse/tools/get_taxonomy_by_pdb_id.py +46 -0
- tooluniverse/tools/get_tiledb_info.py +46 -0
- tooluniverse/tools/get_tiledbsoma_info.py +46 -0
- tooluniverse/tools/get_torch_geometric_info.py +49 -0
- tooluniverse/tools/get_tqdm_info.py +46 -0
- tooluniverse/tools/get_trackpy_info.py +46 -0
- tooluniverse/tools/get_tskit_info.py +46 -0
- tooluniverse/tools/get_umap_learn_info.py +49 -0
- tooluniverse/tools/get_uniprot_accession_by_entity_id.py +49 -0
- tooluniverse/tools/get_velocyto_info.py +44 -0
- tooluniverse/tools/get_viennarna_info.py +49 -0
- tooluniverse/tools/get_webpage_text_from_url.py +52 -0
- tooluniverse/tools/get_webpage_title.py +49 -0
- tooluniverse/tools/get_xarray_info.py +44 -0
- tooluniverse/tools/get_xesmf_info.py +44 -0
- tooluniverse/tools/get_xgboost_info.py +44 -0
- tooluniverse/tools/get_zarr_info.py +44 -0
- tooluniverse/tools/gwas_get_association_by_id.py +49 -0
- tooluniverse/tools/gwas_get_associations_for_snp.py +67 -0
- tooluniverse/tools/gwas_get_associations_for_study.py +55 -0
- tooluniverse/tools/gwas_get_associations_for_trait.py +55 -0
- tooluniverse/tools/gwas_get_snp_by_id.py +46 -0
- tooluniverse/tools/gwas_get_snps_for_gene.py +55 -0
- tooluniverse/tools/gwas_get_studies_for_trait.py +75 -0
- tooluniverse/tools/gwas_get_study_by_id.py +46 -0
- tooluniverse/tools/gwas_get_variants_for_trait.py +55 -0
- tooluniverse/tools/gwas_search_associations.py +75 -0
- tooluniverse/tools/gwas_search_snps.py +63 -0
- tooluniverse/tools/gwas_search_studies.py +75 -0
- tooluniverse/tools/humanbase_ppi_analysis.py +67 -0
- tooluniverse/tools/mesh_get_subjects_by_pharmacological_action.py +63 -0
- tooluniverse/tools/mesh_get_subjects_by_subject_id.py +63 -0
- tooluniverse/tools/mesh_get_subjects_by_subject_name.py +63 -0
- tooluniverse/tools/mesh_get_subjects_by_subject_scope_or_definition.py +63 -0
- tooluniverse/tools/odphp_itemlist.py +49 -0
- tooluniverse/tools/odphp_myhealthfinder.py +67 -0
- tooluniverse/tools/odphp_outlink_fetch.py +59 -0
- tooluniverse/tools/odphp_topicsearch.py +67 -0
- tooluniverse/tools/openalex_literature_search.py +67 -0
- tooluniverse/tools/reactome_disease_target_score.py +52 -0
- tooluniverse/tools/search_clinical_trials.py +67 -0
- tooluniverse/tools/visualize_molecule_2d.py +83 -0
- tooluniverse/tools/visualize_molecule_3d.py +91 -0
- tooluniverse/tools/visualize_protein_structure_3d.py +79 -0
- tooluniverse/unified_guideline_tools.py +1210 -0
- tooluniverse/unpaywall_tool.py +62 -0
- tooluniverse/utils.py +71 -2
- tooluniverse/visualization_tool.py +897 -0
- tooluniverse/wikidata_sparql_tool.py +60 -0
- tooluniverse/zenodo_tool.py +72 -0
- {tooluniverse-1.0.5.dist-info → tooluniverse-1.0.7.dist-info}/METADATA +12 -2
- tooluniverse-1.0.7.dist-info/RECORD +855 -0
- {tooluniverse-1.0.5.dist-info → tooluniverse-1.0.7.dist-info}/entry_points.txt +4 -0
- tooluniverse/test/list_azure_openai_models.py +0 -210
- tooluniverse/test/mcp_server_test.py +0 -0
- tooluniverse/test/test_admetai_tool.py +0 -370
- tooluniverse/test/test_agentic_tool.py +0 -129
- tooluniverse/test/test_agentic_tool_azure_models.py +0 -91
- tooluniverse/test/test_alphafold_tool.py +0 -108
- tooluniverse/test/test_api_key_validation_min.py +0 -64
- tooluniverse/test/test_chem_tool.py +0 -37
- tooluniverse/test/test_claude_sdk.py +0 -86
- tooluniverse/test/test_compose_lieraturereview.py +0 -63
- tooluniverse/test/test_compose_tool.py +0 -448
- tooluniverse/test/test_dailymed.py +0 -69
- tooluniverse/test/test_dataset_tool.py +0 -200
- tooluniverse/test/test_disease_target_score.py +0 -56
- tooluniverse/test/test_drugbank_filter_examples.py +0 -179
- tooluniverse/test/test_efo.py +0 -31
- tooluniverse/test/test_enrichr_tool.py +0 -21
- tooluniverse/test/test_europe_pmc_tool.py +0 -20
- tooluniverse/test/test_fda_adv.py +0 -95
- tooluniverse/test/test_fda_drug_labeling.py +0 -91
- tooluniverse/test/test_gene_ontology_tools.py +0 -66
- tooluniverse/test/test_global_fallback.py +0 -288
- tooluniverse/test/test_gwas_tool.py +0 -139
- tooluniverse/test/test_hooks_direct.py +0 -219
- tooluniverse/test/test_hpa.py +0 -625
- tooluniverse/test/test_humanbase_tool.py +0 -20
- tooluniverse/test/test_idmap_tools.py +0 -61
- tooluniverse/test/test_list_built_in_tools.py +0 -33
- tooluniverse/test/test_mcp_server.py +0 -211
- tooluniverse/test/test_mcp_tool.py +0 -247
- tooluniverse/test/test_medlineplus.py +0 -220
- tooluniverse/test/test_odphp_tool.py +0 -166
- tooluniverse/test/test_openalex_tool.py +0 -32
- tooluniverse/test/test_openrouter_client.py +0 -288
- tooluniverse/test/test_opentargets.py +0 -28
- tooluniverse/test/test_pubchem_tool.py +0 -116
- tooluniverse/test/test_pubtator_tool.py +0 -37
- tooluniverse/test/test_rcsb_pdb_tool.py +0 -86
- tooluniverse/test/test_reactome.py +0 -54
- tooluniverse/test/test_semantic_scholar_tool.py +0 -24
- tooluniverse/test/test_software_tools.py +0 -147
- tooluniverse/test/test_stdio_hooks.py +0 -285
- tooluniverse/test/test_tool_description_optimizer.py +0 -49
- tooluniverse/test/test_tool_finder.py +0 -26
- tooluniverse/test/test_tool_finder_llm.py +0 -252
- tooluniverse/test/test_tools_find.py +0 -195
- tooluniverse/test/test_uniprot_tools.py +0 -74
- tooluniverse/test/test_uspto_tool.py +0 -72
- tooluniverse/test/test_xml_tool.py +0 -113
- tooluniverse-1.0.5.dist-info/RECORD +0 -198
- {tooluniverse-1.0.5.dist-info → tooluniverse-1.0.7.dist-info}/WHEEL +0 -0
- {tooluniverse-1.0.5.dist-info → tooluniverse-1.0.7.dist-info}/licenses/LICENSE +0 -0
- {tooluniverse-1.0.5.dist-info → tooluniverse-1.0.7.dist-info}/top_level.txt +0 -0
|
@@ -0,0 +1,1210 @@
|
|
|
1
|
+
#!/usr/bin/env python3
|
|
2
|
+
"""
|
|
3
|
+
Unified Guideline Tools
|
|
4
|
+
Consolidated clinical guidelines search tools from multiple sources.
|
|
5
|
+
"""
|
|
6
|
+
|
|
7
|
+
import requests
|
|
8
|
+
import time
|
|
9
|
+
import re
|
|
10
|
+
import xml.etree.ElementTree as ET
|
|
11
|
+
from bs4 import BeautifulSoup
|
|
12
|
+
from .base_tool import BaseTool
|
|
13
|
+
from .tool_registry import register_tool
|
|
14
|
+
|
|
15
|
+
|
|
16
|
+
@register_tool()
|
|
17
|
+
class NICEWebScrapingTool(BaseTool):
|
|
18
|
+
"""
|
|
19
|
+
Real NICE guidelines search using web scraping.
|
|
20
|
+
Makes actual HTTP requests to NICE website and parses HTML responses.
|
|
21
|
+
"""
|
|
22
|
+
|
|
23
|
+
def __init__(self, tool_config):
|
|
24
|
+
super().__init__(tool_config)
|
|
25
|
+
self.base_url = "https://www.nice.org.uk"
|
|
26
|
+
self.search_url = f"{self.base_url}/search"
|
|
27
|
+
self.session = requests.Session()
|
|
28
|
+
self.session.headers.update(
|
|
29
|
+
{
|
|
30
|
+
"User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/91.0.4472.124 Safari/537.36"
|
|
31
|
+
}
|
|
32
|
+
)
|
|
33
|
+
|
|
34
|
+
def run(self, arguments):
|
|
35
|
+
query = arguments.get("query", "")
|
|
36
|
+
limit = arguments.get("limit", 10)
|
|
37
|
+
|
|
38
|
+
if not query:
|
|
39
|
+
return {"error": "Query parameter is required"}
|
|
40
|
+
|
|
41
|
+
return self._search_nice_guidelines_real(query, limit)
|
|
42
|
+
|
|
43
|
+
def _fetch_guideline_summary(self, url):
|
|
44
|
+
"""Fetch summary from a guideline detail page."""
|
|
45
|
+
try:
|
|
46
|
+
time.sleep(0.5) # Be respectful
|
|
47
|
+
response = self.session.get(url, timeout=15)
|
|
48
|
+
response.raise_for_status()
|
|
49
|
+
soup = BeautifulSoup(response.content, "html.parser")
|
|
50
|
+
|
|
51
|
+
# Try to find overview section
|
|
52
|
+
overview = soup.find("div", {"class": "chapter-overview"})
|
|
53
|
+
if overview:
|
|
54
|
+
paragraphs = overview.find_all("p")
|
|
55
|
+
if paragraphs:
|
|
56
|
+
return " ".join([p.get_text().strip() for p in paragraphs[:2]])
|
|
57
|
+
|
|
58
|
+
# Try meta description
|
|
59
|
+
meta_desc = soup.find("meta", {"name": "description"})
|
|
60
|
+
if meta_desc and meta_desc.get("content"):
|
|
61
|
+
return meta_desc.get("content")
|
|
62
|
+
|
|
63
|
+
# Try first paragraph in main content
|
|
64
|
+
main_content = soup.find("div", {"class": "content"}) or soup.find("main")
|
|
65
|
+
if main_content:
|
|
66
|
+
first_p = main_content.find("p")
|
|
67
|
+
if first_p:
|
|
68
|
+
return first_p.get_text().strip()
|
|
69
|
+
|
|
70
|
+
return ""
|
|
71
|
+
except Exception:
|
|
72
|
+
return ""
|
|
73
|
+
|
|
74
|
+
def _search_nice_guidelines_real(self, query, limit):
|
|
75
|
+
"""Search NICE guidelines using real web scraping."""
|
|
76
|
+
try:
|
|
77
|
+
# Add delay to be respectful
|
|
78
|
+
time.sleep(1)
|
|
79
|
+
|
|
80
|
+
params = {"q": query, "type": "guidance"}
|
|
81
|
+
|
|
82
|
+
response = self.session.get(self.search_url, params=params, timeout=30)
|
|
83
|
+
response.raise_for_status()
|
|
84
|
+
|
|
85
|
+
soup = BeautifulSoup(response.content, "html.parser")
|
|
86
|
+
|
|
87
|
+
# Find the JSON data in the script tag
|
|
88
|
+
script_tag = soup.find("script", {"id": "__NEXT_DATA__"})
|
|
89
|
+
if not script_tag:
|
|
90
|
+
return {
|
|
91
|
+
"error": "No search results found",
|
|
92
|
+
"suggestion": "Try different search terms or check if the NICE website is accessible",
|
|
93
|
+
}
|
|
94
|
+
|
|
95
+            # Parse the JSON data
+            import json
+
+            try:
+                data = json.loads(script_tag.string)
+                documents = (
+                    data.get("props", {})
+                    .get("pageProps", {})
+                    .get("results", {})
+                    .get("documents", [])
+                )
+            except (json.JSONDecodeError, KeyError) as e:
+                return {
+                    "error": f"Failed to parse search results: {str(e)}",
+                    "source": "NICE",
+                }
+
+            if not documents:
+                return {
+                    "error": "No NICE guidelines found",
+                    "suggestion": "Try different search terms or check if the NICE website is accessible",
+                }
+
+            # Process the documents
+            results = []
+            for doc in documents[:limit]:
+                try:
+                    title = doc.get("title", "").replace("<b>", "").replace("</b>", "")
+                    url = doc.get("url", "")
+
+                    # Make URL absolute
+                    if url.startswith("/"):
+                        url = self.base_url + url
+
+                    # Extract summary - try multiple fields
+                    summary = (
+                        doc.get("abstract", "")
+                        or doc.get("staticAbstract", "")
+                        or doc.get("metaDescription", "")
+                        or doc.get("teaser", "")
+                        or ""
+                    )
+
+                    # If still no summary, try to fetch from the detail page
+                    if not summary and url:
+                        summary = self._fetch_guideline_summary(url)
+
+                    # Extract date
+                    publication_date = doc.get("publicationDate", "")
+                    last_updated = doc.get("lastUpdated", "")
+                    date = last_updated or publication_date
+
+                    # Extract type/category
+                    nice_result_type = doc.get("niceResultType", "")
+                    nice_guidance_type = doc.get("niceGuidanceType", [])
+                    guideline_type = nice_result_type or (
+                        nice_guidance_type[0]
+                        if nice_guidance_type
+                        else "NICE Guideline"
+                    )
+
+                    # Determine if it's a guideline
+                    is_guideline = any(
+                        keyword in guideline_type.lower()
+                        for keyword in [
+                            "guideline",
+                            "quality standard",
+                            "technology appraisal",
+                        ]
+                    )
+
+                    # Extract category
+                    category = "Clinical Guidelines"
+                    if "quality standard" in guideline_type.lower():
+                        category = "Quality Standards"
+                    elif "technology appraisal" in guideline_type.lower():
+                        category = "Technology Appraisal"
+
+                    result = {
+                        "title": title,
+                        "url": url,
+                        "summary": summary,
+                        "date": date,
+                        "type": guideline_type,
+                        "source": "NICE",
+                        "is_guideline": is_guideline,
+                        "category": category,
+                    }
+
+                    results.append(result)
+
+                except Exception:
+                    # Skip items that can't be parsed
+                    continue
+
+            if not results:
+                return {
+                    "error": "No NICE guidelines found",
+                    "suggestion": "Try different search terms or check if the NICE website is accessible",
+                }
+
+            return results
+
+        except requests.exceptions.RequestException as e:
+            return {
+                "error": f"Failed to search NICE guidelines: {str(e)}",
+                "source": "NICE",
+            }
+        except Exception as e:
+            return {"error": f"Error parsing NICE response: {str(e)}", "source": "NICE"}
+
+
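The chained `.get(..., {})` calls above walk the page-data JSON (`script_tag.string`) defensively: any missing level degrades to an empty container instead of raising. A small standalone sketch with a made-up payload (field names mirror the code above; the values are illustrative only):

sample = {
    "props": {
        "pageProps": {
            "results": {
                "documents": [
                    {"title": "<b>Diabetes</b> in adults", "url": "/guidance/qs6"}
                ]
            }
        }
    }
}
documents = (
    sample.get("props", {})
    .get("pageProps", {})
    .get("results", {})
    .get("documents", [])
)
print(documents[0]["url"])  # -> /guidance/qs6
print(
    {}.get("props", {}).get("pageProps", {}).get("results", {}).get("documents", [])
)  # a missing payload yields [] rather than a KeyError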
+@register_tool()
+class PubMedGuidelinesTool(BaseTool):
+    """
+    Search PubMed for clinical practice guidelines.
+    Uses NCBI E-utilities with guideline publication type filter.
+    """
+
+    def __init__(self, tool_config):
+        super().__init__(tool_config)
+        self.base_url = "https://eutils.ncbi.nlm.nih.gov/entrez/eutils"
+        self.session = requests.Session()
+
+    def run(self, arguments):
+        query = arguments.get("query", "")
+        limit = arguments.get("limit", 10)
+        api_key = arguments.get("api_key", "")
+
+        if not query:
+            return {"error": "Query parameter is required"}
+
+        return self._search_pubmed_guidelines(query, limit, api_key)
+
+    def _search_pubmed_guidelines(self, query, limit, api_key):
+        """Search PubMed for guideline publications."""
+        try:
+            # Add guideline publication type filter
+            guideline_query = f"{query} AND (guideline[Publication Type] OR practice guideline[Publication Type])"
+
+            # Search for PMIDs
+            search_params = {
+                "db": "pubmed",
+                "term": guideline_query,
+                "retmode": "json",
+                "retmax": limit,
+            }
+            if api_key:
+                search_params["api_key"] = api_key
+
+            search_response = self.session.get(
+                f"{self.base_url}/esearch.fcgi", params=search_params, timeout=30
+            )
+            search_response.raise_for_status()
+            search_data = search_response.json()
+
+            pmids = search_data.get("esearchresult", {}).get("idlist", [])
+            search_data.get("esearchresult", {}).get("count", "0")
+
+            if not pmids:
+                return []
+
+            # Get details for PMIDs
+            time.sleep(0.5)  # Be respectful with API calls
+
+            detail_params = {"db": "pubmed", "id": ",".join(pmids), "retmode": "json"}
+            if api_key:
+                detail_params["api_key"] = api_key
+
+            detail_response = self.session.get(
+                f"{self.base_url}/esummary.fcgi", params=detail_params, timeout=30
+            )
+            detail_response.raise_for_status()
+            detail_data = detail_response.json()
+
+            # Fetch abstracts using efetch
+            time.sleep(0.5)
+            abstract_params = {
+                "db": "pubmed",
+                "id": ",".join(pmids),
+                "retmode": "xml",
+                "rettype": "abstract",
+            }
+            if api_key:
+                abstract_params["api_key"] = api_key
+
+            abstract_response = self.session.get(
+                f"{self.base_url}/efetch.fcgi", params=abstract_params, timeout=30
+            )
+            abstract_response.raise_for_status()
+
+            # Parse abstracts from XML
+            import re
+
+            abstracts = {}
+            xml_text = abstract_response.text
+            # Extract abstracts for each PMID
+            for pmid in pmids:
+                # Find abstract text for this PMID
+                pmid_pattern = rf"<PMID[^>]*>{pmid}</PMID>.*?<AbstractText[^>]*>(.*?)</AbstractText>"
+                abstract_match = re.search(pmid_pattern, xml_text, re.DOTALL)
+                if abstract_match:
+                    # Clean HTML tags from abstract
+                    abstract = re.sub(r"<[^>]+>", "", abstract_match.group(1))
+                    abstracts[pmid] = abstract.strip()
+                else:
+                    abstracts[pmid] = ""
+
+            # Process results
+            results = []
+            for pmid in pmids:
+                if pmid in detail_data.get("result", {}):
+                    article = detail_data["result"][pmid]
+
+                    # Extract author information
+                    authors = []
+                    for author in article.get("authors", [])[:3]:
+                        authors.append(author.get("name", ""))
+                    author_str = ", ".join(authors)
+                    if len(article.get("authors", [])) > 3:
+                        author_str += ", et al."
+
+                    # Check publication types
+                    pub_types = article.get("pubtype", [])
+                    is_guideline = any("guideline" in pt.lower() for pt in pub_types)
+
+                    result = {
+                        "pmid": pmid,
+                        "title": article.get("title", ""),
+                        "abstract": abstracts.get(pmid, ""),
+                        "authors": author_str,
+                        "journal": article.get("source", ""),
+                        "publication_date": article.get("pubdate", ""),
+                        "publication_types": pub_types,
+                        "is_guideline": is_guideline,
+                        "url": f"https://pubmed.ncbi.nlm.nih.gov/{pmid}/",
+                        "doi": (
+                            article.get("elocationid", "").replace("doi: ", "")
+                            if "doi:" in article.get("elocationid", "")
+                            else ""
+                        ),
+                        "source": "PubMed",
+                    }
+
+                    results.append(result)
+
+            return results
+
+        except requests.exceptions.RequestException as e:
+            return {"error": f"Failed to search PubMed: {str(e)}", "source": "PubMed"}
+        except Exception as e:
+            return {
+                "error": f"Error processing PubMed response: {str(e)}",
+                "source": "PubMed",
+            }
+
+
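A minimal usage sketch for the class above, assuming it is in scope and that BaseTool accepts a bare config dict (the packaged JSON configs under tooluniverse/data/ define the real tool schema); `run` returns a list of result dicts on success or a dict with an "error" key on failure:

# Sketch only: the config fields here are placeholders, not the packaged schema.
tool = PubMedGuidelinesTool(tool_config={"name": "PubMed_search_guidelines"})
hits = tool.run({"query": "type 2 diabetes management", "limit": 5})
if isinstance(hits, list):  # errors come back as a dict with an "error" key
    for hit in hits:
        print(hit["pmid"], hit["is_guideline"], hit["title"])
        print("   ", hit["url"])
else:
    print(hits["error"])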
+@register_tool()
+class EuropePMCGuidelinesTool(BaseTool):
+    """
+    Search Europe PMC for clinical guidelines.
+    Europe PMC provides access to life science literature including guidelines.
+    """
+
+    def __init__(self, tool_config):
+        super().__init__(tool_config)
+        self.base_url = "https://www.ebi.ac.uk/europepmc/webservices/rest/search"
+        self.session = requests.Session()
+
+    def run(self, arguments):
+        query = arguments.get("query", "")
+        limit = arguments.get("limit", 10)
+
+        if not query:
+            return {"error": "Query parameter is required"}
+
+        return self._search_europepmc_guidelines(query, limit)
+
+    def _search_europepmc_guidelines(self, query, limit):
+        """Search Europe PMC for guideline publications."""
+        try:
+            # Add guideline filter to query
+            guideline_query = f"guideline AND {query}"
+
+            params = {"query": guideline_query, "format": "json", "pageSize": limit}
+
+            response = self.session.get(self.base_url, params=params, timeout=30)
+            response.raise_for_status()
+            data = response.json()
+
+            data.get("hitCount", 0)
+            results_list = data.get("resultList", {}).get("result", [])
+
+            if not results_list:
+                return []
+
+            # Process results
+            results = []
+            for result in results_list:
+                title = result.get("title", "")
+                pub_type = result.get("pubType", "")
+                abstract = result.get("abstractText", "")
+
+                # Determine if it's a guideline
+                is_guideline = (
+                    "guideline" in title.lower()
+                    or "guideline" in pub_type.lower()
+                    or "guideline" in abstract.lower()
+                )
+
+                # Build URL
+                pmid = result.get("pmid", "")
+                pmcid = result.get("pmcid", "")
+                doi = result.get("doi", "")
+
+                url = ""
+                if pmid:
+                    url = f"https://europepmc.org/article/MED/{pmid}"
+                elif pmcid:
+                    url = f"https://europepmc.org/article/{pmcid}"
+                elif doi:
+                    url = f"https://doi.org/{doi}"
+
+                guideline_result = {
+                    "title": title,
+                    "pmid": pmid,
+                    "pmcid": pmcid,
+                    "doi": doi,
+                    "authors": result.get("authorString", ""),
+                    "journal": result.get("journalTitle", ""),
+                    "publication_date": result.get("firstPublicationDate", ""),
+                    "publication_type": pub_type,
+                    "abstract": (
+                        abstract[:500] + "..." if len(abstract) > 500 else abstract
+                    ),
+                    "is_guideline": is_guideline,
+                    "url": url,
+                    "source": "Europe PMC",
+                }
+
+                results.append(guideline_result)
+
+            return results
+
+        except requests.exceptions.RequestException as e:
+            return {
+                "error": f"Failed to search Europe PMC: {str(e)}",
+                "source": "Europe PMC",
+            }
+        except Exception as e:
+            return {
+                "error": f"Error processing Europe PMC response: {str(e)}",
+                "source": "Europe PMC",
+            }
+
+
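For reference, the request `_search_europepmc_guidelines` issues can be reproduced standalone with the endpoint and parameters used above (the query term here is just an example):

import requests

resp = requests.get(
    "https://www.ebi.ac.uk/europepmc/webservices/rest/search",
    params={"query": "guideline AND hypertension", "format": "json", "pageSize": 5},
    timeout=30,
)
resp.raise_for_status()
for rec in resp.json().get("resultList", {}).get("result", []):
    print(rec.get("pubType", ""), "|", rec.get("title", ""))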
+@register_tool()
+class TRIPDatabaseTool(BaseTool):
+    """
+    Search TRIP Database (Turning Research into Practice).
+    Specialized evidence-based medicine database with clinical guidelines filter.
+    """
+
+    def __init__(self, tool_config):
+        super().__init__(tool_config)
+        self.base_url = "https://www.tripdatabase.com/api/search"
+        self.session = requests.Session()
+        self.session.headers.update(
+            {
+                "User-Agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36"
+            }
+        )
+
+    def run(self, arguments):
+        query = arguments.get("query", "")
+        limit = arguments.get("limit", 10)
+        search_type = arguments.get("search_type", "guideline")
+
+        if not query:
+            return {"error": "Query parameter is required"}
+
+        return self._search_trip_database(query, limit, search_type)
+
+    def _search_trip_database(self, query, limit, search_type):
+        """Search TRIP Database for clinical guidelines."""
+        try:
+            params = {"criteria": query, "searchType": search_type, "limit": limit}
+
+            response = self.session.get(self.base_url, params=params, timeout=30)
+            response.raise_for_status()
+
+            # Parse XML response
+            root = ET.fromstring(response.content)
+
+            total = root.find("total")
+            count = root.find("count")
+
+            int(total.text) if total is not None else 0
+            int(count.text) if count is not None else 0
+
+            documents = root.findall("document")
+
+            if not documents:
+                return []
+
+            # Process results
+            results = []
+            for doc in documents[:limit]:
+                title_elem = doc.find("title")
+                link_elem = doc.find("link")
+                publication_elem = doc.find("publication")
+                category_elem = doc.find("category")
+                description_elem = doc.find("description")
+
+                guideline_result = {
+                    "title": title_elem.text if title_elem is not None else "",
+                    "url": link_elem.text if link_elem is not None else "",
+                    "description": (
+                        description_elem.text if description_elem is not None else ""
+                    ),
+                    "publication": (
+                        publication_elem.text if publication_elem is not None else ""
+                    ),
+                    "category": category_elem.text if category_elem is not None else "",
+                    "is_guideline": True,  # TRIP returns filtered results
+                    "source": "TRIP Database",
+                }
+
+                results.append(guideline_result)
+
+            return results
+
+        except requests.exceptions.RequestException as e:
+            return {
+                "error": f"Failed to search TRIP Database: {str(e)}",
+                "source": "TRIP Database",
+            }
+        except ET.ParseError as e:
+            return {
+                "error": f"Failed to parse TRIP Database response: {str(e)}",
+                "source": "TRIP Database",
+            }
+        except Exception as e:
+            return {
+                "error": f"Error processing TRIP Database response: {str(e)}",
+                "source": "TRIP Database",
+            }
+
+
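A minimal usage sketch under the same assumptions as the PubMed example above; `search_type` defaults to "guideline" in `run`:

# Sketch only: placeholder config, same caveats as the PubMed example.
trip = TRIPDatabaseTool(tool_config={"name": "TRIP_search_guidelines"})
out = trip.run({"query": "asthma", "limit": 5, "search_type": "guideline"})
if isinstance(out, list):
    for item in out:
        print(item["category"], "-", item["title"])
else:
    print(out["error"])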
+@register_tool()
+class WHOGuidelinesTool(BaseTool):
+    """
+    WHO (World Health Organization) Guidelines Search Tool.
+    Searches WHO official guidelines from their publications website.
+    """
+
+    def __init__(self, tool_config):
+        super().__init__(tool_config)
+        self.base_url = "https://www.who.int"
+        self.guidelines_url = f"{self.base_url}/publications/who-guidelines"
+        self.session = requests.Session()
+        self.session.headers.update(
+            {
+                "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36"
+            }
+        )
+
+    def run(self, arguments):
+        query = arguments.get("query", "")
+        limit = arguments.get("limit", 10)
+
+        if not query:
+            return {"error": "Query parameter is required"}
+
+        return self._search_who_guidelines(query, limit)
+
+    def _fetch_guideline_description(self, url):
+        """Fetch description from a WHO guideline detail page."""
+        try:
+            time.sleep(0.5)  # Be respectful
+            response = self.session.get(url, timeout=15)
+            response.raise_for_status()
+            soup = BeautifulSoup(response.content, "html.parser")
+
+            # Try to find overview or description
+            overview = soup.find("div", {"class": "overview"}) or soup.find(
+                "div", {"class": "description"}
+            )
+            if overview:
+                paragraphs = overview.find_all("p")
+                if paragraphs:
+                    return " ".join([p.get_text().strip() for p in paragraphs[:2]])
+
+            # Try meta description
+            meta_desc = soup.find("meta", {"name": "description"}) or soup.find(
+                "meta", {"property": "og:description"}
+            )
+            if meta_desc and meta_desc.get("content"):
+                return meta_desc.get("content")
+
+            # Try first few paragraphs in main content
+            main_content = (
+                soup.find("div", {"class": "content"})
+                or soup.find("main")
+                or soup.find("article")
+            )
+            if main_content:
+                paragraphs = main_content.find_all("p", recursive=True)
+                if paragraphs:
+                    text_parts = []
+                    for p in paragraphs[:3]:
+                        text = p.get_text().strip()
+                        if len(text) > 30:  # Skip very short paragraphs
+                            text_parts.append(text)
+                            if len(" ".join(text_parts)) > 300:  # Limit total length
+                                break
+                    if text_parts:
+                        return " ".join(text_parts)
+
+            return ""
+        except Exception:
+            return ""
+
+    def _search_who_guidelines(self, query, limit):
+        """Search WHO guidelines by scraping their official website."""
+        try:
+            # Add delay to be respectful
+            time.sleep(1)
+
+            # First, get the guidelines page
+            response = self.session.get(self.guidelines_url, timeout=30)
+            response.raise_for_status()
+
+            soup = BeautifulSoup(response.content, "html.parser")
+
+            # Find all publication links
+            all_links = soup.find_all("a", href=True)
+            guidelines = []
+
+            query_lower = query.lower()
+
+            for link in all_links:
+                href = link["href"]
+                text = link.get_text().strip()
+
+                # Filter for actual guideline publications
+                if (
+                    ("/publications/i/item/" in href or "/publications/m/item/" in href)
+                    and text
+                    and len(text) > 10
+                ):
+                    # Check if query matches the title
+                    if query_lower in text.lower():
+                        full_url = (
+                            href if href.startswith("http") else self.base_url + href
+                        )
+
+                        # Avoid duplicates
+                        if not any(g["url"] == full_url for g in guidelines):
+                            # Fetch description from detail page
+                            description = self._fetch_guideline_description(full_url)
+
+                            guidelines.append(
+                                {
+                                    "title": text,
+                                    "url": full_url,
+                                    "description": description,
+                                    "source": "WHO",
+                                    "organization": "World Health Organization",
+                                    "is_guideline": True,
+                                    "official": True,
+                                }
+                            )
+
+                            if len(guidelines) >= limit:
+                                break
+
+            # If no results with strict matching, get all WHO guidelines from page
+            if len(guidelines) == 0:
+                print(
+                    f"No exact matches for '{query}', retrieving latest WHO guidelines..."
+                )
+
+                all_guidelines = []
+                for link in all_links:
+                    href = link["href"]
+                    text = link.get_text().strip()
+
+                    if (
+                        (
+                            "/publications/i/item/" in href
+                            or "/publications/m/item/" in href
+                        )
+                        and text
+                        and len(text) > 10
+                    ):
+                        full_url = (
+                            href if href.startswith("http") else self.base_url + href
+                        )
+
+                        if not any(g["url"] == full_url for g in all_guidelines):
+                            # Fetch description from detail page
+                            description = self._fetch_guideline_description(full_url)
+
+                            all_guidelines.append(
+                                {
+                                    "title": text,
+                                    "url": full_url,
+                                    "description": description,
+                                    "source": "WHO",
+                                    "organization": "World Health Organization",
+                                    "is_guideline": True,
+                                    "official": True,
+                                }
+                            )
+
+                guidelines = all_guidelines[:limit]
+
+            return guidelines
+
+        except requests.exceptions.RequestException as e:
+            return {
+                "error": f"Failed to access WHO guidelines: {str(e)}",
+                "source": "WHO",
+            }
+        except Exception as e:
+            return {
+                "error": f"Error processing WHO guidelines: {str(e)}",
+                "source": "WHO",
+            }
+
+
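A minimal usage sketch under the same assumptions as above; note that when no listed title matches the query the tool falls back to the most recent guidelines on the WHO listing page:

# Sketch only: placeholder config; scraping failures come back as an error dict.
who = WHOGuidelinesTool(tool_config={"name": "WHO_search_guidelines"})
found = who.run({"query": "tuberculosis", "limit": 3})
if isinstance(found, list):
    for g in found:
        print(g["title"])
        print("   ", g["url"])
else:
    print(found["error"])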
+@register_tool()
+class OpenAlexGuidelinesTool(BaseTool):
+    """
+    OpenAlex Guidelines Search Tool.
+    Specialized tool for searching clinical practice guidelines using OpenAlex API.
+    """
+
+    def __init__(self, tool_config):
+        super().__init__(tool_config)
+        self.base_url = "https://api.openalex.org/works"
+
+    def run(self, arguments):
+        query = arguments.get("query", "")
+        limit = arguments.get("limit", 10)
+        year_from = arguments.get("year_from", None)
+        year_to = arguments.get("year_to", None)
+
+        if not query:
+            return {"error": "Query parameter is required"}
+
+        return self._search_openalex_guidelines(query, limit, year_from, year_to)
+
+    def _search_openalex_guidelines(self, query, limit, year_from=None, year_to=None):
+        """Search for clinical guidelines using OpenAlex API."""
+        try:
+            # Build search query to focus on guidelines
+            search_query = f"{query} clinical practice guideline"
+
+            # Build parameters
+            params = {
+                "search": search_query,
+                "per_page": min(limit, 50),
+                "sort": "cited_by_count:desc",  # Sort by citations
+            }
+
+            # Add year filters
+            filters = []
+            if year_from and year_to:
+                filters.append(f"publication_year:{year_from}-{year_to}")
+            elif year_from:
+                filters.append(f"from_publication_date:{year_from}-01-01")
+            elif year_to:
+                filters.append(f"to_publication_date:{year_to}-12-31")
+
+            # Filter for articles
+            filters.append("type:article")
+
+            if filters:
+                params["filter"] = ",".join(filters)
+
+            response = requests.get(self.base_url, params=params, timeout=30)
+            response.raise_for_status()
+
+            data = response.json()
+            results = data.get("results", [])
+            data.get("meta", {})
+
+            guidelines = []
+            for work in results:
+                # Extract information
+                title = work.get("title", "N/A")
+                year = work.get("publication_year", "N/A")
+                doi = work.get("doi", "")
+                openalex_id = work.get("id", "")
+                cited_by = work.get("cited_by_count", 0)
+
+                # Extract authors
+                authors = []
+                authorships = work.get("authorships", [])
+                for authorship in authorships[:5]:
+                    author = authorship.get("author", {})
+                    author_name = author.get("display_name", "")
+                    if author_name:
+                        authors.append(author_name)
+
+                # Extract institutions
+                institutions = []
+                for authorship in authorships[:3]:
+                    for inst in authorship.get("institutions", []):
+                        inst_name = inst.get("display_name", "")
+                        if inst_name and inst_name not in institutions:
+                            institutions.append(inst_name)
+
+                # Extract abstract
+                abstract_inverted = work.get("abstract_inverted_index", {})
+                abstract = (
+                    self._reconstruct_abstract(abstract_inverted)
+                    if abstract_inverted
+                    else None
+                )
+
+                # Check if it's likely a guideline
+                is_guideline = any(
+                    keyword in title.lower()
+                    for keyword in [
+                        "guideline",
+                        "recommendation",
+                        "consensus",
+                        "practice",
+                        "statement",
+                    ]
+                )
+
+                # Build URL
+                url = (
+                    doi
+                    if doi and doi.startswith("http")
+                    else (
+                        f"https://doi.org/{doi.replace('https://doi.org/', '')}"
+                        if doi
+                        else openalex_id
+                    )
+                )
+
+                guideline = {
+                    "title": title,
+                    "authors": authors,
+                    "institutions": institutions[:3],
+                    "year": year,
+                    "doi": doi,
+                    "url": url,
+                    "openalex_id": openalex_id,
+                    "cited_by_count": cited_by,
+                    "is_guideline": is_guideline,
+                    "source": "OpenAlex",
+                    "abstract": (
+                        abstract[:500] if abstract else None
+                    ),  # Limit abstract length
+                }
+
+                guidelines.append(guideline)
+
+            return guidelines
+
+        except requests.exceptions.RequestException as e:
+            return {
+                "error": f"Failed to search OpenAlex: {str(e)}",
+                "source": "OpenAlex",
+            }
+        except Exception as e:
+            return {
+                "error": f"Error processing OpenAlex response: {str(e)}",
+                "source": "OpenAlex",
+            }
+
+    def _reconstruct_abstract(self, abstract_inverted_index):
+        """Reconstruct abstract from inverted index."""
+        if not abstract_inverted_index:
+            return None
+
+        try:
+            # Create a list to hold words at their positions
+            max_position = max(
+                max(positions) for positions in abstract_inverted_index.values()
+            )
+            words = [""] * (max_position + 1)
+
+            # Place each word at its positions
+            for word, positions in abstract_inverted_index.items():
+                for pos in positions:
+                    words[pos] = word
+
+            # Join words to form abstract
+            abstract = " ".join(words).strip()
+            return abstract
+
+        except Exception:
+            return None
+
+
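OpenAlex returns abstracts as an inverted index mapping each word to the positions where it occurs; `_reconstruct_abstract` above rebuilds the running text by writing words back at their positions. The same logic as a standalone sketch on a tiny, made-up index:

def reconstruct_abstract(inverted_index):
    """Rebuild plain text from an OpenAlex-style abstract_inverted_index."""
    if not inverted_index:
        return None
    size = max(p for positions in inverted_index.values() for p in positions) + 1
    words = [""] * size
    for word, positions in inverted_index.items():
        for pos in positions:
            words[pos] = word
    return " ".join(words).strip()

# Tiny illustrative index; real records map hundreds of words to their positions.
print(
    reconstruct_abstract(
        {"clinical": [0], "practice": [1], "guidelines": [2, 4], "for": [3]}
    )
)
# -> "clinical practice guidelines for guidelines"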
+@register_tool()
+class NICEGuidelineFullTextTool(BaseTool):
+    """
+    Fetch full text content from NICE guideline pages.
+    Takes a NICE guideline URL and extracts the complete guideline content.
+    """
+
+    def __init__(self, tool_config):
+        super().__init__(tool_config)
+        self.base_url = "https://www.nice.org.uk"
+        self.session = requests.Session()
+        self.session.headers.update(
+            {
+                "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36"
+            }
+        )
+
+    def run(self, arguments):
+        url = arguments.get("url", "")
+
+        if not url:
+            return {"error": "URL parameter is required"}
+
+        # Ensure it's a NICE URL
+        if "nice.org.uk" not in url:
+            return {"error": "URL must be a NICE guideline URL (nice.org.uk)"}
+
+        return self._fetch_full_guideline(url)
+
+    def _fetch_full_guideline(self, url):
+        """Fetch complete guideline content from NICE page."""
+        try:
+            time.sleep(1)  # Be respectful
+            response = self.session.get(url, timeout=30)
+            response.raise_for_status()
+
+            soup = BeautifulSoup(response.content, "html.parser")
+
+            # Extract title
+            title_elem = soup.find("h1") or soup.find("title")
+            title = title_elem.get_text().strip() if title_elem else "Unknown Title"
+
+            # Extract guideline metadata
+            metadata = {}
+
+            # Published date
+            date_elem = soup.find("time") or soup.find(
+                "span", {"class": "published-date"}
+            )
+            if date_elem:
+                metadata["published_date"] = date_elem.get_text().strip()
+
+            # Guideline code (e.g., NG28)
+            code_match = re.search(r"\(([A-Z]{2,3}\d+)\)", title)
+            if code_match:
+                metadata["guideline_code"] = code_match.group(1)
+
+            # Extract main content sections
+            content_sections = []
+
+            # Find main content div - NICE uses specific structure
+            main_content = (
+                soup.find("div", {"class": "content"})
+                or soup.find("main")
+                or soup.find("article")
+            )
+
+            if main_content:
+                # Extract all headings and their content
+                all_headings = main_content.find_all(["h1", "h2", "h3", "h4", "h5"])
+
+                for heading in all_headings:
+                    heading_text = heading.get_text().strip()
+
+                    # Find content between this heading and the next
+                    content_parts = []
+                    current = heading.find_next_sibling()
+
+                    while current and current.name not in [
+                        "h1",
+                        "h2",
+                        "h3",
+                        "h4",
+                        "h5",
+                    ]:
+                        if current.name == "p":
+                            text = current.get_text().strip()
+                            if text:
+                                content_parts.append(text)
+                        elif current.name in ["ul", "ol"]:
+                            items = current.find_all("li")
+                            for li in items:
+                                content_parts.append(f" • {li.get_text().strip()}")
+                        elif current.name == "div":
+                            # Check if div has paragraphs
+                            paras = current.find_all("p", recursive=False)
+                            for p in paras:
+                                text = p.get_text().strip()
+                                if text:
+                                    content_parts.append(text)
+
+                        current = current.find_next_sibling()
+
+                    if content_parts:
+                        content_sections.append(
+                            {
+                                "heading": heading_text,
+                                "content": "\n\n".join(content_parts),
+                            }
+                        )
+
+                # If no sections found with headings, extract all paragraphs
+                if not content_sections:
+                    all_paragraphs = main_content.find_all("p")
+                    all_text = "\n\n".join(
+                        [
+                            p.get_text().strip()
+                            for p in all_paragraphs
+                            if p.get_text().strip()
+                        ]
+                    )
+                    if all_text:
+                        content_sections.append(
+                            {"heading": "Content", "content": all_text}
+                        )
+
+            # Compile full text
+            full_text_parts = []
+            for section in content_sections:
+                if section["heading"]:
+                    full_text_parts.append(f"## {section['heading']}")
+                full_text_parts.append(section["content"])
+
+            full_text = "\n\n".join(full_text_parts)
+
+            # Extract recommendations specifically
+            recommendations = []
+            rec_sections = soup.find_all(
+                ["div", "section"], class_=re.compile(r"recommendation")
+            )
+            for rec in rec_sections[:20]:  # Limit to first 20 recommendations
+                rec_text = rec.get_text().strip()
+                if rec_text and len(rec_text) > 20:
+                    recommendations.append(rec_text)
+
+            return {
+                "url": url,
+                "title": title,
+                "metadata": metadata,
+                "full_text": full_text,
+                "full_text_length": len(full_text),
+                "sections_count": len(content_sections),
+                "recommendations": recommendations[:20] if recommendations else None,
+                "recommendations_count": len(recommendations) if recommendations else 0,
+                "source": "NICE",
+                "content_type": "full_guideline",
+                "success": len(full_text) > 500,
+            }
+
+        except requests.exceptions.RequestException as e:
+            return {"error": f"Failed to fetch NICE guideline: {str(e)}", "url": url}
+        except Exception as e:
+            return {"error": f"Error parsing NICE guideline: {str(e)}", "url": url}
+
+
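A minimal usage sketch for the full-text fetcher, assuming the class above is in scope and a bare config dict is acceptable to BaseTool; NG28 is used only as an illustrative guideline URL:

# Sketch only: placeholder config; NG28 is an illustrative NICE guideline URL.
fetcher = NICEGuidelineFullTextTool(
    tool_config={"name": "NICE_get_guideline_full_text"}
)
page = fetcher.run({"url": "https://www.nice.org.uk/guidance/ng28"})
if "error" not in page:
    print(page["title"], page["metadata"].get("guideline_code", ""))
    print(page["sections_count"], "sections,",
          page["recommendations_count"], "recommendations")
else:
    print(page["error"])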
+@register_tool()
+class WHOGuidelineFullTextTool(BaseTool):
+    """
+    Fetch full text content from WHO guideline pages.
+    Takes a WHO publication URL and extracts content or PDF download link.
+    """
+
+    def __init__(self, tool_config):
+        super().__init__(tool_config)
+        self.base_url = "https://www.who.int"
+        self.session = requests.Session()
+        self.session.headers.update(
+            {
+                "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36"
+            }
+        )
+
+    def run(self, arguments):
+        url = arguments.get("url", "")
+
+        if not url:
+            return {"error": "URL parameter is required"}
+
+        # Ensure it's a WHO URL
+        if "who.int" not in url:
+            return {"error": "URL must be a WHO publication URL (who.int)"}
+
+        return self._fetch_who_guideline(url)
+
+    def _fetch_who_guideline(self, url):
+        """Fetch WHO guideline content and PDF link."""
+        try:
+            time.sleep(1)  # Be respectful
+            response = self.session.get(url, timeout=30)
+            response.raise_for_status()
+
+            soup = BeautifulSoup(response.content, "html.parser")
+
+            # Extract title
+            title_elem = soup.find("h1") or soup.find("title")
+            title = title_elem.get_text().strip() if title_elem else "Unknown Title"
+
+            # Extract metadata
+            metadata = {}
+
+            # Publication date
+            date_elem = soup.find("time") or soup.find(
+                "span", class_=re.compile(r"date")
+            )
+            if date_elem:
+                metadata["published_date"] = date_elem.get_text().strip()
+
+            # ISBN
+            isbn_elem = soup.find(text=re.compile(r"ISBN"))
+            if isbn_elem:
+                isbn_match = re.search(r"ISBN[:\s]*([\d\-]+)", isbn_elem)
+                if isbn_match:
+                    metadata["isbn"] = isbn_match.group(1)
+
+            # Find PDF download link
+            pdf_link = None
+            pdf_links = soup.find_all("a", href=re.compile(r"\.pdf$", re.I))
+
+            for link in pdf_links:
+                href = link.get("href", "")
+                if href:
+                    # Make absolute URL
+                    if href.startswith("http"):
+                        pdf_link = href
+                    elif href.startswith("//"):
+                        pdf_link = "https:" + href
+                    elif href.startswith("/"):
+                        pdf_link = self.base_url + href
+                    else:
+                        pdf_link = self.base_url + "/" + href
+
+                    # Prefer full document over excerpts
+                    link_text = link.get_text().lower()
+                    if "full" in link_text or "complete" in link_text:
+                        break
+
+            # Extract overview/description
+            overview = ""
+            overview_section = soup.find(
+                "div", class_=re.compile(r"overview|description|summary")
+            ) or soup.find(
+                "section", class_=re.compile(r"overview|description|summary")
+            )
+
+            if overview_section:
+                paragraphs = overview_section.find_all("p")
+                overview = "\n\n".join(
+                    [p.get_text().strip() for p in paragraphs if p.get_text().strip()]
+                )
+
+            # Extract key facts/highlights
+            key_facts = []
+            facts_section = soup.find(
+                ["div", "section"], class_=re.compile(r"key.*facts|highlights")
+            )
+            if facts_section:
+                items = facts_section.find_all("li")
+                key_facts = [
+                    li.get_text().strip() for li in items if li.get_text().strip()
+                ]
+
+            # Try to extract main content
+            main_content = ""
+            content_div = (
+                soup.find("div", {"class": "content"})
+                or soup.find("main")
+                or soup.find("article")
+            )
+
+            if content_div:
+                # Get all paragraphs
+                paragraphs = content_div.find_all("p")
+                content_parts = []
+                for p in paragraphs[:50]:  # Limit to avoid too much content
+                    text = p.get_text().strip()
+                    if len(text) > 30:  # Skip very short paragraphs
+                        content_parts.append(text)
+
+                main_content = "\n\n".join(content_parts)
+
+            return {
+                "url": url,
+                "title": title,
+                "metadata": metadata,
+                "overview": overview,
+                "main_content": main_content,
+                "content_length": len(main_content),
+                "key_facts": key_facts if key_facts else None,
+                "pdf_download_url": pdf_link,
+                "has_pdf": pdf_link is not None,
+                "source": "WHO",
+                "content_type": "guideline_page",
+                "success": len(overview) > 100 or pdf_link is not None,
+                "note": (
+                    "Full text available as PDF download"
+                    if pdf_link
+                    else "Limited web content available"
+                ),
+            }
+
+        except requests.exceptions.RequestException as e:
+            return {"error": f"Failed to fetch WHO guideline: {str(e)}", "url": url}
+        except Exception as e:
+            return {"error": f"Error parsing WHO guideline: {str(e)}", "url": url}
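
A minimal usage sketch mirroring the NICE example, assuming the class above is in scope; the item URL is a placeholder, and a failed fetch simply comes back as an error dict:

# Sketch only: placeholder config and a placeholder publication URL.
who_fetch = WHOGuidelineFullTextTool(
    tool_config={"name": "WHO_get_guideline_full_text"}
)
doc = who_fetch.run({"url": "https://www.who.int/publications/i/item/example-guideline"})
if "error" not in doc:
    print(doc["title"])
    print("PDF:", doc["pdf_download_url"] if doc["has_pdf"] else doc["note"])
else:
    print(doc["error"])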