aiagents4pharma 1.46.3__py3-none-any.whl → 1.46.5__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -11,10 +11,6 @@ If your machine has NVIDIA GPU(s), please install the following this:
 
 _This agent is available on Docker Hub._
 
-**Prerequisites**
-
-- [Milvus](https://milvus.io) (for a vector database)
-
 ---
 
 #### 1. Download files
@@ -61,7 +57,7 @@ MILVUS_HOST=localhost
 MILVUS_PORT=19530
 MILVUS_USER=root
 MILVUS_PASSWORD=Milvus
-MILVUS_DATABASE=your_database_name_here
+MILVUS_DATABASE=t2kg_primekg
 
 # Specify the data directory for multimodal data to your own data directory
 # DATA_DIR=/your_absolute_path_to_your_data_dir/
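The example `.env` now ships `t2kg_primekg` as the default Milvus database name (the same change appears again in the second install guide below). A minimal sketch of how a client might consume these `MILVUS_*` settings, assuming pymilvus is installed; the helper below is illustrative, not part of the package:

```python
import os

from pymilvus import connections, utility


def connect_from_env() -> None:
    """Connect to Milvus using the MILVUS_* variables from the .env above."""
    connections.connect(
        alias="default",
        host=os.getenv("MILVUS_HOST", "localhost"),
        port=os.getenv("MILVUS_PORT", "19530"),
        user=os.getenv("MILVUS_USER", "root"),
        password=os.getenv("MILVUS_PASSWORD", "Milvus"),
        db_name=os.getenv("MILVUS_DATABASE", "t2kg_primekg"),  # new default in 1.46.5
    )
    # Confirm the connection by listing collections in the selected database
    print(utility.list_collections())
```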
@@ -1,3 +1,3 @@
 _target_: talk2knowledgegraphs.utils.ols_terms
-base_url: https://www.ebi.ac.uk/ols4/api/terms/
+base_url: https://www.ebi.ac.uk/ols/api/terms
 timeout: 10
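The OLS enrichment config now points at `https://www.ebi.ac.uk/ols/api/terms` without a trailing slash. A rough sketch of a request against the configured endpoint; loading the YAML directly and the `iri` query parameter are assumptions for illustration (the package resolves this config through Hydra):

```python
import requests
import yaml

# Load the config shown above (the package itself uses Hydra, not plain yaml)
with open("default.yaml", encoding="utf-8") as f:
    cfg = yaml.safe_load(f)

# Hypothetical lookup of a single ontology term by IRI
resp = requests.get(
    cfg["base_url"],
    params={"iri": "http://purl.obolibrary.org/obo/GO_0008150"},
    timeout=cfg["timeout"],
)
resp.raise_for_status()
terms = resp.json().get("_embedded", {}).get("terms", [])
print([term.get("label", "") for term in terms])
```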
@@ -4,8 +4,8 @@ Class for loading BioBridgePrimeKG dataset.
 
 import json
 import os
-import pickle
 
+import joblib
 import numpy as np
 import pandas as pd
 import requests
@@ -161,8 +161,7 @@ class BioBridgePrimeKG(Dataset):
 processed_file_path = os.path.join(self.local_dir, "embeddings", "embedding_dict.pkl")
 if os.path.exists(processed_file_path):
     # Load the embeddings from the local directory
-    with open(processed_file_path, "rb") as f:
-        emb_dict_all = pickle.load(f)
+    emb_dict_all = joblib.load(processed_file_path)
 else:
     # Download the embeddings from the BioBridge repo and further process them
     # List of embedding source files
@@ -183,16 +182,14 @@ class BioBridgePrimeKG(Dataset):
 # Unified embeddings
 emb_dict_all = {}
 for file in file_list:
-    with open(os.path.join(self.local_dir, "embeddings", file), "rb") as f:
-        emb = pickle.load(f)
+    emb = joblib.load(os.path.join(self.local_dir, "embeddings", file))
     emb_ar = emb["embedding"]
     if not isinstance(emb_ar, list):
         emb_ar = emb_ar.tolist()
     emb_dict_all.update(dict(zip(emb["node_index"], emb_ar, strict=False)))
 
-# Store embeddings
-with open(processed_file_path, "wb") as f:
-    pickle.dump(emb_dict_all, f)
+# Store embeddings using secure joblib
+joblib.dump(emb_dict_all, processed_file_path)
 
 return emb_dict_all
 
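The pattern in this file (and in the other modules further down) replaces the `open()` + `pickle` pair with joblib's single-call `dump`/`load`. A minimal round-trip sketch; the cache path and data are illustrative:

```python
import os

import joblib

cache_path = os.path.join("embeddings", "embedding_dict.pkl")  # hypothetical cache location
emb_dict_all = {0: [0.1, 0.2, 0.3], 1: [0.4, 0.5, 0.6]}

os.makedirs(os.path.dirname(cache_path), exist_ok=True)
joblib.dump(emb_dict_all, cache_path)  # replaces open(path, "wb") + pickle.dump(obj, f)
loaded = joblib.load(cache_path)       # replaces open(path, "rb") + pickle.load(f)
assert loaded == emb_dict_all
```

joblib is also efficient with large NumPy arrays, which suits the embedding dictionaries cached here.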
@@ -3,10 +3,10 @@ Class for loading StarkQAPrimeKG dataset.
 """
 
 import os
-import pickle
 import shutil
 
 import gdown
+import joblib
 import numpy as np
 import pandas as pd
 import torch
@@ -110,8 +110,9 @@ class StarkQAPrimeKG(Dataset):
 )
 
 # Load the node info of PrimeKG preprocessed for StarkQA
-with open(os.path.join(self.local_dir, "skb/prime/processed/node_info.pkl"), "rb") as f:
-    starkqa_node_info = pickle.load(f)
+starkqa_node_info = joblib.load(
+    os.path.join(self.local_dir, "skb/prime/processed/node_info.pkl")
+)
 
 return starkqa, starkqa_split_idx, starkqa_node_info
 
@@ -22,12 +22,6 @@ If your machine has NVIDIA GPU(s), please install the following this:
 
 _This agent is available on Docker Hub._
 
-**Prerequisites**
-
-- If your machine has NVIDIA GPU(s), please install [nvidia-container-toolkit](https://docs.nvidia.com/datacenter/cloud-native/container-toolkit/1.17.8/install-guide.html) (required for GPU support with Docker; enables containers to access NVIDIA GPUs for accelerated computing). After installing `nvidia-container-toolkit`, please restart Docker to ensure GPU support is enabled.
-
-- [Milvus](https://milvus.io) (for a vector database)
-
 ---
 
 #### 1. Download files
@@ -74,7 +68,7 @@ MILVUS_HOST=localhost
 MILVUS_PORT=19530
 MILVUS_USER=root
 MILVUS_PASSWORD=Milvus
-MILVUS_DATABASE=your_database_name_here
+MILVUS_DATABASE=t2kg_primekg
 
 # Specify the data directory for multimodal data to your own data directory
 # DATA_DIR=/your_absolute_path_to_your_data_dir/
@@ -107,8 +107,8 @@ class SystemDetector:
         sys.exit(1)
     else:
         logger.info("Successfully installed: %s", package_cmd.split()[-1])
-except subprocess.CalledProcessError as e:
-    logger.error("Failed to install %s: %s", package_cmd, e.stderr)
+except subprocess.CalledProcessError:
+    logger.error("Failed to install package: %s", package_cmd.split()[-1])
     if "cudf" in package_cmd:
         logger.warning("GPU package installation failed, falling back to CPU mode")
         self.use_gpu = False
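The install-failure handler now logs only the package name rather than the full command and its stderr. A minimal sketch of the hardened pattern, assuming a pip-based install; the helper and names are illustrative:

```python
import logging
import subprocess
import sys

logger = logging.getLogger(__name__)


def pip_install(package_cmd: str) -> bool:
    """Install one package; on failure log only its name, never captured output."""
    package = package_cmd.split()[-1]
    try:
        subprocess.run(
            [sys.executable, "-m", "pip", "install", package],
            check=True,
            capture_output=True,
            text=True,
        )
    except subprocess.CalledProcessError:
        # Captured stderr may contain paths or credentials, so it is not logged
        logger.error("Failed to install package: %s", package)
        return False
    logger.info("Successfully installed: %s", package)
    return True
```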
@@ -198,12 +198,12 @@ class DynamicDataLoader:
     self.cudf = cudf
     self.cp = cp
     logger.info("Successfully imported GPU libraries (cudf, cupy)")
-except ImportError as e:
+except ImportError:
     logger.error(
         "[DATA LOADER] cudf or cupy not found. "
         "Please ensure they are installed correctly."
     )
-    logger.error("Import error: %s", str(e))
+    logger.error("Import error occurred - GPU libraries not available")
     # Match original script's exit behavior for critical GPU import failure
     if not os.getenv("FORCE_CPU", "false").lower() == "true":
         logger.error(
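The same redaction is applied to the GPU import path: the exception text is no longer logged, and the loader either falls back to CPU or aborts depending on `FORCE_CPU`. A sketch of that pattern, with the pandas/numpy fallback as an illustrative assumption:

```python
import logging
import os

logger = logging.getLogger(__name__)


def import_dataframe_backend():
    """Return (dataframe_module, array_module, using_gpu)."""
    try:
        import cudf
        import cupy as cp

        logger.info("Successfully imported GPU libraries (cudf, cupy)")
        return cudf, cp, True
    except ImportError:
        # Do not log the raw exception text; it may expose environment details
        logger.error("Import error occurred - GPU libraries not available")
        if os.getenv("FORCE_CPU", "false").lower() != "true":
            raise  # mirror the original exit-on-critical-failure behaviour
        import numpy as np
        import pandas as pd

        return pd, np, False
```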
@@ -795,11 +795,9 @@ class DynamicDataLoader:
     collection = self.pymilvus_modules["Collection"](name=coll)
     logger.info(" %s: %d entities", coll, collection.num_entities)
 
-except Exception as e:
-    logger.error("Error during data loading: %s", str(e))
-    import traceback
-
-    logger.error("Full traceback: %s", traceback.format_exc())
+except Exception:
+    logger.error("Error occurred during data loading")
+    logger.debug("Detailed error information available in debug mode")
     raise
 
 
@@ -832,8 +830,10 @@ def main():
 logger.info("Configuration:")
 for key, value in config.items():
     # Don't log sensitive information
-    if "password" in key.lower():
-        logger.info(" %s: %s", key, "*" * len(str(value)))
+    if any(
+        sensitive in key.lower() for sensitive in ["password", "user", "token", "key", "secret"]
+    ):
+        logger.info(" %s: %s", key, "*" * min(8, len(str(value))))
     else:
         logger.info(" %s: %s", key, value)
 
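Masking now covers any key containing `password`, `user`, `token`, `key`, or `secret`, and caps the mask at eight asterisks so long values no longer reveal their length. A runnable sketch with an illustrative config dict:

```python
import logging

logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)

SENSITIVE = ["password", "user", "token", "key", "secret"]
config = {
    "milvus_host": "localhost",
    "milvus_port": "19530",
    "milvus_user": "root",
    "milvus_password": "Milvus",
}

for key, value in config.items():
    if any(s in key.lower() for s in SENSITIVE):
        logger.info(" %s: %s", key, "*" * min(8, len(str(value))))  # masked
    else:
        logger.info(" %s: %s", key, value)
```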
@@ -866,11 +866,9 @@ def main():
 except KeyboardInterrupt:
     logger.info("Data loading interrupted by user")
     sys.exit(1)
-except Exception as e:
-    logger.error("Fatal error during data loading: %s", str(e))
-    import traceback
-
-    logger.error("Full traceback: %s", traceback.format_exc())
+except Exception:
+    logger.error("Fatal error occurred during data loading")
+    logger.debug("Detailed error information available in debug mode")
     sys.exit(1)
 
 
@@ -48,7 +48,7 @@ def test_enrich_documents(enrich_obj):
     assert UBERON_DESC in descriptions[2]
     assert HP_DESC in descriptions[3]
     assert MONDO_DESC in descriptions[4]
-    assert descriptions[5] is None
+    assert descriptions[5] == ""
 
 
 def test_enrich_documents_with_rag(enrich_obj):
@@ -67,4 +67,4 @@ def test_enrich_documents_with_rag(enrich_obj):
     assert UBERON_DESC in descriptions[2]
     assert HP_DESC in descriptions[3]
     assert MONDO_DESC in descriptions[4]
-    assert descriptions[5] is None
+    assert descriptions[5] == ""
@@ -3,10 +3,10 @@ Tool for performing multimodal subgraph extraction.
 """
 
 import logging
-import pickle
 from typing import Annotated
 
 import hydra
+import joblib
 import networkx as nx
 import numpy as np
 import pandas as pd
@@ -319,11 +319,9 @@ class MultimodalSubgraphExtractionTool(BaseTool):
 initial_graph["source"] = state["dic_source_graph"][-1] # The last source graph as of now
 # logger.log(logging.INFO, "Source graph: %s", source_graph)
 
-# Load the knowledge graph
-with open(initial_graph["source"]["kg_pyg_path"], "rb") as f:
-    initial_graph["pyg"] = pickle.load(f)
-with open(initial_graph["source"]["kg_text_path"], "rb") as f:
-    initial_graph["text"] = pickle.load(f)
+# Load the knowledge graph using secure joblib
+initial_graph["pyg"] = joblib.load(initial_graph["source"]["kg_pyg_path"])
+initial_graph["text"] = joblib.load(initial_graph["source"]["kg_text_path"])
 
 # Prepare the query embeddings and modalities
 query_df = self._prepare_query_modalities(
@@ -3,10 +3,10 @@ Tool for performing subgraph extraction.
 """
 
 import logging
-import pickle
 from typing import Annotated
 
 import hydra
+import joblib
 import networkx as nx
 import numpy as np
 import pandas as pd
@@ -220,11 +220,9 @@ class SubgraphExtractionTool(BaseTool):
 initial_graph["source"] = state["dic_source_graph"][-1] # The last source graph as of now
 # logger.log(logging.INFO, "Source graph: %s", source_graph)
 
-# Load the knowledge graph
-with open(initial_graph["source"]["kg_pyg_path"], "rb") as f:
-    initial_graph["pyg"] = pickle.load(f)
-with open(initial_graph["source"]["kg_text_path"], "rb") as f:
-    initial_graph["text"] = pickle.load(f)
+# Load the knowledge graph using secure joblib
+initial_graph["pyg"] = joblib.load(initial_graph["source"]["kg_pyg_path"])
+initial_graph["text"] = joblib.load(initial_graph["source"]["kg_text_path"])
 
 # Prepare prompt construction along with a list of endotypes
 if len(state["uploaded_files"]) != 0 and "endotype" in [
@@ -54,7 +54,7 @@ class EnrichmentWithOLS(Enrichments):
 response_body = json.loads(r.text)
 # if the response body is empty
 if "_embedded" not in response_body:
-    descriptions.append(None)
+    descriptions.append("")
     continue
 # Add the description to the list
 description = []
@@ -65,13 +65,15 @@ class EnrichmentWithOLS(Enrichments):
             description += term.get("synonyms", [])
             # Add the label to the description
             # Label is not provided as list, so we need to convert it to a list
-            description += [term.get("label", [])]
+            label = term.get("label", "")
+            if label:
+                description += [label]
         # Make unique the description
         description = list(set(description))
         # Join the description with new line
         description = "\n".join(description)
-        # Add the description to the list
-        descriptions.append(description)
+        # Ensure we always return a string, even if empty
+        descriptions.append(description if description else "")
     return descriptions
 
 def enrich_documents_with_rag(self, texts, docs):
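Together with the test changes above, unresolved terms now yield an empty string rather than `None`, and empty labels are skipped. A condensed sketch of the parsing logic; only the `synonyms` and `label` fields from the hunk are used, and the sample responses are illustrative:

```python
def build_description(response_body: dict) -> str:
    """Collapse an OLS terms response into a newline-joined description string."""
    if "_embedded" not in response_body:
        return ""  # unknown term -> empty string, never None
    description: list[str] = []
    for term in response_body["_embedded"].get("terms", []):
        description += term.get("synonyms", [])
        label = term.get("label", "")
        if label:
            description.append(label)
    return "\n".join(set(description))


print(repr(build_description({})))  # ''
print(repr(build_description({"_embedded": {"terms": [{"label": "glucose"}]}})))  # 'glucose'
```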
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: aiagents4pharma
-Version: 1.46.3
+Version: 1.46.5
 Summary: AI Agents for drug discovery, drug development, and other pharmaceutical R&D.
 License-File: LICENSE
 Classifier: License :: OSI Approved :: MIT License
@@ -3,7 +3,7 @@ aiagents4pharma/talk2aiagents4pharma/.dockerignore,sha256=-hAM7RzkGbjDeU411-kXOm
 aiagents4pharma/talk2aiagents4pharma/Dockerfile,sha256=J2QvwifcCxtz9EzPaq99zhSygHV9ADsB5ZQ_cmiwATI,3533
 aiagents4pharma/talk2aiagents4pharma/README.md,sha256=0eGxj7jxi_LrCvX-4I4KrQv-7T2ivo3pqLslG7suaCk,74
 aiagents4pharma/talk2aiagents4pharma/__init__.py,sha256=gjVTAhBHKPEFBbv_2T-MWuDdwHhAKfWIo-lQSrcsLNE,97
-aiagents4pharma/talk2aiagents4pharma/install.md,sha256=YYzY1vIEA6RrVUvNuv-h_YTarBp4khTWfFTzRQ3iKSA,4286
+aiagents4pharma/talk2aiagents4pharma/install.md,sha256=5oyy9P8fd03x3f_jOFZTGg2G1AzxMIOrH2vVd8ZS4Iw,4201
 aiagents4pharma/talk2aiagents4pharma/agents/__init__.py,sha256=NpNI6Vr9XIr5m0ZaO32c6NEUTDOZvJUqd8gKzNZhcSw,130
 aiagents4pharma/talk2aiagents4pharma/agents/main_agent.py,sha256=1nRIhj3huv9eVT7v3nhSvx1dDOEEaiApPi7AMRDviXE,2750
 aiagents4pharma/talk2aiagents4pharma/configs/__init__.py,sha256=hwLAR-uhZGEbD5R7mp4kiltSvxuKkG6-_ac17sF-4xU,68
@@ -96,8 +96,8 @@ aiagents4pharma/talk2knowledgegraphs/Dockerfile,sha256=qGy6I4oBvQonpDEANQaCW-5JM
 aiagents4pharma/talk2knowledgegraphs/README.md,sha256=0eGxj7jxi_LrCvX-4I4KrQv-7T2ivo3pqLslG7suaCk,74
 aiagents4pharma/talk2knowledgegraphs/__init__.py,sha256=ZztaRzRlovSXtVX3i9Rvf84ivIjPn8RMPiYRkbkEJ0E,114
 aiagents4pharma/talk2knowledgegraphs/entrypoint.sh,sha256=EK_jGau1VuW1uTmFWZcKhLMK9VanC5l3q9axF4ZYgmI,5758
-aiagents4pharma/talk2knowledgegraphs/install.md,sha256=6eZe9czZ8nRrsOHbRmn9WaAk9xgl2kxDZac4Exs1WqU,6022
-aiagents4pharma/talk2knowledgegraphs/milvus_data_dump.py,sha256=nLChVpn9KwJ2F5zgtb51zG2s81vOzc6o5Ut-xhkjpJI,35550
+aiagents4pharma/talk2knowledgegraphs/install.md,sha256=1rCv2e6ywb-kZOxtqFJ25qpWHl2MCa9bW6nHYHfxJMI,5555
+aiagents4pharma/talk2knowledgegraphs/milvus_data_dump.py,sha256=D5BB700QshEhY6paByXNLyMjOYyO4Csm4ODgbeeWfmc,35616
 aiagents4pharma/talk2knowledgegraphs/agents/__init__.py,sha256=ugUvVYEdjbZ3y_dogfF5hpQ3lFPFrAvLSydlcpbkGo0,93
 aiagents4pharma/talk2knowledgegraphs/agents/t2kg_agent.py,sha256=GDeSjJNhAqQWagZOxAWUKqDhzUohHViSsu444W9SzRQ,3240
 aiagents4pharma/talk2knowledgegraphs/configs/__init__.py,sha256=H-yhTbJ_RXBLe3XSto5x6FmVrgbi7y1WKEfiwmKzLAk,87
@@ -118,15 +118,15 @@ aiagents4pharma/talk2knowledgegraphs/configs/tools/subgraph_summarization/__init
 aiagents4pharma/talk2knowledgegraphs/configs/tools/subgraph_summarization/default.yaml,sha256=kiHerHXsGuohu_FLq58nXkY3bwlSFMw3kGqtQ68LMfA,562
 aiagents4pharma/talk2knowledgegraphs/configs/utils/database/milvus/__init__.py,sha256=fqQQ-GlRcbzru2KmEk3oMma0R6_SzGM8dOXzYeU4oVA,46
 aiagents4pharma/talk2knowledgegraphs/configs/utils/database/milvus/default.yaml,sha256=4J6cIguIy4Z794UM9EHwzwzwrwWv7qvsKTN8c5QucLc,2394
-aiagents4pharma/talk2knowledgegraphs/configs/utils/enrichments/ols_terms/default.yaml,sha256=7Fu03IkClglxbAzwTUdy-z-Tv6MZuo4oIdBYpQ7TPjI,107
+aiagents4pharma/talk2knowledgegraphs/configs/utils/enrichments/ols_terms/default.yaml,sha256=PKBc8BmnxVW4mosHFSyp05Ksw9zZ1_K8SaMQZXPC8rI,105
 aiagents4pharma/talk2knowledgegraphs/configs/utils/enrichments/reactome_pathways/default.yaml,sha256=gyYLJREMO1jDex8-0CcCPfsuD5KguY6fKDmDCL_S4Uc,125
 aiagents4pharma/talk2knowledgegraphs/configs/utils/enrichments/uniprot_proteins/default.yaml,sha256=JlmawYim5ECgAKCZta3UrPD2YIjkD2d5TvLmR5V6wXM,184
 aiagents4pharma/talk2knowledgegraphs/configs/utils/pubchem_utils/default.yaml,sha256=J9261xMxU8Bq0x9J5YNz21gnxXpzUBbBgCQc4OIkn-w,384
 aiagents4pharma/talk2knowledgegraphs/datasets/__init__.py,sha256=jNa-ehuOVOthQuGHigYuQN4moUEQKfn5WsdnrX0WQVc,135
-aiagents4pharma/talk2knowledgegraphs/datasets/biobridge_primekg.py,sha256=w3H_cwSpZyZUhiFUcg9tbTSSmnb3ttLU3ho7GgkTyyo,23739
+aiagents4pharma/talk2knowledgegraphs/datasets/biobridge_primekg.py,sha256=M6NtpMTmSahqKzk0WYG9JEi7Yu7YORqUKgtsA21W_4M,23632
 aiagents4pharma/talk2knowledgegraphs/datasets/dataset.py,sha256=ls0e15uudIbb4zwHMHxjEovmH145RJ_hPeZni89KSnM,411
 aiagents4pharma/talk2knowledgegraphs/datasets/primekg.py,sha256=1WHlQCAyKjpBiX3JnIsSohUZe8Yi5pY-VDP4tCugxkg,7709
-aiagents4pharma/talk2knowledgegraphs/datasets/starkqa_primekg.py,sha256=rM1FnlOfQA6Vn7iPuCEW-aP70zSMsEP99Zm6VeNUXL8,7498
+aiagents4pharma/talk2knowledgegraphs/datasets/starkqa_primekg.py,sha256=JtP-jNIFDA3xQbzy5DkK2OHin5NnbWUGa_EJ_1OH6vE,7483
 aiagents4pharma/talk2knowledgegraphs/docker-compose/cpu/.env.example,sha256=twsuiEN-FczXfka40_bVoSQXaMDR_3J3ESKCqU4qSkg,585
 aiagents4pharma/talk2knowledgegraphs/docker-compose/cpu/docker-compose.yml,sha256=7BNUIkwP0cAUH1jznun5-RgsIAZisegLFGiiUZG9F1Y,2631
 aiagents4pharma/talk2knowledgegraphs/docker-compose/gpu/.env.example,sha256=twsuiEN-FczXfka40_bVoSQXaMDR_3J3ESKCqU4qSkg,585
@@ -152,7 +152,7 @@ aiagents4pharma/talk2knowledgegraphs/tests/test_utils_embeddings_ollama.py,sha25
 aiagents4pharma/talk2knowledgegraphs/tests/test_utils_embeddings_sentencetransformer.py,sha256=Xkuf2UFGCXldj1zcsh6kqfQYLDf5i0B6KP3KcmNLSzQ,1452
 aiagents4pharma/talk2knowledgegraphs/tests/test_utils_enrichments_enrichments.py,sha256=JoSRfz6vXLnLY6INxYm731Pu5pGdQy4r3KYlIwV8cog,1428
 aiagents4pharma/talk2knowledgegraphs/tests/test_utils_enrichments_ollama.py,sha256=dJS9wXE1pmgqNSnudYkSgU_TgdFBDNBdIhnO24a-1vE,3021
-aiagents4pharma/talk2knowledgegraphs/tests/test_utils_enrichments_ols.py,sha256=YTWDJfpmkKhAduAv8sncMMhmlIYnLeYX6yu3hBu7uNI,2281
+aiagents4pharma/talk2knowledgegraphs/tests/test_utils_enrichments_ols.py,sha256=ysy6W-UfUFiXFHoYptpySmk3dO2lv7mviUoFOG3j8CE,2277
 aiagents4pharma/talk2knowledgegraphs/tests/test_utils_enrichments_pubchem.py,sha256=USHsjpfHxAktxjc5Q8INZ7qQJDWn_XFQYsS6mpeTLv0,1704
 aiagents4pharma/talk2knowledgegraphs/tests/test_utils_enrichments_reactome.py,sha256=eseSw3flQLQAmXLS2R-bobN3QGEs_i6iId8l4bLEitE,1658
 aiagents4pharma/talk2knowledgegraphs/tests/test_utils_enrichments_uniprot.py,sha256=8JqptVypftx4IrObzxGPDjQ1C7I24f3zUUx69D8svAo,1615
@@ -163,8 +163,8 @@ aiagents4pharma/talk2knowledgegraphs/tools/__init__.py,sha256=unrqqDUAmfTpgiJSV6
 aiagents4pharma/talk2knowledgegraphs/tools/graphrag_reasoning.py,sha256=cCPBH1tKs9MjX1q9v6BXi-dInz_gxKwMyIVA-XdKocg,5251
 aiagents4pharma/talk2knowledgegraphs/tools/load_arguments.py,sha256=zhmsRp-8vjB5rRekqTA07d3yb-42HWqng9dDMkvK6hM,623
 aiagents4pharma/talk2knowledgegraphs/tools/milvus_multimodal_subgraph_extraction.py,sha256=chyHEOlbzLwPd8IXwgA4B2YMWbJAyBikBkf2-x_XH1E,39730
-aiagents4pharma/talk2knowledgegraphs/tools/multimodal_subgraph_extraction.py,sha256=cAsRkBklFxitBDNvhOIqmqd0ZTjtRYKsgb-rySC0PTk,14774
-aiagents4pharma/talk2knowledgegraphs/tools/subgraph_extraction.py,sha256=DSUucfzKf3LoOo1v9snp6-Zk1vTGB9jeH-hshatd0PY,11161
+aiagents4pharma/talk2knowledgegraphs/tools/multimodal_subgraph_extraction.py,sha256=GC950xmnd5ZKq9Eru-wZl4LKZ_xdPbOPTscd2t-rE2s,14718
+aiagents4pharma/talk2knowledgegraphs/tools/subgraph_extraction.py,sha256=JW14u3w4m-2GtQxFLgyKpT0GCUPMgojE2aMJZEmUDPE,11107
 aiagents4pharma/talk2knowledgegraphs/tools/subgraph_summarization.py,sha256=mXuKTahLXXFYFMS-0HkmiP7o6MSLjE_REEEsxPwCF7c,4372
 aiagents4pharma/talk2knowledgegraphs/utils/__init__.py,sha256=OwsMjDLRsVdHm6jS_oKbUP9tUKOwGt2yOjz5EgWCv_M,144
 aiagents4pharma/talk2knowledgegraphs/utils/kg_utils.py,sha256=IeCekDG__hjvIBXk4geLLBzlrLJukVg2y8IZfXTosQ0,2188
@@ -180,7 +180,7 @@ aiagents4pharma/talk2knowledgegraphs/utils/embeddings/sentence_transformer.py,sh
 aiagents4pharma/talk2knowledgegraphs/utils/enrichments/__init__.py,sha256=VvW-zb7hdE1oIZloKDMszAJbLQr5lzofzLxshGxUC44,195
 aiagents4pharma/talk2knowledgegraphs/utils/enrichments/enrichments.py,sha256=gCd0dLfozemOZxgtFVI5dE4dsQ9YmREcvU-a2us_zvw,925
 aiagents4pharma/talk2knowledgegraphs/utils/enrichments/ollama.py,sha256=1WiOQm8OCuhMFV4SmF85cUe4m940HCNRTghvHzp0ELI,4137
-aiagents4pharma/talk2knowledgegraphs/utils/enrichments/ols_terms.py,sha256=c4jJG7H6PiemzTD4zoCvMImC0uk6AuEBnYAV1jSomxc,2691
+aiagents4pharma/talk2knowledgegraphs/utils/enrichments/ols_terms.py,sha256=asn0Ko5sULojZyjz6ouTfEuc_FEeZDKsBtXqURrA-xY,2788
 aiagents4pharma/talk2knowledgegraphs/utils/enrichments/pubchem_strings.py,sha256=9CyDA0aRYmfdR-ZiSQA5edh8hBWVEKjvPNkzVKWxnK4,2578
 aiagents4pharma/talk2knowledgegraphs/utils/enrichments/reactome_pathways.py,sha256=8iyrT1p01GJpi7k7FCuwZBqn-hyueRWYxPqabM5U80Q,1990
 aiagents4pharma/talk2knowledgegraphs/utils/enrichments/uniprot_proteins.py,sha256=0mTFX5sVgBZHkEuf057lCFNciTBapn9CqPi2-YublUg,3013
@@ -328,7 +328,7 @@ aiagents4pharma/talk2scholars/tools/zotero/utils/review_helper.py,sha256=-q-UuzP
 aiagents4pharma/talk2scholars/tools/zotero/utils/write_helper.py,sha256=K1EatPfC-riGyFmkOAS3ReNBaGPY-znne1KqOnFahkI,7339
 aiagents4pharma/talk2scholars/tools/zotero/utils/zotero_path.py,sha256=sKkfJu3u4LKSZjfoQRfeqz26IESHRwBtcSDzLMLlJMo,6311
 aiagents4pharma/talk2scholars/tools/zotero/utils/zotero_pdf_downloader.py,sha256=DBrF5IiF7VRP58hUK8T9LST3lQWLFixLUfnpMSTccoQ,4614
-aiagents4pharma-1.46.3.dist-info/METADATA,sha256=Meyuvauy5hfSodpwNdv5AqHVcjglDxZNBuLLIKzl7m8,17035
-aiagents4pharma-1.46.3.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
-aiagents4pharma-1.46.3.dist-info/licenses/LICENSE,sha256=IcIbyB1Hyk5ZDah03VNQvJkbNk2hkBCDqQ8qtnCvB4Q,1077
-aiagents4pharma-1.46.3.dist-info/RECORD,,
+aiagents4pharma-1.46.5.dist-info/METADATA,sha256=Tnoq6TWzRepVimi-VimoYIP_hTsz24Z9jk328UuAfxw,17035
+aiagents4pharma-1.46.5.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
+aiagents4pharma-1.46.5.dist-info/licenses/LICENSE,sha256=IcIbyB1Hyk5ZDah03VNQvJkbNk2hkBCDqQ8qtnCvB4Q,1077
+aiagents4pharma-1.46.5.dist-info/RECORD,,