aiagents4pharma 1.39.4__py3-none-any.whl → 1.40.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (22)
  1. aiagents4pharma/talk2aiagents4pharma/configs/agents/main_agent/default.yaml +26 -13
  2. aiagents4pharma/talk2aiagents4pharma/tests/test_main_agent.py +83 -3
  3. aiagents4pharma/talk2knowledgegraphs/agents/t2kg_agent.py +4 -1
  4. aiagents4pharma/talk2knowledgegraphs/configs/app/frontend/default.yaml +36 -5
  5. aiagents4pharma/talk2knowledgegraphs/milvus_data_dump.py +509 -0
  6. aiagents4pharma/talk2knowledgegraphs/tests/test_agents_t2kg_agent.py +85 -23
  7. aiagents4pharma/talk2knowledgegraphs/tests/test_tools_milvus_multimodal_subgraph_extraction.py +413 -0
  8. aiagents4pharma/talk2knowledgegraphs/tests/test_utils_enrichments_ols.py +10 -10
  9. aiagents4pharma/talk2knowledgegraphs/tests/test_utils_extractions_milvus_multimodal_pcst.py +175 -0
  10. aiagents4pharma/talk2knowledgegraphs/tests/test_utils_pubchem_utils.py +11 -0
  11. aiagents4pharma/talk2knowledgegraphs/tools/__init__.py +1 -0
  12. aiagents4pharma/talk2knowledgegraphs/tools/milvus_multimodal_subgraph_extraction.py +509 -0
  13. aiagents4pharma/talk2knowledgegraphs/utils/enrichments/ols_terms.py +15 -7
  14. aiagents4pharma/talk2knowledgegraphs/utils/enrichments/pubchem_strings.py +31 -9
  15. aiagents4pharma/talk2knowledgegraphs/utils/extractions/__init__.py +1 -0
  16. aiagents4pharma/talk2knowledgegraphs/utils/extractions/milvus_multimodal_pcst.py +393 -0
  17. aiagents4pharma/talk2knowledgegraphs/utils/pubchem_utils.py +33 -2
  18. {aiagents4pharma-1.39.4.dist-info → aiagents4pharma-1.40.0.dist-info}/METADATA +13 -14
  19. {aiagents4pharma-1.39.4.dist-info → aiagents4pharma-1.40.0.dist-info}/RECORD +22 -17
  20. {aiagents4pharma-1.39.4.dist-info → aiagents4pharma-1.40.0.dist-info}/WHEEL +0 -0
  21. {aiagents4pharma-1.39.4.dist-info → aiagents4pharma-1.40.0.dist-info}/licenses/LICENSE +0 -0
  22. {aiagents4pharma-1.39.4.dist-info → aiagents4pharma-1.40.0.dist-info}/top_level.txt +0 -0
@@ -0,0 +1,393 @@
+ """
+ Extraction of a multimodal subgraph using the Prize-Collecting Steiner Tree (PCST) algorithm.
+ """
+
+ from typing import Tuple, NamedTuple
+ import logging
+ import pickle
+ import pandas as pd
+ import pcst_fast
+ from pymilvus import Collection
+ try:
+     import cupy as py
+     import cudf
+     df = cudf
+ except ImportError:
+     import numpy as py
+     df = pd
+
+ # Initialize logger
+ logging.basicConfig(level=logging.INFO)
+ logger = logging.getLogger(__name__)
+
+ class MultimodalPCSTPruning(NamedTuple):
+     """
+     Prize-Collecting Steiner Tree (PCST) pruning algorithm inspired by the G-Retriever paper
+     (He et al., 'G-Retriever: Retrieval-Augmented Generation for Textual Graph Understanding and
+     Question Answering', NeurIPS 2024).
+     https://arxiv.org/abs/2402.07630
+     https://github.com/XiaoxinHe/G-Retriever/blob/main/src/dataset/utils/retrieval.py
+
+     Args:
+         topk: The number of top nodes to consider.
+         topk_e: The number of top edges to consider.
+         cost_e: The cost of the edges.
+         c_const: The constant value for the cost of the edges computation.
+         root: The root node of the subgraph, -1 for unrooted.
+         num_clusters: The number of clusters.
+         pruning: The pruning strategy to use.
+         verbosity_level: The verbosity level.
+         use_description: Whether to search description embeddings instead of
+             modality-specific feature embeddings.
+         metric_type: The Milvus similarity metric to use.
+     """
+     topk: int = 3
+     topk_e: int = 3
+     cost_e: float = 0.5
+     c_const: float = 0.01
+     root: int = -1
+     num_clusters: int = 1
+     pruning: str = "gw"
+     verbosity_level: int = 0
+     use_description: bool = False
+     metric_type: str = "IP"  # Inner Product
+
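As a quick illustration of the defaults above (not taken from the package itself), tuning the pruner is just constructing a new `NamedTuple` instance; the values below are arbitrary:

```python
# A minimal sketch with hypothetical values; all fields mirror the defaults above.
pcst = MultimodalPCSTPruning(
    topk=5,                 # seed prizes on the 5 most similar nodes
    topk_e=5,               # seed prizes on the 5 most similar edges
    cost_e=0.5,             # base cost charged per edge in the tree
    root=-1,                # -1 => unrooted PCST
    use_description=False,  # search feat_emb instead of desc_emb
)
```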
+     def prepare_collections(self, cfg: dict, modality: str) -> dict:
+         """
+         Prepare the collections for nodes, node-type specific nodes, and edges in Milvus.
+
+         Args:
+             cfg: The configuration dictionary containing the Milvus setup.
+             modality: The modality to use for the subgraph extraction.
+
+         Returns:
+             A dictionary containing the collections of nodes, node-type specific nodes, and edges.
+         """
+         # Initialize the collections dictionary
+         colls = {}
+
+         # Load the collection for nodes
+         colls["nodes"] = Collection(name=f"{cfg.milvus_db.database_name}_nodes")
+
+         if modality != "prompt":
+             # Load the collection for the specific node type
+             colls["nodes_type"] = Collection(
+                 f"{cfg.milvus_db.database_name}_nodes_{modality.replace('/', '_')}"
+             )
+
+         # Load the collections
+         for coll in colls.values():
+             coll.load()
+
+         return colls
+
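Note that `cfg` is annotated as `dict` but accessed by attribute, so in practice it is a Hydra/OmegaConf object. Only two fields are read by this module: `cfg.milvus_db.database_name` here, and `cfg.milvus_db.cache_edge_index_path` later in `extract_subgraph`. A stand-in with hypothetical values might look like:

```python
from types import SimpleNamespace

# Hypothetical stand-in for the Hydra config consumed above; only the two
# attributes this module actually reads are included.
cfg = SimpleNamespace(
    milvus_db=SimpleNamespace(
        database_name="t2kg_primekg",            # assumed collection prefix
        cache_edge_index_path="edge_index.pkl",  # pickled [2, num_edges] array
    )
)
# Collections resolved for modality "gene/protein" would then be named:
#   t2kg_primekg_nodes, t2kg_primekg_nodes_gene_protein, t2kg_primekg_edges
```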
+     def _compute_node_prizes(self,
+                              query_emb: list,
+                              colls: dict) -> py.ndarray:
+         """
+         Compute the node prizes based on the similarity between the query and nodes.
+
+         Args:
+             query_emb: The query embedding. This can be an embedding of
+                 a prompt, sequence, or any other feature to be used for the subgraph extraction.
+             colls: The collections of nodes, node-type specific nodes, and edges in Milvus.
+
+         Returns:
+             The prizes of the nodes.
+         """
+         # Initialize several variables
+         topk = min(self.topk, colls["nodes"].num_entities)
+         n_prizes = py.zeros(colls["nodes"].num_entities, dtype=py.float32)
+
+         # Calculate the similarity for text features and update the score
+         if self.use_description:
+             # Search the collection with the description embedding
+             res = colls["nodes"].search(
+                 data=[query_emb],
+                 anns_field="desc_emb",
+                 param={"metric_type": self.metric_type},
+                 limit=topk,
+                 output_fields=["node_id"])
+         else:
+             # Search the node-type specific collection with the query embedding
+             res = colls["nodes_type"].search(
+                 data=[query_emb],
+                 anns_field="feat_emb",
+                 param={"metric_type": self.metric_type},
+                 limit=topk,
+                 output_fields=["node_id"])
+
+         # Assign rank-based prizes (topk, topk-1, ..., 1) to the retrieved nodes
+         n_prizes[[r.id for r in res[0]]] = py.arange(topk, 0, -1).astype(py.float32)
+
+         return n_prizes
+
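The node prizes are rank-based rather than score-based: the best hit receives prize `topk`, the next `topk - 1`, and so on, with unmatched nodes left at zero. A small standalone NumPy sketch of the assignment above:

```python
import numpy as np

topk = 3
n_prizes = np.zeros(6, dtype=np.float32)
hit_ids = [4, 0, 2]  # Milvus result IDs, best match first
n_prizes[hit_ids] = np.arange(topk, 0, -1).astype(np.float32)
print(n_prizes)      # [2. 0. 1. 0. 3. 0.]
```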
+     def _compute_edge_prizes(self,
+                              text_emb: list,
+                              colls: dict) -> py.ndarray:
+         """
+         Compute the edge prizes based on the similarity between the query and edges.
+
+         Args:
+             text_emb: The textual description embedding.
+             colls: The collections of nodes, node-type specific nodes, and edges in Milvus.
+
+         Returns:
+             The prizes of the edges.
+         """
+         # Initialize several variables
+         topk_e = min(self.topk_e, colls["edges"].num_entities)
+         e_prizes = py.zeros(colls["edges"].num_entities, dtype=py.float32)
+
+         # Search the collection with the query embedding
+         res = colls["edges"].search(
+             data=[text_emb],
+             anns_field="feat_emb",
+             param={"metric_type": self.metric_type},
+             limit=topk_e,  # Only retrieve the top-k edges
+             output_fields=["head_id", "tail_id"])
+
+         # Update the prizes based on the search results
+         e_prizes[[r.id for r in res[0]]] = [r.score for r in res[0]]
+
+         # Further process the edge prizes: ties split a rank's prize mass,
+         # and each lower rank is discounted by (1 - c_const)
+         unique_prizes, inverse_indices = py.unique(e_prizes, return_inverse=True)
+         topk_e_values = unique_prizes[py.argsort(-unique_prizes)[:topk_e]]
+         last_topk_e_value = topk_e
+         for k in range(topk_e):
+             indices = inverse_indices == (unique_prizes == topk_e_values[k]).nonzero()[0]
+             value = min((topk_e - k) / indices.sum().item(), last_topk_e_value)
+             e_prizes[indices] = value
+             last_topk_e_value = value * (1 - self.c_const)
+
+         return e_prizes
+
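The post-processing loop flattens ties: all edges sharing the k-th highest score split that rank's prize mass, and each lower rank is capped below the previous one via the `(1 - c_const)` discount. A worked toy example (standalone NumPy, arbitrary scores):

```python
import numpy as np

topk_e, c_const = 2, 0.01
e_prizes = np.array([0.9, 0.9, 0.5, 0.0], dtype=np.float32)  # raw scores

unique_prizes, inverse_indices = np.unique(e_prizes, return_inverse=True)
topk_e_values = unique_prizes[np.argsort(-unique_prizes)[:topk_e]]
last_topk_e_value = topk_e
for k in range(topk_e):
    indices = inverse_indices == (unique_prizes == topk_e_values[k]).nonzero()[0]
    value = min((topk_e - k) / indices.sum().item(), last_topk_e_value)
    e_prizes[indices] = value
    last_topk_e_value = value * (1 - c_const)

print(e_prizes)  # [1.   1.   0.99 0.  ] -- the two tied edges split rank 1,
                 # and rank 2 is capped just below them
```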
+     def compute_prizes(self,
+                        text_emb: list,
+                        query_emb: list,
+                        colls: dict) -> dict:
+         """
+         Compute the node prizes based on the similarity between the query and nodes,
+         as well as the edge prizes based on the similarity between the query and edges.
+         Note that the node and edge embeddings must use the same embedding model and
+         dimensions as the query.
+
+         Args:
+             text_emb: The textual description embedding.
+             query_emb: The query embedding. This can be an embedding of
+                 a prompt, sequence, or any other feature to be used for the subgraph extraction.
+             colls: The collections of nodes, node-type specific nodes, and edges in Milvus.
+
+         Returns:
+             The prizes of the nodes and edges.
+         """
+         # Compute prizes for nodes
+         logger.log(logging.INFO, "_compute_node_prizes")
+         n_prizes = self._compute_node_prizes(query_emb, colls)
+
+         # Compute prizes for edges
+         logger.log(logging.INFO, "_compute_edge_prizes")
+         e_prizes = self._compute_edge_prizes(text_emb, colls)
+
+         return {"nodes": n_prizes, "edges": e_prizes}
+
+     def compute_subgraph_costs(self,
+                                edge_index: py.ndarray,
+                                num_nodes: int,
+                                prizes: dict) -> Tuple[dict, py.ndarray, py.ndarray, dict]:
+         """
+         Compute the costs in constructing the subgraph proposed by the G-Retriever paper.
+
+         Args:
+             edge_index: The edge index of the graph, consisting of source and destination nodes.
+             num_nodes: The number of nodes in the graph.
+             prizes: The prizes of the nodes and the edges.
+
+         Returns:
+             edges_dict: The edges of the subgraph, consisting of edges and the number of edges
+                 without virtual edges.
+             prizes: The prizes of the subgraph.
+             costs: The costs of the subgraph.
+             mapping: The mappings from real edges and virtual nodes back to the original
+                 edge indices.
+         """
+         # Initialize several variables
+         real_ = {}
+         virt_ = {}
+
+         # Update edge cost threshold
+         updated_cost_e = min(
+             self.cost_e,
+             py.max(prizes["edges"]).item() * (1 - self.c_const / 2),
+         )
+
+         # Masks for real and virtual edges
+         logger.log(logging.INFO, "Creating masks for real and virtual edges")
+         real_["mask"] = prizes["edges"] <= updated_cost_e
+         virt_["mask"] = ~real_["mask"]
+
+         # Real edge indices
+         logger.log(logging.INFO, "Computing real edges")
+         real_["indices"] = py.nonzero(real_["mask"])[0]
+         real_["src"] = edge_index[0][real_["indices"]]
+         real_["dst"] = edge_index[1][real_["indices"]]
+         real_["edges"] = py.stack([real_["src"], real_["dst"]], axis=1)
+         real_["costs"] = updated_cost_e - prizes["edges"][real_["indices"]]
+
+         # Edge index mapping: local real edge idx -> original global index
+         logger.log(logging.INFO, "Creating mapping for real edges")
+         mapping_edges = dict(zip(range(len(real_["indices"])), real_["indices"].tolist()))
+
+         # Virtual edge handling
+         logger.log(logging.INFO, "Computing virtual edges")
+         virt_["indices"] = py.nonzero(virt_["mask"])[0]
+         virt_["src"] = edge_index[0][virt_["indices"]]
+         virt_["dst"] = edge_index[1][virt_["indices"]]
+         virt_["prizes"] = prizes["edges"][virt_["indices"]] - updated_cost_e
+
+         # Generate virtual node IDs
+         logger.log(logging.INFO, "Generating virtual node IDs")
+         virt_["num"] = virt_["indices"].shape[0]
+         virt_["node_ids"] = py.arange(num_nodes, num_nodes + virt_["num"])
+
+         # Virtual edges: (src → virtual), (virtual → dst)
+         logger.log(logging.INFO, "Creating virtual edges")
+         virt_["edges_1"] = py.stack([virt_["src"], virt_["node_ids"]], axis=1)
+         virt_["edges_2"] = py.stack([virt_["node_ids"], virt_["dst"]], axis=1)
+         virt_["edges"] = py.concatenate([virt_["edges_1"],
+                                          virt_["edges_2"]], axis=0)
+         virt_["costs"] = py.zeros((virt_["edges"].shape[0],), dtype=real_["costs"].dtype)
+
+         # Combine real and virtual edges/costs
+         logger.log(logging.INFO, "Combining real and virtual edges/costs")
+         all_edges = py.concatenate([real_["edges"], virt_["edges"]], axis=0)
+         all_costs = py.concatenate([real_["costs"], virt_["costs"]], axis=0)
+
+         # Final prizes
+         logger.log(logging.INFO, "Getting final prizes")
+         final_prizes = py.concatenate([prizes["nodes"], virt_["prizes"]], axis=0)
+
+         # Mapping virtual node ID -> edge index in original graph
+         logger.log(logging.INFO, "Creating mapping for virtual nodes")
+         mapping_nodes = dict(zip(virt_["node_ids"].tolist(), virt_["indices"].tolist()))
+
+         # Build return values
+         logger.log(logging.INFO, "Building return values")
+         edges_dict = {
+             "edges": all_edges,
+             "num_prior_edges": real_["edges"].shape[0],
+         }
+         mapping = {
+             "edges": mapping_edges,
+             "nodes": mapping_nodes,
+         }
+
+         return edges_dict, final_prizes, all_costs, mapping
+
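The real/virtual split follows G-Retriever: `pcst_fast` assigns prizes only to nodes, so an edge whose prize exceeds the cost threshold is rewritten as two zero-cost edges through a new "virtual" node that carries the surplus prize. A toy trace, with assumed numbers:

```python
# Assumed toy numbers, not from the package:
# edge (3 -> 7) has prize 0.8 and updated_cost_e = 0.5, so it is "virtual".
# With num_nodes = 10, it becomes virtual node 10 plus two free edges:
#   edges:  (3, 10) and (10, 7), each with cost 0.0
#   prizes: node 10 gets 0.8 - 0.5 = 0.3 (appended after the real node prizes)
# A "real" edge (1 -> 2) with prize 0.2 stays as-is with cost 0.5 - 0.2 = 0.3.
```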
+     def get_subgraph_nodes_edges(self,
+                                  num_nodes: int,
+                                  vertices: py.ndarray,
+                                  edges_dict: dict,
+                                  mapping: dict) -> dict:
+         """
+         Get the selected nodes and edges of the subgraph based on the vertices and edges computed
+         by the PCST algorithm.
+
+         Args:
+             num_nodes: The number of nodes in the graph.
+             vertices: The vertices selected by the PCST algorithm.
+             edges_dict: A dictionary containing the edges, the edge index, and the number of
+                 prior (non-virtual) edges.
+             mapping: A dictionary containing the mapping of nodes and edges.
+
+         Returns:
+             The selected nodes and edges of the extracted subgraph.
+         """
+         # Get edges information
+         edges = edges_dict["edges"]
+         num_prior_edges = edges_dict["num_prior_edges"]
+         # Retrieve the selected nodes and edges based on the given vertices and edges
+         subgraph_nodes = vertices[vertices < num_nodes]
+         subgraph_edges = [mapping["edges"][e.item()] for e in edges if e < num_prior_edges]
+         virtual_vertices = vertices[vertices >= num_nodes]
+         if len(virtual_vertices) > 0:
+             # Map each selected virtual node back to the original edge it stands for
+             virtual_edges = [mapping["nodes"][i.item()] for i in virtual_vertices]
+             subgraph_edges = py.array(subgraph_edges + virtual_edges)
+             edge_index = edges_dict["edge_index"][:, subgraph_edges]
+             subgraph_nodes = py.unique(
+                 py.concatenate(
+                     [subgraph_nodes, edge_index[0], edge_index[1]]
+                 )
+             )
+
+         return {"nodes": subgraph_nodes, "edges": subgraph_edges}
+
+     def extract_subgraph(self,
+                          text_emb: list,
+                          query_emb: list,
+                          modality: str,
+                          cfg: dict) -> dict:
+         """
+         Perform the Prize-Collecting Steiner Tree (PCST) algorithm to extract the subgraph.
+
+         Args:
+             text_emb: The textual description embedding.
+             query_emb: The query embedding. This can be an embedding of
+                 a prompt, sequence, or any other feature to be used for the subgraph extraction.
+             modality: The modality to use for the subgraph extraction
+                 (e.g., "text", "sequence", "smiles").
+             cfg: The configuration dictionary containing the Milvus setup.
+
+         Returns:
+             The selected nodes and edges of the subgraph.
+         """
+         # Load the collections for nodes and edges
+         logger.log(logging.INFO, "Preparing collections")
+         colls = self.prepare_collections(cfg, modality)
+
+         # Load the cached edge index
+         logger.log(logging.INFO, "Loading cache edge index")
+         with open(cfg.milvus_db.cache_edge_index_path, "rb") as f:
+             edge_index = pickle.load(f)
+         edge_index = py.array(edge_index)
+
+         # Assert the topk and topk_e values for subgraph retrieval
+         assert self.topk > 0, "topk must be greater than 0"
+         assert self.topk_e > 0, "topk_e must be greater than 0"
+
+         # Retrieve the top-k nodes and edges based on the query embedding
+         logger.log(logging.INFO, "compute_prizes")
+         prizes = self.compute_prizes(text_emb, query_emb, colls)
+
+         # Compute costs in constructing the subgraph
+         logger.log(logging.INFO, "compute_subgraph_costs")
+         edges_dict, prizes, costs, mapping = self.compute_subgraph_costs(
+             edge_index, colls["nodes"].num_entities, prizes)
+
+         # Retrieve the subgraph using the PCST algorithm
+         logger.log(logging.INFO, "Running PCST algorithm")
+         result_vertices, result_edges = pcst_fast.pcst_fast(
+             edges_dict["edges"].tolist(),
+             prizes.tolist(),
+             costs.tolist(),
+             self.root,
+             self.num_clusters,
+             self.pruning,
+             self.verbosity_level,
+         )
+
+         # Get subgraph nodes and edges based on the result of the PCST algorithm
+         logger.log(logging.INFO, "Getting subgraph nodes and edges")
+         subgraph = self.get_subgraph_nodes_edges(
+             colls["nodes"].num_entities,
+             py.asarray(result_vertices),
+             {"edges": py.asarray(result_edges),
+              "num_prior_edges": edges_dict["num_prior_edges"],
+              "edge_index": edge_index},
+             mapping)
+         logger.debug(subgraph)
+
+         return subgraph
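A rough end-to-end sketch of how a caller might drive this class; the embeddings and `cfg` are placeholders and would come from the same embedding model used to build the Milvus collections (see the `compute_prizes` docstring):

```python
# Hypothetical driver, not from the package's tool layer.
pcst = MultimodalPCSTPruning(topk=5, topk_e=5)
subgraph = pcst.extract_subgraph(
    text_emb=text_emb,    # embedding of the textual prompt
    query_emb=query_emb,  # embedding of the prompt/sequence/SMILES query
    modality="prompt",    # "prompt" skips the node-type specific collection
    cfg=cfg,              # Hydra config exposing cfg.milvus_db.* fields
)
print(subgraph["nodes"], subgraph["edges"])
```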
@@ -12,6 +12,37 @@ import hydra
  logging.basicConfig(level=logging.INFO)
  logger = logging.getLogger(__name__)
 
+ def cas_rn2pubchem_cid(casrn):
+     """
+     Convert CAS RN to PubChem CID.
+
+     Args:
+         casrn: The CAS RN of the drug.
+
+     Returns:
+         The PubChem CID of the drug.
+     """
+     # Load Hydra configuration for PubChem ID conversion
+     with hydra.initialize(version_base=None, config_path="../configs"):
+         cfg = hydra.compose(config_name='config',
+                             overrides=['utils/pubchem_utils=default'])
+         cfg = cfg.utils.pubchem_utils
+     # Prepare the URL
+     pubchem_url_for_drug = f"{cfg.pubchem_casrn2cid_url}{casrn}/record/JSON"
+     # Get the data
+     response = requests.get(pubchem_url_for_drug, timeout=60)
+     data = response.json()
+     # Extract the PubChem CID
+     cid = None
+     for substance in data.get("PC_Substances", []):
+         for compound in substance.get("compound", []):
+             if "id" in compound and "type" in compound["id"] and compound["id"]["type"] == 1:
+                 cid = compound["id"].get("id", {}).get("cid")
+                 break
+         if cid is not None:
+             break
+     return cid
+
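A hypothetical call, assuming the configured `pubchem_casrn2cid_url` points at PubChem's substance-by-registry-number endpoint:

```python
# CAS RN 50-78-2 is aspirin; PubChem maps it to CID 2244.
cid = cas_rn2pubchem_cid("50-78-2")
print(cid)  # expected: 2244 (None if no compound of type 1 is found)
```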
  def external_id2pubchem_cid(db, db_id):
      """
      Convert external DB ID to PubChem CID.
@@ -26,7 +57,7 @@ def external_id2pubchem_cid(db, db_id):
      Returns:
          The PubChem CID of the drug.
      """
-     logger.log(logging.INFO, "Load Hydra configuration for PubChem ID conversion.")
+     # Load Hydra configuration for PubChem ID conversion
      with hydra.initialize(version_base=None, config_path="../configs"):
          cfg = hydra.compose(config_name='config',
                              overrides=['utils/pubchem_utils=default'])
@@ -55,7 +86,7 @@ def pubchem_cid_description(cid):
      Returns:
          The description of the PubChem CID.
      """
-     logger.log(logging.INFO, "Load Hydra configuration for PubChem CID description.")
+     # Load Hydra configuration for PubChem CID description
      with hydra.initialize(version_base=None, config_path="../configs"):
          cfg = hydra.compose(config_name='config',
                              overrides=['utils/pubchem_utils=default'])
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: aiagents4pharma
- Version: 1.39.4
+ Version: 1.40.0
  Summary: AI Agents for drug discovery, drug development, and other pharmaceutical R&D.
  Classifier: Programming Language :: Python :: 3
  Classifier: License :: OSI Approved :: MIT License
@@ -63,6 +63,7 @@ Requires-Dist: plotly-express==0.4.1
  Requires-Dist: seaborn==0.13.2
  Requires-Dist: scanpy==1.11.0
  Requires-Dist: openpyxl==3.1.5
+ Requires-Dist: pymilvus==2.5.11
  Dynamic: license-file
 
  [![Talk2BioModels](https://github.com/VirtualPatientEngine/AIAgents4Pharma/actions/workflows/tests_talk2biomodels.yml/badge.svg)](https://github.com/VirtualPatientEngine/AIAgents4Pharma/actions/workflows/tests_talk2biomodels.yml)
@@ -112,7 +113,9 @@ _We now have all the agents available on Docker Hub._
 
  ##### **To run Talk2AIAgents4Pharma / Talk2KnowledgeGraphs**
 
- Both agents require [Ollama](https://ollama.com/) to run embedding models like `nomic-embed-text`. We use a **single startup script** that automatically detects your hardware (NVIDIA, AMD, or CPU) and handles container startup, model loading, and service orchestration.
+ If your machine has NVIDIA GPU(s), please install the following:
+ - [nvidia-cuda-toolkit](https://developer.nvidia.com/cuda-toolkit)
+ - [nvidia-container-toolkit](https://docs.nvidia.com/datacenter/cloud-native/container-toolkit/1.17.8/install-guide.html) (required for GPU support with Docker; enables containers to access NVIDIA GPUs for accelerated computing). After installing `nvidia-container-toolkit`, please restart Docker to ensure GPU support is enabled.
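One common sanity check after restarting Docker (not part of the package's own instructions) is to run a CUDA base image and confirm `nvidia-smi` can see the GPU from inside a container:

```sh
# Optional check; assumes Docker and nvidia-container-toolkit are installed.
docker run --rm --gpus all nvidia/cuda:12.4.1-base-ubuntu22.04 nvidia-smi
```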
 
  ##### **1. Download docker-compose.yml, .env.example and startup.sh from GitHub**
 
@@ -141,7 +144,6 @@ Then edit `.env` and add your API keys:
  ```env
  OPENAI_API_KEY=... # Required for both agents
  NVIDIA_API_KEY=... # Required for both agents
- OLLAMA_HOST=http://ollama:11434 # Required for AA4P / T2KG
  LANGCHAIN_TRACING_V2=true # Optional for both agents
  LANGCHAIN_API_KEY=... # Optional for both agents
  ```
@@ -203,6 +205,10 @@ If you are using docker on Windows, please follow these [Windows Setup Notes](ht
 
  [More on running multiple agents simultaneously](https://github.com/VirtualPatientEngine/AIAgents4Pharma/blob/main/aiagents4pharma/talk2aiagents4pharma/install.md#to-run-multiple-agents-simultaneously)
 
+ 📝 By default, `talk2knowledgegraphs` includes a small subset of the PrimeKG knowledge graph, allowing users to start interacting with it out of the box.
+ To switch to a different knowledge graph or use your own, refer to the [deployment guide](https://virtualpatientengine.github.io/AIAgents4Pharma/talk2knowledgegraphs/deployment/).
+
  #### Option 2: git (for developers and contributors)
 
  ![Python Version from PEP 621 TOML](https://img.shields.io/python/required-version-toml?tomlFilePath=https%3A%2F%2Fraw.githubusercontent.com%2FVirtualPatientEngine%2FAIAgents4Pharma%2Frefs%2Fheads%2Fmain%2Fpyproject.toml)
@@ -238,16 +244,6 @@ Only for **Talk2Scholars**, you also need a **Zotero API key**, which you can ge
 
  To use **Talk2Scholars**, you must have **FAISS** installed through **Conda**. Follow installation instructions for your OS [here](https://github.com/VirtualPatientEngine/AIAgents4Pharma/tree/main/aiagents4pharma/talk2scholars/install.md).
 
- To use **Talk2AIAgents4Pharma** or **Talk2KnowledgeGraphs**, you must have **Ollama** installed. Follow installation instructions for your OS [here](https://ollama.com/download).
-
- After installing, pull the `nomic-embed-text` model and start the server by running:
-
- ```sh
- ollama pull nomic-embed-text && ollama serve
- ```
-
- More details about the model are available [here](https://ollama.com/library/nomic-embed-text).
-
  Additionally on **Windows**, the `pcst_fast 1.0.10` library requires **Microsoft Visual C++ 14.0 or greater**.
  You can download the **Microsoft C++ Build Tools** [here](https://visualstudio.microsoft.com/visual-cpp-build-tools/).
 
@@ -273,6 +269,9 @@ _Replace `<agent>` with the agent name you are interested to launch:_
  - `talk2scholars`
  - `talk2cells`
 
+ 📝 By default, `talk2knowledgegraphs` includes a small subset of the PrimeKG knowledge graph, allowing users to start interacting with it out of the box.
+ To switch to a different knowledge graph or use your own, refer to the [deployment guide](https://virtualpatientengine.github.io/AIAgents4Pharma/talk2knowledgegraphs/deployment/).
+
  For detailed instructions on each agent, please refer to their respective modules.
 
  #### Option 3: pip (beta-release)
@@ -322,7 +321,7 @@ git push origin feat/your-feature-name
  #### Contacts for contributions
 
  - **Talk2Biomodels**: [@lilijap](https://github.com/lilijap), [@gurdeep330](https://github.com/gurdeep330)
- - **Talk2Cells**: [@gurdeep330](https://github.com/gurdeep330)
+ - **Talk2Cells**: [@tAndreaniSanofi](https://github.com/tAndreaniSanofi), [@gurdeep330](https://github.com/gurdeep330)
  - **Talk2KnowledgeGraphs**: [@awmulyadi](https://github.com/awmulyadi)
  - **Talk2Scholars**: [@ansh-info](https://github.com/ansh-info), [@gurdeep330](https://github.com/gurdeep330)
 
@@ -5,11 +5,11 @@ aiagents4pharma/talk2aiagents4pharma/agents/main_agent.py,sha256=gnXBvvi3t8DjUtX
  aiagents4pharma/talk2aiagents4pharma/configs/__init__.py,sha256=5ah__-8XyRblwT0U1ByRigNjt_GyCheu7zce4aM-eZE,68
  aiagents4pharma/talk2aiagents4pharma/configs/config.yaml,sha256=VnbMbVSYfCh68cHZ0JLu00UjOUmapejN3EsN3lnBXtU,51
  aiagents4pharma/talk2aiagents4pharma/configs/agents/__init__.py,sha256=zrJcq-4m0YUKfSlRGC8KzBmEooaASKuL_Y75yDp-ZoA,72
- aiagents4pharma/talk2aiagents4pharma/configs/agents/main_agent/default.yaml,sha256=GY8RiNatrkOkLClgkiAGDjL3bYspXaMbnh6n_e4JeJc,671
+ aiagents4pharma/talk2aiagents4pharma/configs/agents/main_agent/default.yaml,sha256=Qkq-p_TV67n5BsXyAj88rA239HC0ogLYDqbDrIuGprc,2426
  aiagents4pharma/talk2aiagents4pharma/states/__init__.py,sha256=3wSvCpM29oqvVjhbhabm7FNm9Zt0rHO5tEn63YW6doc,108
  aiagents4pharma/talk2aiagents4pharma/states/state_talk2aiagents4pharma.py,sha256=NxujEBDKubvpV9UG7ERTDRB6psr0XnObCNHyztLAhgo,485
  aiagents4pharma/talk2aiagents4pharma/tests/__init__.py,sha256=Jbw5tJxSrjGoaK5IX3pJWDCNzhrVQ10lkYq2oQ_KQD8,45
- aiagents4pharma/talk2aiagents4pharma/tests/test_main_agent.py,sha256=_zUm8i8vrBbcDgpwExa1sVGr1A9FgZFuwoLS395RnhU,4418
+ aiagents4pharma/talk2aiagents4pharma/tests/test_main_agent.py,sha256=V2GdYqbXhPa5vCC5-LD8HUw8S8DAaVC4X2kFa1MeF8o,7428
  aiagents4pharma/talk2biomodels/__init__.py,sha256=1cq1HX2xoi_a0nDPuXYoSTrnL26OHQBW3zXNwwwjFO0,181
  aiagents4pharma/talk2biomodels/agents/__init__.py,sha256=sn5-fREjMdEvb-OUan3iOqrgYGjplNx3J8hYOaW0Po8,128
  aiagents4pharma/talk2biomodels/agents/t2b_agent.py,sha256=g0DIW5P-dtJoVyG4weFdDgTrJPL_Dx1MMbTWextJDZ4,3455
@@ -72,15 +72,16 @@ aiagents4pharma/talk2cells/tools/scp_agent/__init__.py,sha256=s7g0lyH1lMD9pcWHLP
  aiagents4pharma/talk2cells/tools/scp_agent/display_studies.py,sha256=6q59gh_NQaiOU2rn55A3sIIFKlXi4SK3iKgySvUDrtQ,600
  aiagents4pharma/talk2cells/tools/scp_agent/search_studies.py,sha256=MLe-twtFnOu-P8P9diYq7jvHBHbWFRRCZLcfpUzqPMg,2806
  aiagents4pharma/talk2knowledgegraphs/__init__.py,sha256=Z0Eo7LTiKk0STsr8VI7wkCLq7PHrK1vYlH4I1hSNLiA,165
+ aiagents4pharma/talk2knowledgegraphs/milvus_data_dump.py,sha256=YOTO1HcSpNxQdc83jgwcuFkzGeCHov7CRDHQQL9PHNE,22608
  aiagents4pharma/talk2knowledgegraphs/agents/__init__.py,sha256=iOAzuy_8A03tQDFtSBhC9dldUo62z5gfxcVtXAdLOJs,92
- aiagents4pharma/talk2knowledgegraphs/agents/t2kg_agent.py,sha256=w4wSSQ9gw_fzpcHZ2Bnqok17YDkFeQ3d72JenvQm6Oc,3089
+ aiagents4pharma/talk2knowledgegraphs/agents/t2kg_agent.py,sha256=eUARAhzOz8PpHFzN2fP4L_fDEyqdllrAkBWa0otxGv4,3363
  aiagents4pharma/talk2knowledgegraphs/configs/__init__.py,sha256=4_DVdpahaJ55yPl0aZotlFA_MYWLFF2cubWyKtBVI_Q,126
  aiagents4pharma/talk2knowledgegraphs/configs/config.yaml,sha256=-AJXKnR2z5ig0SK_3vLL9JFjNRri7q7blHYFWxoTDl0,417
  aiagents4pharma/talk2knowledgegraphs/configs/agents/t2kg_agent/__init__.py,sha256=-fAORvyFmG2iSvFOFDixmt9OTQRR58y89uhhu2EgbA8,46
  aiagents4pharma/talk2knowledgegraphs/configs/agents/t2kg_agent/default.yaml,sha256=ENCGROwYFpR6g4QD518h73sshdn3vPVpotBMk1QJcpU,4830
  aiagents4pharma/talk2knowledgegraphs/configs/app/__init__.py,sha256=fKfc3FR7g5KjY9b6jzrU6cwKTVVpkoVZQS3dvUowu34,69
  aiagents4pharma/talk2knowledgegraphs/configs/app/frontend/__init__.py,sha256=-fAORvyFmG2iSvFOFDixmt9OTQRR58y89uhhu2EgbA8,46
- aiagents4pharma/talk2knowledgegraphs/configs/app/frontend/default.yaml,sha256=WJgd2ZU7_WQ1qlcTfkFlM8u23sH6eU2KgAm0E4kqqfs,941
+ aiagents4pharma/talk2knowledgegraphs/configs/app/frontend/default.yaml,sha256=6qE7j4k3gO83TNA31XVDJus0gTJloO2sG5R_ovZw7VM,2269
  aiagents4pharma/talk2knowledgegraphs/configs/tools/__init__.py,sha256=C1yyRZW8hqWw46p_bh1vAJp2z9aVvn4HpKjKkjlWIqY,150
  aiagents4pharma/talk2knowledgegraphs/configs/tools/graphrag_reasoning/__init__.py,sha256=-fAORvyFmG2iSvFOFDixmt9OTQRR58y89uhhu2EgbA8,46
  aiagents4pharma/talk2knowledgegraphs/configs/tools/graphrag_reasoning/default.yaml,sha256=Ua99yECXiwp4ZCUDgsDskYbKzcJrv7roQuLj31Zky4c,1037
@@ -97,12 +98,13 @@ aiagents4pharma/talk2knowledgegraphs/datasets/starkqa_primekg.py,sha256=Y-6-nORs
  aiagents4pharma/talk2knowledgegraphs/states/__init__.py,sha256=XaqorSvx634dWRRlXUdzlisHtYMyqgJ2q7TanzsKlhw,108
  aiagents4pharma/talk2knowledgegraphs/states/state_talk2knowledgegraphs.py,sha256=y5bp6yObN-AQtTq-m8ml7UnZaeKYUiPV_yjskAzBJaI,1087
  aiagents4pharma/talk2knowledgegraphs/tests/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- aiagents4pharma/talk2knowledgegraphs/tests/test_agents_t2kg_agent.py,sha256=PPfHKnfqMbUOBKU7q4VbQvHQymX1M_zTYdysQgVxKCs,3851
+ aiagents4pharma/talk2knowledgegraphs/tests/test_agents_t2kg_agent.py,sha256=CmN2bS9Jv6P0m58JPgP9W2mreJThq2ZNY6PFP0Tltfs,6507
  aiagents4pharma/talk2knowledgegraphs/tests/test_datasets_biobridge_primekg.py,sha256=crH0eFA3P8P6IYzi1UWNa4YvRVrtlBzoScf9NaE1lDk,9827
  aiagents4pharma/talk2knowledgegraphs/tests/test_datasets_dataset.py,sha256=NFUlsZvhfIrkF4YenWfahrLK93Xhm5UYEGG_uYN2LVM,566
  aiagents4pharma/talk2knowledgegraphs/tests/test_datasets_primekg.py,sha256=Pvu0r93CpnhjkfMxc-EiVLpAJ04FdW9iTamCnetu654,2272
  aiagents4pharma/talk2knowledgegraphs/tests/test_datasets_starkqa_primekg.py,sha256=TuIsqcN1Mww3DTqGk6ebgJBWzUWdMWEq2yRQuYSFqvA,4416
  aiagents4pharma/talk2knowledgegraphs/tests/test_tools_graphrag_reasoning.py,sha256=aOKHTber2Cg3mjNjfIa6RZU7XdFj5C2ps1YEUXw76CI,10650
+ aiagents4pharma/talk2knowledgegraphs/tests/test_tools_milvus_multimodal_subgraph_extraction.py,sha256=ATD3KDK-WQHG3t480u6sZavfMN7rTZcnhcCoQnPRocY,18889
  aiagents4pharma/talk2knowledgegraphs/tests/test_tools_multimodal_subgraph_extraction.py,sha256=Da-hXcu41_5Ge4DPlOoY6OqBwYnXPc58Q89wuywqVJM,5806
  aiagents4pharma/talk2knowledgegraphs/tests/test_tools_subgraph_extraction.py,sha256=C2HzmAG1XCeV1hwZzz3-9_2dm_84-i1BvTNWA1pqUwM,5393
  aiagents4pharma/talk2knowledgegraphs/tests/test_tools_subgraph_summarization.py,sha256=oBqfspXXOxH04OQuPb8BCW0liIQTGKXtaPNSrPpQtFc,7597
@@ -113,21 +115,23 @@ aiagents4pharma/talk2knowledgegraphs/tests/test_utils_embeddings_ollama.py,sha25
  aiagents4pharma/talk2knowledgegraphs/tests/test_utils_embeddings_sentencetransformer.py,sha256=Xkuf2UFGCXldj1zcsh6kqfQYLDf5i0B6KP3KcmNLSzQ,1452
  aiagents4pharma/talk2knowledgegraphs/tests/test_utils_enrichments_enrichments.py,sha256=N6HRr4lWHXY7bTHe2uXJe4D_EG9WqZPibZne6qLl9_k,1447
  aiagents4pharma/talk2knowledgegraphs/tests/test_utils_enrichments_ollama.py,sha256=JhY7axvVULLywDJ2ctA-gob5YPeaJYWsaMNjHT6L9CU,3021
- aiagents4pharma/talk2knowledgegraphs/tests/test_utils_enrichments_ols.py,sha256=5GTSkfKSDS5geR0YfilfnDyUsYli0hv7N8PiDwRvlIE,2370
+ aiagents4pharma/talk2knowledgegraphs/tests/test_utils_enrichments_ols.py,sha256=5nSBLwfBnwq4K78Q4GM0zTeym5csxHhOwGwx2utBBEU,2280
  aiagents4pharma/talk2knowledgegraphs/tests/test_utils_enrichments_pubchem.py,sha256=0SgYvqdvxseUYTHx2KuSNI2hnmQ3VVVz0F-79_-P41o,1769
  aiagents4pharma/talk2knowledgegraphs/tests/test_utils_enrichments_reactome.py,sha256=r1D74mavsnSCm4xnWl0n0nM9PZqgm3doD2dulNrKNVQ,1754
  aiagents4pharma/talk2knowledgegraphs/tests/test_utils_enrichments_uniprot.py,sha256=G13Diw7cA5TGINUNO1CDnN4rM6KbepxRXNjuzY578DI,1611
+ aiagents4pharma/talk2knowledgegraphs/tests/test_utils_extractions_milvus_multimodal_pcst.py,sha256=XlB39lhaCax1r5eAYdvaJqyWvxgG1MO8jKD4MWyuT68,7491
  aiagents4pharma/talk2knowledgegraphs/tests/test_utils_kg_utils.py,sha256=pal76wi7WgQWUNk56BrzfFV8jKpbDaHHdbwtgx_gXLI,2410
- aiagents4pharma/talk2knowledgegraphs/tests/test_utils_pubchem_utils.py,sha256=K1Y6QM0MDP1IrAdcWkigl8R-O-i-lsL4NCyOrWewhdM,1246
- aiagents4pharma/talk2knowledgegraphs/tools/__init__.py,sha256=uleTEbhgvlYw4fOqV4NmoFvxGTon2Oim7jTQ5qPmYoU,216
+ aiagents4pharma/talk2knowledgegraphs/tests/test_utils_pubchem_utils.py,sha256=31WPX8MrhnztoHUROAlH5KvHeXMbB_Jndp3ypAKJO9E,1543
+ aiagents4pharma/talk2knowledgegraphs/tools/__init__.py,sha256=u50fnnIhm7NHt4JhQeXdF_XtNYR2i35p4VRNQzP1CVQ,268
  aiagents4pharma/talk2knowledgegraphs/tools/graphrag_reasoning.py,sha256=OEuOFncDRdb7TQEGq4rkT5On-jI-R7Nt8K5EBzaND8w,5338
  aiagents4pharma/talk2knowledgegraphs/tools/load_arguments.py,sha256=zhmsRp-8vjB5rRekqTA07d3yb-42HWqng9dDMkvK6hM,623
+ aiagents4pharma/talk2knowledgegraphs/tools/milvus_multimodal_subgraph_extraction.py,sha256=eANDj6b6l9Oc5fOZxGx-PInd9YfD4_4sDFFPuzPkFNs,21945
  aiagents4pharma/talk2knowledgegraphs/tools/multimodal_subgraph_extraction.py,sha256=Qjl8hXG8Gv5jQ4pBX8me0pGGakqRZmcDfTGgdEHD9pc,15394
  aiagents4pharma/talk2knowledgegraphs/tools/subgraph_extraction.py,sha256=te06QMFQfgJWrjaGrqpcOYeaV38jwm0KY_rXVSMHkeI,11468
  aiagents4pharma/talk2knowledgegraphs/tools/subgraph_summarization.py,sha256=mDSBOxopDfNhEJeU8fVI8b5lXTYrRzcc97aLbFgYSy4,4413
  aiagents4pharma/talk2knowledgegraphs/utils/__init__.py,sha256=cZqb3LZLmBnmyAtWFv2Z-4uJvQmx0M4zKsfiWrlM3Pk,195
  aiagents4pharma/talk2knowledgegraphs/utils/kg_utils.py,sha256=6vQnPkeOWae_8jePjhma3sJuMTngy0I0tqzdFt6OqKg,2507
- aiagents4pharma/talk2knowledgegraphs/utils/pubchem_utils.py,sha256=8cve_KLtQUhG3uMKYpyelZvpETSsNGRdGE4X0NXMk4M,2442
+ aiagents4pharma/talk2knowledgegraphs/utils/pubchem_utils.py,sha256=kMSabEZDIa6BruoFtnvWQnUWX47FUmTo22CLihYz8F8,3458
  aiagents4pharma/talk2knowledgegraphs/utils/embeddings/__init__.py,sha256=POSDrSdFAWsBCueOPD-Fok-ARdTywJU1ivwpT9EU1Kw,199
  aiagents4pharma/talk2knowledgegraphs/utils/embeddings/embeddings.py,sha256=1nGznrAj-xT0xuSMBGz2dOujJ7M_IwSR84njxtxsy9A,2523
  aiagents4pharma/talk2knowledgegraphs/utils/embeddings/huggingface.py,sha256=2vi_elf6EgzfagFAO5QnL3a_aXZyN7B1EBziu44MTfM,3806
@@ -137,11 +141,12 @@ aiagents4pharma/talk2knowledgegraphs/utils/embeddings/sentence_transformer.py,sh
  aiagents4pharma/talk2knowledgegraphs/utils/enrichments/__init__.py,sha256=K157MWJ4dn2fj3G5ClhyAOXg9jI2H02GP07J6UpasJw,230
  aiagents4pharma/talk2knowledgegraphs/utils/enrichments/enrichments.py,sha256=Bx8x6zzk5614ApWB90N_iv4_Y_Uq0-KwUeBwYSdQMU4,924
  aiagents4pharma/talk2knowledgegraphs/utils/enrichments/ollama.py,sha256=8eoxR-VHo0G7ReQIwje7xEhE-SJlHdef7_wJRpnvFIc,4116
- aiagents4pharma/talk2knowledgegraphs/utils/enrichments/ols_terms.py,sha256=xSPP-h2q9fABz6Sd6ZlH9WiyoO8KZeEnPI5n2nJpWL4,2443
- aiagents4pharma/talk2knowledgegraphs/utils/enrichments/pubchem_strings.py,sha256=CQEGQ6Qsex2T91Vw7zTrclJBbSGGhxeWaVJb8tnURAQ,1691
+ aiagents4pharma/talk2knowledgegraphs/utils/enrichments/ols_terms.py,sha256=QLINhZM51A8JwFoF0INzdUFT2pdlc_h0rdYRgKr49vQ,2772
+ aiagents4pharma/talk2knowledgegraphs/utils/enrichments/pubchem_strings.py,sha256=zdBYhRxYfdNSViBwDNwc-9DTYkYGTPlyyHB_OIgbb3E,2630
  aiagents4pharma/talk2knowledgegraphs/utils/enrichments/reactome_pathways.py,sha256=I0cD0Fk2Uk27_4jEaIhpoGhoMh_RphY1VtkMnk4dkPg,2011
  aiagents4pharma/talk2knowledgegraphs/utils/enrichments/uniprot_proteins.py,sha256=z0Jb3tt8VzRjzqI9oVcUvRlPPg6BUdmslfKDIEFE_h8,3013
- aiagents4pharma/talk2knowledgegraphs/utils/extractions/__init__.py,sha256=5bt3H6gGSAwN2K-IG7AHwG2lC4yQeMd2_jbhu2z5XKg,116
+ aiagents4pharma/talk2knowledgegraphs/utils/extractions/__init__.py,sha256=6xVclMlSuAIHQXBvn5D9zRLDzSv2LWLcAwDQw-nwZgM,153
+ aiagents4pharma/talk2knowledgegraphs/utils/extractions/milvus_multimodal_pcst.py,sha256=peFGn9XXpE7H9leVqQHc2Ec7x2ZRiehYkcCCCNo3b_E,16415
  aiagents4pharma/talk2knowledgegraphs/utils/extractions/multimodal_pcst.py,sha256=Irh5JXEhaLZ6Rxv3h5Anif_rGNItyLOGDWg1RACmoDA,12628
  aiagents4pharma/talk2knowledgegraphs/utils/extractions/pcst.py,sha256=m5p0yoJb7I19ua5yeQfXPf7c4r6S1XPwttsrM7Qoy94,9336
  aiagents4pharma/talk2scholars/__init__.py,sha256=NOZxTklAH1j1ggu97Ib8Xn9LCKudEWt-8dx8w7yxVD8,180
@@ -251,8 +256,8 @@ aiagents4pharma/talk2scholars/tools/zotero/utils/review_helper.py,sha256=IPD1V9y
  aiagents4pharma/talk2scholars/tools/zotero/utils/write_helper.py,sha256=ALwLecy1QVebbsmXJiDj1GhGmyhq2R2tZlAyEl1vfhw,7410
  aiagents4pharma/talk2scholars/tools/zotero/utils/zotero_path.py,sha256=oIrfbOySgts50ksHKyjcWjRkPRIS88g3Lc0v9mBkU8w,6375
  aiagents4pharma/talk2scholars/tools/zotero/utils/zotero_pdf_downloader.py,sha256=ERBha8afU6Q1EaRBe9qB8tchOzZ4_KfFgDW6EElOJoU,4816
- aiagents4pharma-1.39.4.dist-info/licenses/LICENSE,sha256=IcIbyB1Hyk5ZDah03VNQvJkbNk2hkBCDqQ8qtnCvB4Q,1077
- aiagents4pharma-1.39.4.dist-info/METADATA,sha256=0vzjxCMHm4xhDVEZoMhq68SJBox2dvrEV8GEJNcLvLY,15462
- aiagents4pharma-1.39.4.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
- aiagents4pharma-1.39.4.dist-info/top_level.txt,sha256=-AH8rMmrSnJtq7HaAObS78UU-cTCwvX660dSxeM7a0A,16
- aiagents4pharma-1.39.4.dist-info/RECORD,,
+ aiagents4pharma-1.40.0.dist-info/licenses/LICENSE,sha256=IcIbyB1Hyk5ZDah03VNQvJkbNk2hkBCDqQ8qtnCvB4Q,1077
+ aiagents4pharma-1.40.0.dist-info/METADATA,sha256=jd8SL6NNmmj29FeubrC3KIZAQ3vWKuQjdPCQDCs5Wio,15945
+ aiagents4pharma-1.40.0.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+ aiagents4pharma-1.40.0.dist-info/top_level.txt,sha256=-AH8rMmrSnJtq7HaAObS78UU-cTCwvX660dSxeM7a0A,16
+ aiagents4pharma-1.40.0.dist-info/RECORD,,