aiagents4pharma 1.39.0__py3-none-any.whl → 1.39.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (42)
  1. aiagents4pharma/talk2scholars/agents/main_agent.py +7 -7
  2. aiagents4pharma/talk2scholars/configs/agents/talk2scholars/main_agent/default.yaml +88 -12
  3. aiagents4pharma/talk2scholars/configs/agents/talk2scholars/s2_agent/default.yaml +1 -20
  4. aiagents4pharma/talk2scholars/configs/agents/talk2scholars/zotero_agent/default.yaml +1 -26
  5. aiagents4pharma/talk2scholars/tests/test_main_agent.py +20 -2
  6. aiagents4pharma/talk2scholars/tests/test_nvidia_nim_reranker_utils.py +28 -0
  7. aiagents4pharma/talk2scholars/tests/test_paper_download_tools.py +107 -29
  8. aiagents4pharma/talk2scholars/tests/test_pdf_agent.py +2 -3
  9. aiagents4pharma/talk2scholars/tests/test_question_and_answer_tool.py +194 -543
  10. aiagents4pharma/talk2scholars/tests/test_s2_agent.py +2 -2
  11. aiagents4pharma/talk2scholars/tests/{test_s2_display.py → test_s2_display_dataframe.py} +2 -3
  12. aiagents4pharma/talk2scholars/tests/test_s2_query_dataframe.py +201 -0
  13. aiagents4pharma/talk2scholars/tests/test_s2_retrieve.py +7 -6
  14. aiagents4pharma/talk2scholars/tests/test_s2_utils_ext_ids.py +413 -0
  15. aiagents4pharma/talk2scholars/tests/test_tool_helper_utils.py +140 -0
  16. aiagents4pharma/talk2scholars/tests/test_zotero_agent.py +0 -1
  17. aiagents4pharma/talk2scholars/tests/test_zotero_read.py +16 -18
  18. aiagents4pharma/talk2scholars/tools/paper_download/download_arxiv_input.py +92 -37
  19. aiagents4pharma/talk2scholars/tools/pdf/question_and_answer.py +73 -575
  20. aiagents4pharma/talk2scholars/tools/pdf/utils/__init__.py +10 -0
  21. aiagents4pharma/talk2scholars/tools/pdf/utils/generate_answer.py +97 -0
  22. aiagents4pharma/talk2scholars/tools/pdf/utils/nvidia_nim_reranker.py +77 -0
  23. aiagents4pharma/talk2scholars/tools/pdf/utils/retrieve_chunks.py +83 -0
  24. aiagents4pharma/talk2scholars/tools/pdf/utils/tool_helper.py +125 -0
  25. aiagents4pharma/talk2scholars/tools/pdf/utils/vector_store.py +162 -0
  26. aiagents4pharma/talk2scholars/tools/s2/display_dataframe.py +33 -10
  27. aiagents4pharma/talk2scholars/tools/s2/multi_paper_rec.py +39 -16
  28. aiagents4pharma/talk2scholars/tools/s2/query_dataframe.py +124 -10
  29. aiagents4pharma/talk2scholars/tools/s2/retrieve_semantic_scholar_paper_id.py +49 -17
  30. aiagents4pharma/talk2scholars/tools/s2/search.py +39 -16
  31. aiagents4pharma/talk2scholars/tools/s2/single_paper_rec.py +34 -16
  32. aiagents4pharma/talk2scholars/tools/s2/utils/multi_helper.py +49 -16
  33. aiagents4pharma/talk2scholars/tools/s2/utils/search_helper.py +51 -16
  34. aiagents4pharma/talk2scholars/tools/s2/utils/single_helper.py +50 -17
  35. {aiagents4pharma-1.39.0.dist-info → aiagents4pharma-1.39.1.dist-info}/METADATA +58 -105
  36. {aiagents4pharma-1.39.0.dist-info → aiagents4pharma-1.39.1.dist-info}/RECORD +39 -32
  37. aiagents4pharma/talk2scholars/tests/test_llm_main_integration.py +0 -89
  38. aiagents4pharma/talk2scholars/tests/test_routing_logic.py +0 -74
  39. aiagents4pharma/talk2scholars/tests/test_s2_query.py +0 -95
  40. {aiagents4pharma-1.39.0.dist-info → aiagents4pharma-1.39.1.dist-info}/WHEEL +0 -0
  41. {aiagents4pharma-1.39.0.dist-info → aiagents4pharma-1.39.1.dist-info}/licenses/LICENSE +0 -0
  42. {aiagents4pharma-1.39.0.dist-info → aiagents4pharma-1.39.1.dist-info}/top_level.txt +0 -0
@@ -109,8 +109,28 @@ class SearchData:
 
     def _filter_papers(self) -> None:
         """Filter and format papers."""
-        self.filtered_papers = {
-            paper["paperId"]: {
+        # Build filtered papers mapping with unified paper_ids list
+        filtered: Dict[str, Any] = {}
+        for paper in self.papers:
+            if not paper.get("title") or not paper.get("authors"):
+                continue
+            ext = paper.get("externalIds", {}) or {}
+            # Prioritized list of IDs: arXiv, PubMed, PubMedCentral, DOI
+            ids: list[str] = []
+            arxiv = ext.get("ArXiv")
+            if arxiv:
+                ids.append(f"arxiv:{arxiv}")
+            pubmed = ext.get("PubMed")
+            if pubmed:
+                ids.append(f"pubmed:{pubmed}")
+            pmc = ext.get("PubMedCentral")
+            if pmc:
+                ids.append(f"pmc:{pmc}")
+            doi_id = ext.get("DOI")
+            if doi_id:
+                ids.append(f"doi:{doi_id}")
+            # Compose metadata dict
+            metadata = {
                 "semantic_scholar_paper_id": paper["paperId"],
                 "Title": paper.get("title", "N/A"),
                 "Abstract": paper.get("abstract", "N/A"),
@@ -124,27 +144,42 @@ class SearchData:
                     for author in paper.get("authors", [])
                 ],
                 "URL": paper.get("url", "N/A"),
-                "arxiv_id": paper.get("externalIds", {}).get("ArXiv", "N/A"),
-                "doi": paper.get("externalIds", {}).get("DOI", "N/A"),
+                "arxiv_id": arxiv or "N/A",
+                "pmc_id": pmc or "N/A",
+                "pm_id": pubmed or "N/A",
+                "doi": doi_id or "N/A",
+                "paper_ids": ids,
+                "source": "semantic_scholar",
             }
-            for paper in self.papers
-            if paper.get("title") and paper.get("authors")
-        }
+            filtered[paper["paperId"]] = metadata
+        self.filtered_papers = filtered
 
         logger.info("Filtered %d papers", len(self.filtered_papers))
 
+    def _get_snippet(self, abstract: str) -> str:
+        """Extract the first one or two sentences from an abstract."""
+        if not abstract or abstract == "N/A":
+            return ""
+        sentences = abstract.split(". ")
+        snippet_sentences = sentences[:2]
+        snippet = ". ".join(snippet_sentences)
+        if not snippet.endswith("."):
+            snippet += "."
+        return snippet
+
     def _create_content(self) -> None:
         """Create the content message for the response."""
         top_papers = list(self.filtered_papers.values())[:3]
-        top_papers_info = "\n".join(
-            [
-                f"{i+1}. {paper['Title']} ({paper['Year']}; "
-                f"semantic_scholar_paper_id: {paper['semantic_scholar_paper_id']}; "
-                f"arXiv ID: {paper['arxiv_id']})"
-                f"doi: {paper['doi']})"
-                for i, paper in enumerate(top_papers)
-            ]
-        )
+        entries = []
+        for i, paper in enumerate(top_papers):
+            title = paper.get("Title", "N/A")
+            year = paper.get("Year", "N/A")
+            snippet = self._get_snippet(paper.get("Abstract", ""))
+            entry = f"{i+1}. {title} ({year})"
+            if snippet:
+                entry += f"\n Abstract snippet: {snippet}"
+            entries.append(entry)
+        top_papers_info = "\n".join(entries)
 
         logger.info("-----------Filtered %d papers", self.get_paper_count())
 
@@ -5,7 +5,7 @@ Utility for fetching recommendations based on a single paper.
 """
 
 import logging
-from typing import Any, Optional, Dict
+from typing import Any, Optional, Dict, List
 import hydra
 import requests
 
@@ -120,8 +120,26 @@ class SinglePaperRecData:
 
     def _filter_papers(self) -> None:
         """Filter and format papers."""
-        self.filtered_papers = {
-            paper["paperId"]: {
+        # Build filtered recommendations with unified paper_ids
+        filtered: Dict[str, Any] = {}
+        for paper in self.recommendations:
+            if not paper.get("title") or not paper.get("authors"):
+                continue
+            ext = paper.get("externalIds", {}) or {}
+            ids: List[str] = []
+            arxiv = ext.get("ArXiv")
+            if arxiv:
+                ids.append(f"arxiv:{arxiv}")
+            pubmed = ext.get("PubMed")
+            if pubmed:
+                ids.append(f"pubmed:{pubmed}")
+            pmc = ext.get("PubMedCentral")
+            if pmc:
+                ids.append(f"pmc:{pmc}")
+            doi_id = ext.get("DOI")
+            if doi_id:
+                ids.append(f"doi:{doi_id}")
+            metadata = {
                 "semantic_scholar_paper_id": paper["paperId"],
                 "Title": paper.get("title", "N/A"),
                 "Abstract": paper.get("abstract", "N/A"),
@@ -135,27 +153,42 @@ class SinglePaperRecData:
                     for author in paper.get("authors", [])
                 ],
                 "URL": paper.get("url", "N/A"),
-                "arxiv_id": paper.get("externalIds", {}).get("ArXiv", "N/A"),
-                "doi": paper.get("externalIds", {}).get("DOI", "N/A"),
+                "arxiv_id": arxiv or "N/A",
+                "pm_id": pubmed or "N/A",
+                "pmc_id": pmc or "N/A",
+                "doi": doi_id or "N/A",
+                "paper_ids": ids,
+                "source": "semantic_scholar",
             }
-            for paper in self.recommendations
-            if paper.get("title") and paper.get("authors")
-        }
+            filtered[paper["paperId"]] = metadata
+        self.filtered_papers = filtered
 
         logger.info("Filtered %d papers", len(self.filtered_papers))
 
+    def _get_snippet(self, abstract: str) -> str:
+        """Extract the first one or two sentences from an abstract."""
+        if not abstract or abstract == "N/A":
+            return ""
+        sentences = abstract.split(". ")
+        snippet_sentences = sentences[:2]
+        snippet = ". ".join(snippet_sentences)
+        if not snippet.endswith("."):
+            snippet += "."
+        return snippet
+
     def _create_content(self) -> None:
         """Create the content message for the response."""
         top_papers = list(self.filtered_papers.values())[:3]
-        top_papers_info = "\n".join(
-            [
-                f"{i+1}. {paper['Title']} ({paper['Year']}; "
-                f"semantic_scholar_paper_id: {paper['semantic_scholar_paper_id']}; "
-                f"arXiv ID: {paper['arxiv_id']})"
-                f"doi: {paper['doi']})"
-                for i, paper in enumerate(top_papers)
-            ]
-        )
+        entries: list[str] = []
+        for i, paper in enumerate(top_papers):
+            title = paper.get("Title", "N/A")
+            year = paper.get("Year", "N/A")
+            snippet = self._get_snippet(paper.get("Abstract", ""))
+            entry = f"{i+1}. {title} ({year})"
+            if snippet:
+                entry += f"\n Abstract snippet: {snippet}"
+            entries.append(entry)
+        top_papers_info = "\n".join(entries)
 
         self.content = (
             "Recommendations based on the single paper were successful. "
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: aiagents4pharma
-Version: 1.39.0
+Version: 1.39.1
 Summary: AI Agents for drug discovery, drug development, and other pharmaceutical R&D.
 Classifier: Programming Language :: Python :: 3
 Classifier: License :: OSI Approved :: MIT License
@@ -96,10 +96,10 @@ Our toolkit currently consists of the following agents:
 - T2B and T2KG accepted at the MLGenX workshop during ICLR #2025 in Singapore. [Read More](https://openreview.net/forum?id=av4QhBNeZo)
 
 <div align="center">
-  <strong>Watch the presentation:</strong><br><br>
-  <a href="https://www.youtube.com/watch?v=3cU_OxY4HiE">
-  <img src="https://img.youtube.com/vi/3cU_OxY4HiE/0.jpg" alt="Watch the presentation" width="480">
-  </a>
+  <strong>Watch the presentation:</strong><br><br>
+  <a href="https://www.youtube.com/watch?v=3cU_OxY4HiE">
+  <img src="https://img.youtube.com/vi/3cU_OxY4HiE/0.jpg" alt="Watch the presentation" width="480">
+  </a>
 </div>
 
 ## Getting Started
@@ -145,117 +145,62 @@ LANGCHAIN_TRACING_V2=true # Optional for both agents
 LANGCHAIN_API_KEY=... # Optional for both agents
 ```
 
-To use **Talk2AIAgents4Pharma** or **Talk2KnowledgeGraphs**, you need a free **NVIDIA API key**. Create an account and apply for free credits [here](https://build.nvidia.com/explore/discover).
-
-###### Notes for Windows Users
-
-If you are using Windows, it is recommended to install **Git Bash** for a smoother experience when running the bash commands in this guide.
-
-- For applications that use **Docker Compose**, Git Bash is **required**.
-- For applications that use **docker run** manually, Git Bash is **optional**, but recommended for consistency.
-
-You can download Git Bash here: [Git for Windows](https://git-scm.com/downloads).
-
-When using Docker on Windows, make sure you **run Docker with administrative privileges** if you face permission issues.
-
-To resolve for permission issues, you can:
-
-- Review the official Docker documentation on [Windows permission requirements](https://docs.docker.com/desktop/setup/install/windows-permission-requirements/).
-- Alternatively, follow the community discussion and solutions on [Docker Community Forums](https://forums.docker.com/t/error-when-trying-to-run-windows-containers-docker-client-must-be-run-with-elevated-privileges/136619).
-
-**LangSmith** support is optional. To enable it, create an API key [here](https://docs.smith.langchain.com/administration/how_to_guides/organization_management/create_account_api_key).
+[Additional Notes for Windows Users](https://github.com/VirtualPatientEngine/AIAgents4Pharma/tree/main/aiagents4pharma/install.md)
 
 ##### **3. Start the application**
 
-Run the startup script. It will:
-
-- Detect your hardware configuration (NVIDIA GPU, AMD GPU, or CPU). Apple Metal is unavailable inside Docker, and Intel SIMD optimizations are automatically handled without special configuration.
-- Choose the correct Ollama image (`latest` or `rocm`)
-- Launch the Ollama container with appropriate runtime settings
-- Pull the required embedding model (`nomic-embed-text`)
-- Start the agent **after the model is available**
-
 ```sh
 chmod +x startup.sh
 ./startup.sh # Add --cpu flag to force CPU mode if needed
 ```
 
-##### **4. Access the Web UI**
-
-Once started, the agent is available at:
-
-```
-http://localhost:8501
-```
+[More about startup script](https://github.com/VirtualPatientEngine/AIAgents4Pharma/tree/main/aiagents4pharma/install.md)
 
 ##### **To Run Talk2Biomodels / Talk2Scholars**
 
-1. **Run the containers**
-
 ###### Talk2Biomodels
 
 ```docker
 docker run -d \
-  --name talk2biomodels \
-  -e OPENAI_API_KEY=<your_openai_api_key> \
-  -e NVIDIA_API_KEY=<your_nvidia_api_key> \
-  -p 8501:8501 \
-  virtualpatientengine/talk2biomodels
+  --name talk2biomodels \
+  -e OPENAI_API_KEY=<your_openai_api_key> \
+  -e NVIDIA_API_KEY=<your_nvidia_api_key> \
+  -p 8501:8501 \
+  virtualpatientengine/talk2biomodels
 ```
 
 ###### Talk2Scholars
 
 ```docker
 docker run -d \
-  --name talk2scholars \
-  -e OPENAI_API_KEY=<your_openai_api_key> \
-  -e ZOTERO_API_KEY=<your_zotero_api_key> \
-  -e ZOTERO_USER_ID=<your_zotero_user_id> \
-  -e NVIDIA_API_KEY=<your_nvidia_api_key> \
-  -p 8501:8501 \
-  virtualpatientengine/talk2scholars
+  --name talk2scholars \
+  -e OPENAI_API_KEY=<your_openai_api_key> \
+  -e ZOTERO_API_KEY=<your_zotero_api_key> \
+  -e ZOTERO_USER_ID=<your_zotero_user_id> \
+  -e NVIDIA_API_KEY=<your_nvidia_api_key> \
+  -p 8501:8501 \
+  virtualpatientengine/talk2scholars
 ```
 
-2. **Access the Web App**
-   Open your browser and go to:
+##### **4. Access the Web UI**
+
+Once started, the agent is available at:
+
+```
+http://localhost:8501
+```
 
-   ```
-   http://localhost:8501
-   ```
+To use **Talk2AIAgents4Pharma** or **Talk2KnowledgeGraphs**, you need a free **NVIDIA API key**. Create an account and apply for free credits [here](https://build.nvidia.com/explore/discover).
 
 To use **Talk2BioModels** or **Talk2Scholars**, you need a free **NVIDIA API key**. Create an account and apply for free credits [here](https://build.nvidia.com/explore/discover).
 
 Only for **Talk2Scholars**, you also need a **Zotero API key**, which you can generate [here](https://www.zotero.org/user/login#applications). _(For all other agents, the Zotero key is not required.)_
 
-If you are using docker on Windows, please follow these [Windows Setup Notes](#notes-for-windows-users).
+If you are using docker on Windows, please follow these [Windows Setup Notes](https://github.com/VirtualPatientEngine/AIAgents4Pharma/tree/main/aiagents4pharma).
 
 **LangSmith** support is optional. To enable it, create an API key [here](https://docs.smith.langchain.com/administration/how_to_guides/organization_management/create_account_api_key).
 
-#### Notes
-
-- Be sure to **replace the placeholder values** with your actual credentials before running any container:
-
-  - `<your_openai_api_key>`
-  - `<your_nvidia_api_key>`
-  - `<your_zotero_api_key>`
-  - `<your_zotero_user_id>`
-
-- All agents default to **port `8501`**. If you plan to run multiple agents simultaneously, make sure to assign **different ports** to avoid conflicts.
-
-  Example (Talk2Scholars on port `8502`):
-
-  ```docker
-  docker run -d \
-    --name talk2scholars \
-    -e OPENAI_API_KEY=<your_openai_api_key> \
-    -e ZOTERO_API_KEY=<your_zotero_api_key> \
-    -e ZOTERO_USER_ID=<your_zotero_user_id> \
-    -e NVIDIA_API_KEY=<your_nvidia_api_key> \
-    -p 8502:8501 \
-    virtualpatientengine/talk2scholars
-  ```
-
-  Then access the app at: [http://localhost:8502](http://localhost:8502)
+[More on running multiple agents simultaneously](https://github.com/VirtualPatientEngine/AIAgents4Pharma/tree/main/aiagents4pharma/install.md)
 
 #### Option 2: git (for developers and contributors)
 
@@ -278,14 +223,14 @@ conda create --name AIAgents4Pharma python=3.12 -y && conda activate AIAgents4Ph
 
 3. **Initialize API Keys**
 
-   ```env
-   export OPENAI_API_KEY=.... # Required for all agents
-   export NVIDIA_API_KEY=.... # Required for all agents
-   export ZOTERO_API_KEY=.... # Required for T2S
-   export ZOTERO_USER_ID=.... # Required for T2S
-   export LANGCHAIN_TRACING_V2=true # Optional for all agents
-   export LANGCHAIN_API_KEY=... # Optional for all agents
-   ```
+   ```env
+   export OPENAI_API_KEY=.... # Required for all agents
+   export NVIDIA_API_KEY=.... # Required for all agents
+   export ZOTERO_API_KEY=.... # Required for T2S
+   export ZOTERO_USER_ID=.... # Required for T2S
+   export LANGCHAIN_TRACING_V2=true # Optional for all agents
+   export LANGCHAIN_API_KEY=... # Optional for all agents
+   ```
 
 To use **Talk2AIAgents4Pharma**, **Talk2BioModels**, **Talk2KnowledgeGraphs**, or **Talk2Scholars**, you need a free **NVIDIA API key**. Create an account and apply for free credits [here](https://build.nvidia.com/explore/discover).
 
@@ -315,10 +260,12 @@ If you skip the previous step, it will default to the name `default`.
 `xxxx` will be the 4-digit ID created for the session._
 
 4. **Launch the app:**
-   ```sh
-   streamlit run app/frontend/streamlit_app_<agent>.py
-   ```
-   _Replace `<agent>` with the agent name you are interested to launch:_
+
+   ```sh
+   streamlit run app/frontend/streamlit_app_<agent>.py
+   ```
+
+   _Replace `<agent>` with the agent name you are interested to launch:_
 
    - `talk2aiagents4pharma`
    - `talk2biomodels`
@@ -348,17 +295,23 @@ All types of contributions are appreciated — whether you're fixing bugs, addin
 1. Star this repository to show your support.
 2. Fork the repository.
 3. Create a new branch for your work:
-   ```sh
-   git checkout -b feat/your-feature-name
-   ```
+
+   ```sh
+   git checkout -b feat/your-feature-name
+   ```
+
 4. Make your changes and commit them:
-   ```sh
-   git commit -m "feat: add a brief description of your change"
-   ```
+
+   ```sh
+   git commit -m "feat: add a brief description of your change"
+   ```
+
 5. Push your branch:
-   ```sh
-   git push origin feat/your-feature-name
-   ```
+
+   ```sh
+   git push origin feat/your-feature-name
+   ```
+
 6. Open a Pull Request.
 
 #### Areas where you can help
@@ -146,7 +146,7 @@ aiagents4pharma/talk2knowledgegraphs/utils/extractions/multimodal_pcst.py,sha256
 aiagents4pharma/talk2knowledgegraphs/utils/extractions/pcst.py,sha256=m5p0yoJb7I19ua5yeQfXPf7c4r6S1XPwttsrM7Qoy94,9336
 aiagents4pharma/talk2scholars/__init__.py,sha256=NOZxTklAH1j1ggu97Ib8Xn9LCKudEWt-8dx8w7yxVD8,180
 aiagents4pharma/talk2scholars/agents/__init__.py,sha256=c_0Pk85bt-RfK5RMyALM3MXo3qXVMoYS7BOqM9wuFME,317
-aiagents4pharma/talk2scholars/agents/main_agent.py,sha256=oCSWPj3TUgTIERmYbBTYipNrU1g956LXJEUx-7-KAQ0,3354
+aiagents4pharma/talk2scholars/agents/main_agent.py,sha256=oQqa1z4nvfUvPWCX-SUHGs9jOCJKtzjw86jXJZ68gCk,3382
 aiagents4pharma/talk2scholars/agents/paper_download_agent.py,sha256=J_kEl8joQfM80211xlNLZA9RkN52fY58dbCisuiEft8,3687
 aiagents4pharma/talk2scholars/agents/pdf_agent.py,sha256=GEXzJMQxIeZ7zLP-AlnTMU-n_KXZ7g22Qd9L3USIc_4,3626
 aiagents4pharma/talk2scholars/agents/s2_agent.py,sha256=oui0CMSyXmBGBJ7LnYq8Ce0V8Qc3BS6GgH5Qx5wI6oM,4565
@@ -156,13 +156,13 @@ aiagents4pharma/talk2scholars/configs/config.yaml,sha256=F7BCgmcnhfkyKT6qFL11E_i
 aiagents4pharma/talk2scholars/configs/agents/__init__.py,sha256=plv5Iw34gvbGZbRyJapvoOiiFXekRQIwjV_yy5AR_SI,104
 aiagents4pharma/talk2scholars/configs/agents/talk2scholars/__init__.py,sha256=D94LW4cXLmJe4dNl5qoR9QN0JnBqGLbQDgDLqhCNUE0,213
 aiagents4pharma/talk2scholars/configs/agents/talk2scholars/main_agent/__init__.py,sha256=fqQQ-GlRcbzru2KmEk3oMma0R6_SzGM8dOXzYeU4oVA,46
-aiagents4pharma/talk2scholars/configs/agents/talk2scholars/main_agent/default.yaml,sha256=EmUAxeQSnH4U5Op5_XOzCbcexDCp-Rpz3z0yVPRtQUg,1315
+aiagents4pharma/talk2scholars/configs/agents/talk2scholars/main_agent/default.yaml,sha256=hew5vyrhLeJktoN6DTPRRpnINrXqKZ4trLJQDOuDGOA,4712
 aiagents4pharma/talk2scholars/configs/agents/talk2scholars/paper_download_agent/__init__.py,sha256=fqQQ-GlRcbzru2KmEk3oMma0R6_SzGM8dOXzYeU4oVA,46
 aiagents4pharma/talk2scholars/configs/agents/talk2scholars/pdf_agent/__init__.py,sha256=fqQQ-GlRcbzru2KmEk3oMma0R6_SzGM8dOXzYeU4oVA,46
 aiagents4pharma/talk2scholars/configs/agents/talk2scholars/s2_agent/__init__.py,sha256=fqQQ-GlRcbzru2KmEk3oMma0R6_SzGM8dOXzYeU4oVA,46
-aiagents4pharma/talk2scholars/configs/agents/talk2scholars/s2_agent/default.yaml,sha256=_sSt2jCgSILwrXkywDAxkXONCZn896owLBaf46iFI0I,1323
+aiagents4pharma/talk2scholars/configs/agents/talk2scholars/s2_agent/default.yaml,sha256=N3a_brrB1ilUCeqHQrqu97Olz2snko3tTNPMNR8yTHI,208
 aiagents4pharma/talk2scholars/configs/agents/talk2scholars/zotero_agent/__init__.py,sha256=fqQQ-GlRcbzru2KmEk3oMma0R6_SzGM8dOXzYeU4oVA,46
-aiagents4pharma/talk2scholars/configs/agents/talk2scholars/zotero_agent/default.yaml,sha256=SOdjRiGUxjW9JeCRDd_U1RjCclItkoPODrj5RpIrxSY,2030
+aiagents4pharma/talk2scholars/configs/agents/talk2scholars/zotero_agent/default.yaml,sha256=FUbJIiH_FHeL0n1M-HSsLX_R2gMBRxBnPcejhd0HrYc,399
 aiagents4pharma/talk2scholars/configs/app/__init__.py,sha256=tXpOW3R4eAfNoqvoaHfabSG-DcMHmUGSTg_4zH_vlgw,94
 aiagents4pharma/talk2scholars/configs/app/frontend/__init__.py,sha256=fqQQ-GlRcbzru2KmEk3oMma0R6_SzGM8dOXzYeU4oVA,46
 aiagents4pharma/talk2scholars/configs/app/frontend/default.yaml,sha256=A6nYjrgzEyRv5JYsGN7oqNX4-tufMBZ6mg-A7bMX6V4,906
@@ -186,48 +186,55 @@ aiagents4pharma/talk2scholars/configs/tools/zotero_write/default.yaml,sha256=gB7
 aiagents4pharma/talk2scholars/state/__init__.py,sha256=ReScKLpEvedq4P6ww52NRQS0Xr6SSQV7hqoQ83Mt75U,138
 aiagents4pharma/talk2scholars/state/state_talk2scholars.py,sha256=Z2zV-SXB2SMnn8PnjWjmK-OD5KjUwMTChBpXBAcl2hg,3885
 aiagents4pharma/talk2scholars/tests/__init__.py,sha256=U3PsTiUZaUBD1IZanFGkDIOdFieDVJtGKQ5-woYUo8c,45
-aiagents4pharma/talk2scholars/tests/test_llm_main_integration.py,sha256=FBRqS06IKJYFOudQEHQr-9oJ4tftkH-gTCowTAqwWSg,3686
-aiagents4pharma/talk2scholars/tests/test_main_agent.py,sha256=IZYSocYVwqPil2lF6L07mKm8PUq7vjopmqNiCm6IJEA,6876
+aiagents4pharma/talk2scholars/tests/test_main_agent.py,sha256=4Z3xLq8MGlayGhQE5qKOirYotwJrlf7fk8rqAaORorg,7617
+aiagents4pharma/talk2scholars/tests/test_nvidia_nim_reranker_utils.py,sha256=-q4Y2CMTAOvrSyyZ1MmpeEuKvJcZSPe6jmUD0rZhUew,947
 aiagents4pharma/talk2scholars/tests/test_paper_download_agent.py,sha256=gKSQp-sw62FplNnGYW0wv2ZIUEefh3o0tFWbRzy9yLs,5068
 aiagents4pharma/talk2scholars/tests/test_paper_download_biorxiv.py,sha256=gosuW4VBXyorQXbf0TpgAIT2hQjEeuvTTnT1jnoBYqM,6405
 aiagents4pharma/talk2scholars/tests/test_paper_download_medrxiv.py,sha256=iNq9vEIVapmnUZTRJXCv_UoaWThGapW7Vt_2BmZG9NE,6414
-aiagents4pharma/talk2scholars/tests/test_paper_download_tools.py,sha256=3mycLeEgH5XkwxuoXfTpQb8c8xFtIX2HjVnACPrSf60,7141
-aiagents4pharma/talk2scholars/tests/test_pdf_agent.py,sha256=scGCTgka2JuoUhzZwzDn0OgIYihOLhXbwb5uGFR02aI,4302
-aiagents4pharma/talk2scholars/tests/test_question_and_answer_tool.py,sha256=KR4GjjGgBjWXwEVzSh4ZpYjcWPq-EaZTT_fzRheb0uY,37286
+aiagents4pharma/talk2scholars/tests/test_paper_download_tools.py,sha256=lGXbHl3lEXDjMHAX9uCgrREBOUuOHWv9TsYEshiG_tc,10421
+aiagents4pharma/talk2scholars/tests/test_pdf_agent.py,sha256=9Kr0FcyFWmUDTasYh6ZdS-OWQqy37mH9K3p5Y0dqQHw,4283
+aiagents4pharma/talk2scholars/tests/test_question_and_answer_tool.py,sha256=7S9bqQSdvm7xihtBSAQD3RQimFtVHejPIkaaEddeNRs,22057
 aiagents4pharma/talk2scholars/tests/test_read_helper_utils.py,sha256=yTT1aLpTydDSdGcRZur5cMktwYZbFK5NEUgOBvltcWg,3819
-aiagents4pharma/talk2scholars/tests/test_routing_logic.py,sha256=g79tG68ZrUOL3-duCCJwvFK6OieR5KedRf3yTUDqIFk,2784
-aiagents4pharma/talk2scholars/tests/test_s2_agent.py,sha256=xvlPU4Lz_DdQLTpdtoHW9l_AMvFrzC-FXE5royGbtLM,7806
-aiagents4pharma/talk2scholars/tests/test_s2_display.py,sha256=Q1q0TEavO2kkXBjo2yeSbzV7xHspnDSvTveaUB-NkQE,3116
+aiagents4pharma/talk2scholars/tests/test_s2_agent.py,sha256=TsdNlZ6vHz18bbX6Vto28nbBLRDI94wSFt5-1acDK64,7768
+aiagents4pharma/talk2scholars/tests/test_s2_display_dataframe.py,sha256=2VPPZitQRWDZV0ceaK2-hQqkIvoigSPWNHoFOgKJjQE,3107
 aiagents4pharma/talk2scholars/tests/test_s2_multi.py,sha256=VCTfexhtX7FgWOBS0YtSm1zghbByZnni1NBLGVTJVGI,11166
-aiagents4pharma/talk2scholars/tests/test_s2_query.py,sha256=8Em_bcexpv3odC20TRPi6eoz-6fPXGKabob1Ye0jdsg,3286
-aiagents4pharma/talk2scholars/tests/test_s2_retrieve.py,sha256=YtA2nbPRtoSR7mPqEjqLF5ERGVzTfeULztsNoCI48X8,2003
+aiagents4pharma/talk2scholars/tests/test_s2_query_dataframe.py,sha256=6FBA1RwYx3_n-y1rntJuw3R5a0WMgcQuYVChhpM8uSo,7603
+aiagents4pharma/talk2scholars/tests/test_s2_retrieve.py,sha256=bCcy4i3LyoVf4qm0kcCqsmyjhwqEif3v38jsalqD8yc,2130
 aiagents4pharma/talk2scholars/tests/test_s2_search.py,sha256=mCGpoCYVn0SJ9BPcEjTz2MLy_K2XJIxvPngwsMoKijA,9945
 aiagents4pharma/talk2scholars/tests/test_s2_single.py,sha256=KjSh7V2cl1IuO_M9O6dj0vnMHr13H-xKxia_ZgT4qag,10313
+aiagents4pharma/talk2scholars/tests/test_s2_utils_ext_ids.py,sha256=6xbHzClkYI_ZcR3-Xl7nGs8-hB0IyLlxn8rzeXrJxFQ,15129
 aiagents4pharma/talk2scholars/tests/test_state.py,sha256=A2lqA4h37QptLnwKWwm1Y79yELE4wtEBXzCiQ13YdLw,1270
-aiagents4pharma/talk2scholars/tests/test_zotero_agent.py,sha256=jFEtfQVEwEQ6v3kq7A1_p2MKCu5wbtX47V4bE-fKD6M,6158
+aiagents4pharma/talk2scholars/tests/test_tool_helper_utils.py,sha256=gJBzV1-hkzGi3VY9SG4JSoKTyz7KavPy1HjEF6bnfIQ,6141
+aiagents4pharma/talk2scholars/tests/test_zotero_agent.py,sha256=iWbWlat5RWE8mmCSqKGNG7Xzbdieua6cKGq-jwqP4ws,6119
 aiagents4pharma/talk2scholars/tests/test_zotero_human_in_the_loop.py,sha256=YelLQu9Y_r1SNQsC1xoLHJoJ3soIZtBt1MFbbNhY-Dg,10744
 aiagents4pharma/talk2scholars/tests/test_zotero_path.py,sha256=Ko0HyXCrpm-vs8Bkf-syxp3MfL1IvZwXXgPExyQy_F8,18618
 aiagents4pharma/talk2scholars/tests/test_zotero_pdf_downloader_utils.py,sha256=N9CBRG0rQyqptKRCaYCH2VJk87O-wc9Cc1KI5MMnyjA,1670
-aiagents4pharma/talk2scholars/tests/test_zotero_read.py,sha256=E7ncgspEzhJTvmZuKplugZJPPWsoiFU_xLUg-oz6qkI,29100
+aiagents4pharma/talk2scholars/tests/test_zotero_read.py,sha256=qkudWMjxjjTYKJ1zvpWs0EJXCIvFx-iNKyKs_Tv1CSI,29061
 aiagents4pharma/talk2scholars/tests/test_zotero_write.py,sha256=qWlO0XoZJ6vxUxgisjYv9Np87CoTEDxiQBEOhdj9foo,6111
 aiagents4pharma/talk2scholars/tools/__init__.py,sha256=c8pYHDqR9P0Frz2jWjbvyizfSTBMlMFzGsiQzx2KC9c,189
 aiagents4pharma/talk2scholars/tools/paper_download/__init__.py,sha256=Lu5FmBxDH8mIIYE41G8_BKYXUf-vHIYVwujidbeydl4,295
-aiagents4pharma/talk2scholars/tools/paper_download/download_arxiv_input.py,sha256=WTWvXbh0C96OoMoPf8Bgu0AgorsdkWslac_WqlHc4bo,3900
+aiagents4pharma/talk2scholars/tools/paper_download/download_arxiv_input.py,sha256=e3S8JusJVVSEC_tFh8H5CS9ZqD20jX8hkFO9EYESBXo,5653
 aiagents4pharma/talk2scholars/tools/paper_download/download_biorxiv_input.py,sha256=R92OaR4Omilj-v-rT0Me_BhxN8-AF0sbDwhUxNCUTm4,3718
 aiagents4pharma/talk2scholars/tools/paper_download/download_medrxiv_input.py,sha256=UaHsdZXseUMQfiIovD0kS8r9DZ6KJpRGtTZyOCTRYVs,3786
 aiagents4pharma/talk2scholars/tools/pdf/__init__.py,sha256=DPpOfON3AySko5EBBAe_3udOoSaAdQWNyGeNvJyV5R8,138
-aiagents4pharma/talk2scholars/tools/pdf/question_and_answer.py,sha256=pzJhSOdchyS3J4Tzoh7aFMALJFCqEk4Xh4LCDa-5I1I,23406
+aiagents4pharma/talk2scholars/tools/pdf/question_and_answer.py,sha256=PI9ltnKZpORXAPPowPt6qVbXclXXoY4mamj8ZKA0tsM,5586
+aiagents4pharma/talk2scholars/tools/pdf/utils/__init__.py,sha256=Ghsh5nnESckMqvnokMRW_3mo2sDstOPKAAuiAHF1b8o,273
+aiagents4pharma/talk2scholars/tools/pdf/utils/generate_answer.py,sha256=YvnBWD7yn12H5nchPBFBD9txviwJtX56f3fDRwkGBBE,3299
+aiagents4pharma/talk2scholars/tools/pdf/utils/nvidia_nim_reranker.py,sha256=M4OSqact8QMk-Ov05Bcz7Y7tr2rBAgdM3DRsQKB0r0o,2851
+aiagents4pharma/talk2scholars/tools/pdf/utils/retrieve_chunks.py,sha256=agV7SHy5ool0x_N7WmNI-C1Wpc-6EToYoiqkMJX6xWs,2599
+aiagents4pharma/talk2scholars/tools/pdf/utils/tool_helper.py,sha256=maOIh6sLSOqU3PTsty5AZKIoBGxziFDi-LliSULQyK0,4702
+aiagents4pharma/talk2scholars/tools/pdf/utils/vector_store.py,sha256=8yumaryNLZdRP_WcRt6GHk9Oi8asYPkOS9vKSn7zJak,5810
 aiagents4pharma/talk2scholars/tools/s2/__init__.py,sha256=w_eiw0pG8HNp79F9O_icXs_Yl_4odsmagYNKDTjIsvk,428
-aiagents4pharma/talk2scholars/tools/s2/display_dataframe.py,sha256=wOZ7UJq4b8vl7NU9mU3BW_nRmCIkeBvc6nbGGegysek,3181
-aiagents4pharma/talk2scholars/tools/s2/multi_paper_rec.py,sha256=N7-6dzRI71bK7MG3-A4G505YnNvAMJW_Qjjtcoo4JYw,2799
-aiagents4pharma/talk2scholars/tools/s2/query_dataframe.py,sha256=uI6-UnZu96Uirzohx-F7vMHOVSPlPrD4XJdwgF5GcMo,2866
-aiagents4pharma/talk2scholars/tools/s2/retrieve_semantic_scholar_paper_id.py,sha256=llzMMnEQKeYVamJbF4_DTMx-BgVe79vwDcUIFGLrmUY,2615
-aiagents4pharma/talk2scholars/tools/s2/search.py,sha256=p86RLy_9bMxm3KTDL2L0Ilb3yeF4K6IIkZCgbt4CsiE,2529
-aiagents4pharma/talk2scholars/tools/s2/single_paper_rec.py,sha256=rnl6Bb7mKXg_lsProAYaSEJNIzWgNVZuDHqD-dDe9EI,2763
+aiagents4pharma/talk2scholars/tools/s2/display_dataframe.py,sha256=qnY7AQDnAs0SrmV7AZ9pWm10HEmPlO7EBfzYvpb3jvs,3965
+aiagents4pharma/talk2scholars/tools/s2/multi_paper_rec.py,sha256=NY8nTsW9xP6qakiQ0wbq9AXP2mcT5-GMZoa9cnr4EjY,3770
+aiagents4pharma/talk2scholars/tools/s2/query_dataframe.py,sha256=omff-2g85qcLZj5Qo35m2LJlE1AOzESEfT4jt3ZVYz0,7066
+aiagents4pharma/talk2scholars/tools/s2/retrieve_semantic_scholar_paper_id.py,sha256=aJTIbUKXLvjD9P7S-SM0O5iTRdUdomOcE67lHwsmGjg,3954
+aiagents4pharma/talk2scholars/tools/s2/search.py,sha256=SUAN32x1d9dNikFKitcXZZ0BhFfsGMdLDk0z0DpJXuA,3334
+aiagents4pharma/talk2scholars/tools/s2/single_paper_rec.py,sha256=3lvrIPlgP8gp_3YQCcUQKyHmNMHevN8OqLTq4mPeBEY,3502
 aiagents4pharma/talk2scholars/tools/s2/utils/__init__.py,sha256=wBTPVgiXbmIJUMouOQRwojgk5PJXeEinDJzHzEToZbU,229
-aiagents4pharma/talk2scholars/tools/s2/utils/multi_helper.py,sha256=kjzZ90Cd23hXBQ861Z2BEjE1VvI02zxc1mIj2S7YWFo,7379
-aiagents4pharma/talk2scholars/tools/s2/utils/search_helper.py,sha256=AembYVndEOwgcDz_n1VWAydfL8ufQ5pEokTKkrx47jA,6474
-aiagents4pharma/talk2scholars/tools/s2/utils/single_helper.py,sha256=zLENnFSyQIpXqmJKow1XHS9pWbf27tsSUEvzydNCj9I,7094
+aiagents4pharma/talk2scholars/tools/s2/utils/multi_helper.py,sha256=Z5G4e7R7cUO60_HYbcd3BJC_-jtybc5DGcOC8yjpprY,8642
+aiagents4pharma/talk2scholars/tools/s2/utils/search_helper.py,sha256=wVkQW2KTmS2av4W5PqqRhCb53n-egZLpAD95xV2mO68,7839
+aiagents4pharma/talk2scholars/tools/s2/utils/single_helper.py,sha256=ClTOpDfjgj1BtfCFtBVkl7ANAIZhG_mUh6WCTUegtlQ,8363
 aiagents4pharma/talk2scholars/tools/zotero/__init__.py,sha256=wXiQILLq-utV35PkDUpm_F074mG9yRMyGQAFlr9UAOw,197
 aiagents4pharma/talk2scholars/tools/zotero/zotero_read.py,sha256=Fgv7PIkIlRqfl8EprcXqr1S4wtbSG8itv7x-3nMf3Rc,3990
 aiagents4pharma/talk2scholars/tools/zotero/zotero_review.py,sha256=iqwpolg7GWAjXizubLrPaAsgOpsOhKz-tFRyLOiBvC0,6325
@@ -238,8 +245,8 @@ aiagents4pharma/talk2scholars/tools/zotero/utils/review_helper.py,sha256=IPD1V9y
 aiagents4pharma/talk2scholars/tools/zotero/utils/write_helper.py,sha256=ALwLecy1QVebbsmXJiDj1GhGmyhq2R2tZlAyEl1vfhw,7410
 aiagents4pharma/talk2scholars/tools/zotero/utils/zotero_path.py,sha256=oIrfbOySgts50ksHKyjcWjRkPRIS88g3Lc0v9mBkU8w,6375
 aiagents4pharma/talk2scholars/tools/zotero/utils/zotero_pdf_downloader.py,sha256=ERBha8afU6Q1EaRBe9qB8tchOzZ4_KfFgDW6EElOJoU,4816
-aiagents4pharma-1.39.0.dist-info/licenses/LICENSE,sha256=IcIbyB1Hyk5ZDah03VNQvJkbNk2hkBCDqQ8qtnCvB4Q,1077
-aiagents4pharma-1.39.0.dist-info/METADATA,sha256=ITwj9yujMnDVZtQM3n09ZxDv4ueGCGDlG2JZOvU3n7k,16788
-aiagents4pharma-1.39.0.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
-aiagents4pharma-1.39.0.dist-info/top_level.txt,sha256=-AH8rMmrSnJtq7HaAObS78UU-cTCwvX660dSxeM7a0A,16
-aiagents4pharma-1.39.0.dist-info/RECORD,,
+aiagents4pharma-1.39.1.dist-info/licenses/LICENSE,sha256=IcIbyB1Hyk5ZDah03VNQvJkbNk2hkBCDqQ8qtnCvB4Q,1077
+aiagents4pharma-1.39.1.dist-info/METADATA,sha256=9ncdNvJh3EUIjSlFJI9w8DF7k2DPmaRlOFiQ0tj1HSs,14578
+aiagents4pharma-1.39.1.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+aiagents4pharma-1.39.1.dist-info/top_level.txt,sha256=-AH8rMmrSnJtq7HaAObS78UU-cTCwvX660dSxeM7a0A,16
+aiagents4pharma-1.39.1.dist-info/RECORD,,
@@ -1,89 +0,0 @@
-"""
-Integration tests for talk2scholars system with OpenAI.
-This test triggers all sub-agents by sending a conversation that covers:
-- Searching Semantic Scholar (S2 agent)
-- Retrieving Zotero results (Zotero agent)
-- Querying PDF content (PDF agent)
-- Downloading paper details from arXiv (Paper Download agent)
-"""
-
-# This will be covered in the next pr.
-
-#
-# import os
-# import pytest
-# import hydra
-# from langchain_openai import ChatOpenAI
-# from langchain_core.messages import HumanMessage, AIMessage
-# from ..agents.main_agent import get_app
-# from ..state.state_talk2scholars import Talk2Scholars
-#
-# # pylint: disable=redefined-outer-name,too-few-public-methods
-#
-#
-# @pytest.mark.skipif(
-#     not os.getenv("OPENAI_API_KEY"), reason="Requires OpenAI API key to run"
-# )
-# def test_main_agent_real_llm():
-#     """
-#     Integration test for the Talk2Scholars system using a real OpenAI LLM.
-#     This test verifies that the supervisor correctly routes to all sub-agents by
-#     providing a conversation with queries intended to trigger each agent.
-#     """
-#     # Load Hydra configuration EXACTLY like in main_agent.py
-#     with hydra.initialize(version_base=None, config_path="../configs"):
-#         cfg = hydra.compose(
-#             config_name="config", overrides=["agents/talk2scholars/main_agent=default"]
-#         )
-#         hydra_cfg = cfg.agents.talk2scholars.main_agent
-#         assert hydra_cfg is not None, "Hydra config failed to load"
-#
-#     # Use the real OpenAI API (ensure OPENAI_API_KEY is set in environment)
-#     llm = ChatOpenAI(model="gpt-4o-mini", temperature=hydra_cfg.temperature)
-#
-#     # Initialize the main agent workflow (with real Hydra config)
-#     thread_id = "test_thread"
-#     app = get_app(thread_id, llm)
-#
-#     # Provide a multi-turn conversation intended to trigger all sub-agents:
-#     # - S2 agent: "Search Semantic Scholar for AI papers on transformers."
-#     # - Zotero agent: "Retrieve Zotero results for these papers."
-#     # - PDF agent: "Analyze the attached PDF and summarize its key findings."
-#     # - Paper Download agent: "Download the paper details from arXiv."
-#     initial_state = Talk2Scholars(
-#         messages=[
-#             HumanMessage(
-#                 content="Search Semantic Scholar for AI papers on transformers."
-#             ),
-#             HumanMessage(content="Also, retrieve Zotero results for these papers."),
-#             HumanMessage(
-#                 content="I have attached a PDF; analyze it and tell me the key findings."
-#             ),
-#             HumanMessage(content="Finally, download the paper from arXiv."),
-#         ]
-#     )
-#
-#     # Invoke the agent (which routes to the appropriate sub-agents)
-#     result = app.invoke(
-#         initial_state,
-#         {"configurable": {"config_id": thread_id, "thread_id": thread_id}},
-#     )
-#
-#     # Assert that the result contains messages and that the final message is valid.
-#     assert "messages" in result, "Expected 'messages' in the response"
-#     last_message = result["messages"][-1]
-#     assert isinstance(
-#         last_message, (HumanMessage, AIMessage, str)
-#     ), "Last message should be a valid response type"
-#
-#     # Concatenate message texts (if available) to perform keyword checks.
-#     output_text = " ".join(
-#         msg.content if hasattr(msg, "content") else str(msg)
-#         for msg in result["messages"]
-#     ).lower()
-#
-#     # Check for keywords that suggest each sub-agent was invoked.
-#     for keyword in ["semantic scholar", "zotero", "pdf", "arxiv"]:
-#         assert (
-#             keyword in output_text
-#         ), f"Expected keyword '{keyword}' in the output response"