aiagents4pharma 1.28.0__py3-none-any.whl → 1.30.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (52)
  1. aiagents4pharma/talk2scholars/agents/__init__.py +1 -0
  2. aiagents4pharma/talk2scholars/agents/main_agent.py +35 -209
  3. aiagents4pharma/talk2scholars/agents/paper_download_agent.py +86 -0
  4. aiagents4pharma/talk2scholars/agents/s2_agent.py +10 -6
  5. aiagents4pharma/talk2scholars/agents/zotero_agent.py +12 -6
  6. aiagents4pharma/talk2scholars/configs/agents/talk2scholars/main_agent/default.yaml +2 -48
  7. aiagents4pharma/talk2scholars/configs/agents/talk2scholars/paper_download_agent/__init__.py +3 -0
  8. aiagents4pharma/talk2scholars/configs/agents/talk2scholars/s2_agent/default.yaml +5 -28
  9. aiagents4pharma/talk2scholars/configs/agents/talk2scholars/zotero_agent/default.yaml +5 -21
  10. aiagents4pharma/talk2scholars/configs/config.yaml +3 -0
  11. aiagents4pharma/talk2scholars/configs/tools/__init__.py +1 -0
  12. aiagents4pharma/talk2scholars/configs/tools/download_arxiv_paper/__init__.py +3 -0
  13. aiagents4pharma/talk2scholars/configs/tools/multi_paper_recommendation/default.yaml +1 -1
  14. aiagents4pharma/talk2scholars/configs/tools/search/default.yaml +1 -1
  15. aiagents4pharma/talk2scholars/configs/tools/single_paper_recommendation/default.yaml +1 -1
  16. aiagents4pharma/talk2scholars/configs/tools/zotero_read/default.yaml +42 -1
  17. aiagents4pharma/talk2scholars/configs/tools/zotero_write/__inti__.py +3 -0
  18. aiagents4pharma/talk2scholars/state/state_talk2scholars.py +1 -0
  19. aiagents4pharma/talk2scholars/tests/test_main_agent.py +186 -111
  20. aiagents4pharma/talk2scholars/tests/test_paper_download_agent.py +142 -0
  21. aiagents4pharma/talk2scholars/tests/test_paper_download_tools.py +154 -0
  22. aiagents4pharma/talk2scholars/tests/test_s2_display.py +74 -0
  23. aiagents4pharma/talk2scholars/tests/test_s2_multi.py +282 -0
  24. aiagents4pharma/talk2scholars/tests/test_s2_query.py +78 -0
  25. aiagents4pharma/talk2scholars/tests/test_s2_retrieve.py +65 -0
  26. aiagents4pharma/talk2scholars/tests/test_s2_search.py +266 -0
  27. aiagents4pharma/talk2scholars/tests/test_s2_single.py +274 -0
  28. aiagents4pharma/talk2scholars/tests/test_zotero_path.py +57 -0
  29. aiagents4pharma/talk2scholars/tests/test_zotero_read.py +412 -0
  30. aiagents4pharma/talk2scholars/tests/test_zotero_write.py +626 -0
  31. aiagents4pharma/talk2scholars/tools/paper_download/__init__.py +17 -0
  32. aiagents4pharma/talk2scholars/tools/paper_download/abstract_downloader.py +43 -0
  33. aiagents4pharma/talk2scholars/tools/paper_download/arxiv_downloader.py +108 -0
  34. aiagents4pharma/talk2scholars/tools/paper_download/download_arxiv_input.py +60 -0
  35. aiagents4pharma/talk2scholars/tools/s2/multi_paper_rec.py +50 -34
  36. aiagents4pharma/talk2scholars/tools/s2/retrieve_semantic_scholar_paper_id.py +8 -8
  37. aiagents4pharma/talk2scholars/tools/s2/search.py +36 -23
  38. aiagents4pharma/talk2scholars/tools/s2/single_paper_rec.py +44 -38
  39. aiagents4pharma/talk2scholars/tools/zotero/__init__.py +2 -0
  40. aiagents4pharma/talk2scholars/tools/zotero/utils/__init__.py +5 -0
  41. aiagents4pharma/talk2scholars/tools/zotero/utils/zotero_path.py +63 -0
  42. aiagents4pharma/talk2scholars/tools/zotero/zotero_read.py +64 -19
  43. aiagents4pharma/talk2scholars/tools/zotero/zotero_write.py +247 -0
  44. {aiagents4pharma-1.28.0.dist-info → aiagents4pharma-1.30.0.dist-info}/METADATA +6 -5
  45. {aiagents4pharma-1.28.0.dist-info → aiagents4pharma-1.30.0.dist-info}/RECORD +48 -30
  46. aiagents4pharma/talk2scholars/tests/test_call_s2.py +0 -100
  47. aiagents4pharma/talk2scholars/tests/test_call_zotero.py +0 -94
  48. aiagents4pharma/talk2scholars/tests/test_s2_tools.py +0 -355
  49. aiagents4pharma/talk2scholars/tests/test_zotero_tool.py +0 -171
  50. {aiagents4pharma-1.28.0.dist-info → aiagents4pharma-1.30.0.dist-info}/LICENSE +0 -0
  51. {aiagents4pharma-1.28.0.dist-info → aiagents4pharma-1.30.0.dist-info}/WHEEL +0 -0
  52. {aiagents4pharma-1.28.0.dist-info → aiagents4pharma-1.30.0.dist-info}/top_level.txt +0 -0

aiagents4pharma/talk2scholars/tools/zotero/zotero_read.py
@@ -13,6 +13,11 @@ from langchain_core.tools import tool
 from langchain_core.tools.base import InjectedToolCallId
 from langgraph.types import Command
 from pydantic import BaseModel, Field
+from aiagents4pharma.talk2scholars.tools.zotero.utils.zotero_path import (
+    get_item_collections,
+)
+
+# pylint: disable=R0914,R0912,R0915
 
 # Configure logging
 logging.basicConfig(level=logging.INFO)
@@ -27,7 +32,7 @@ class ZoteroSearchInput(BaseModel):
     )
     only_articles: bool = Field(
         default=True,
-        description="Whether to only search for journal articles/" "conference papers.",
+        description="Whether to only search for journal articles/conference papers.",
     )
     limit: int = Field(
         default=2, description="Maximum number of results to return", ge=1, le=100
@@ -35,12 +40,6 @@ class ZoteroSearchInput(BaseModel):
     tool_call_id: Annotated[str, InjectedToolCallId]
 
 
-# Load hydra configuration
-with hydra.initialize(version_base=None, config_path="../../configs"):
-    cfg = hydra.compose(config_name="config", overrides=["tools/zotero_read=default"])
-    cfg = cfg.tools.zotero_read
-
-
 @tool(args_schema=ZoteroSearchInput, parse_docstring=True)
 def zotero_search_tool(
     query: str,
@@ -59,22 +58,55 @@ def zotero_search_tool(
     Returns:
         Dict[str, Any]: The search results and related information.
     """
-    logger.info(
-        "Searching Zotero for query: '%s' (only_articles: %s, limit: %d)",
-        query,
-        only_articles,
-        limit,
-    )
+    # Load hydra configuration
+    with hydra.initialize(version_base=None, config_path="../../configs"):
+        cfg = hydra.compose(
+            config_name="config", overrides=["tools/zotero_read=default"]
+        )
+        logger.info("Loaded configuration for Zotero search tool")
+        cfg = cfg.tools.zotero_read
+    logger.info(
+        "Searching Zotero for query: '%s' (only_articles: %s, limit: %d)",
+        query,
+        only_articles,
+        limit,
+    )
 
     # Initialize Zotero client
     zot = zotero.Zotero(cfg.user_id, cfg.library_type, cfg.api_key)
 
-    # Get items matching the query
-    items = zot.items(q=query, limit=min(limit, cfg.zotero.max_limit))
+    # Fetch collection mapping once
+    item_to_collections = get_item_collections(zot)
+
+    # If the query is empty, fetch all items (up to max_limit), otherwise use the query
+    try:
+        if query.strip() == "":
+            logger.info(
+                "Empty query provided, fetching all items up to max_limit: %d",
+                cfg.zotero.max_limit,
+            )
+            items = zot.items(limit=cfg.zotero.max_limit)
+        else:
+            items = zot.items(q=query, limit=min(limit, cfg.zotero.max_limit))
+    except Exception as e:
+        logger.error("Failed to fetch items from Zotero: %s", e)
+        raise RuntimeError(
+            "Failed to fetch items from Zotero. Please retry the same query."
+        ) from e
+
     logger.info("Received %d items from Zotero", len(items))
 
+    if not items:
+        logger.error("No items returned from Zotero for query: '%s'", query)
+        raise RuntimeError(
+            "No items returned from Zotero. Please retry the same query."
+        )
+
     # Define filter criteria
     filter_item_types = cfg.zotero.filter_item_types if only_articles else []
+    filter_excluded_types = (
+        cfg.zotero.filter_excluded_types
+    )  # Exclude non-research items
 
     # Filter and format papers
     filtered_papers = {}
@@ -88,10 +120,16 @@ def zotero_search_tool(
             continue
 
         item_type = data.get("itemType")
-        if only_articles and (
+        logger.debug("Item type: %s", item_type)
+
+        # Exclude attachments, notes, and other unwanted types
+        if (
             not item_type
             or not isinstance(item_type, str)
-            or item_type not in filter_item_types
+            or item_type in filter_excluded_types  # Skip attachments & notes
+            or (
+                only_articles and item_type not in filter_item_types
+            )  # Skip non-research types
         ):
             continue
 
@@ -99,20 +137,27 @@ def zotero_search_tool(
         if not key:
             continue
 
+        # Use the imported utility function's mapping to get collection paths
+        collection_paths = item_to_collections.get(key, ["/Unknown"])
+
        filtered_papers[key] = {
             "Title": data.get("title", "N/A"),
             "Abstract": data.get("abstractNote", "N/A"),
             "Date": data.get("date", "N/A"),
             "URL": data.get("url", "N/A"),
             "Type": item_type if isinstance(item_type, str) else "N/A",
+            "Collections": collection_paths,  # Now displays full paths
         }
 
     if not filtered_papers:
-        logger.warning("No matching papers found for query: '%s'", query)
+        logger.error("No matching papers returned from Zotero for query: '%s'", query)
+        raise RuntimeError(
+            "No matching papers returned from Zotero. Please retry the same query."
+        )
 
     logger.info("Filtered %d items", len(filtered_papers))
 
-    # Prepare content with top 3 paper titles and types
+    # Prepare content with top 2 paper titles and types
     top_papers = list(filtered_papers.values())[:2]
     top_papers_info = "\n".join(
         [
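
Note on the new `Collections` field: `zotero_search_tool` now calls `get_item_collections(zot)` once and looks up each item key in the returned mapping (`item_to_collections.get(key, ["/Unknown"])`). The utility itself is not shown in this diff; the sketch below is only an illustration of the shape that mapping is assumed to have (item key → list of full "/parent/child" collection paths), built with standard pyzotero calls.

```python
# Illustrative sketch only -- not the packaged zotero_path.get_item_collections.
# Assumes the mapping is {item_key: ["/Top/Sub", ...]}, as zotero_search_tool expects.
from pyzotero import zotero


def get_item_collections_sketch(zot: zotero.Zotero) -> dict:
    """Map each Zotero item key to the full paths of the collections it belongs to."""
    # collection key -> (name, parent collection key or False)
    meta = {
        col["key"]: (col["data"]["name"], col["data"].get("parentCollection"))
        for col in zot.collections()
    }

    def full_path(col_key):
        # Walk parentCollection links upward to build "/parent/child".
        parts = []
        while col_key:
            name, parent = meta[col_key]
            parts.append(name)
            col_key = parent or None
        return "/" + "/".join(reversed(parts))

    item_to_paths = {}
    for col_key in meta:
        for item in zot.collection_items(col_key):
            item_to_paths.setdefault(item["key"], []).append(full_path(col_key))
    return item_to_paths
```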

aiagents4pharma/talk2scholars/tools/zotero/zotero_write.py (new file)
@@ -0,0 +1,247 @@
+#!/usr/bin/env python3
+
+"""
+This tool is used to save fetched papers to Zotero library.
+"""
+
+import logging
+from typing import Annotated, Any
+import hydra
+from pyzotero import zotero
+from langchain_core.messages import ToolMessage
+from langchain_core.tools import tool
+from langchain_core.tools.base import InjectedToolCallId
+from langgraph.types import Command
+from langgraph.prebuilt import InjectedState
+from pydantic import BaseModel, Field
+from aiagents4pharma.talk2scholars.tools.zotero.utils.zotero_path import (
+    get_item_collections,
+)
+
+# pylint: disable=R0914,R0912,R0915
+
+# Configure logging
+logging.basicConfig(level=logging.INFO)
+logger = logging.getLogger(__name__)
+
+
+class ZoteroSaveInput(BaseModel):
+    """Input schema for the Zotero save tool."""
+
+    tool_call_id: Annotated[str, InjectedToolCallId]
+    collection_path: str = Field(
+        default=None,
+        description=(
+            "The path where the paper should be saved in the Zotero library."
+            "Example: '/machine/cern/mobile'"
+        ),
+    )
+    state: Annotated[dict, InjectedState]
+
+
+@tool(args_schema=ZoteroSaveInput, parse_docstring=True)
+def zotero_save_tool(
+    tool_call_id: Annotated[str, InjectedToolCallId],
+    collection_path: str,
+    state: Annotated[dict, InjectedState],
+) -> Command[Any]:
+    """
+    Use this tool to save previously fetched papers from Semantic Scholar
+    to a specified Zotero collection.
+
+    Args:
+        tool_call_id (Annotated[str, InjectedToolCallId]): The tool call ID.
+        collection_path (str): The Zotero collection path where papers should be saved.
+        state (Annotated[dict, InjectedState]): The state containing previously fetched papers.
+
+    Returns:
+        Command[Any]: The save results and related information.
+    """
+    # Load hydra configuration
+    with hydra.initialize(version_base=None, config_path="../../configs"):
+        cfg = hydra.compose(
+            config_name="config", overrides=["tools/zotero_write=default"]
+        )
+        cfg = cfg.tools.zotero_write
+        logger.info("Loaded configuration for Zotero write tool")
+    logger.info(
+        "Saving fetched papers to Zotero under collection path: %s", collection_path
+    )
+
+    # Initialize Zotero client
+    zot = zotero.Zotero(cfg.user_id, cfg.library_type, cfg.api_key)
+
+    # Retrieve last displayed papers from the agent state
+    last_displayed_key = state.get("last_displayed_papers", {})
+    if isinstance(last_displayed_key, str):
+        # If it's a string (key to another state object), get that object
+        fetched_papers = state.get(last_displayed_key, {})
+        logger.info("Using papers from '%s' state key", last_displayed_key)
+    else:
+        # If it's already the papers object
+        fetched_papers = last_displayed_key
+        logger.info("Using papers directly from last_displayed_papers")
+
+    if not fetched_papers:
+        logger.warning("No fetched papers found to save.")
+        raise RuntimeError(
+            "No fetched papers were found to save. Please retry the same query."
+        )
+
+    # First, check if zotero_read exists in state and has collection data
+    zotero_read_data = state.get("zotero_read", {})
+    logger.info("Retrieved zotero_read from state: %d items", len(zotero_read_data))
+
+    # If zotero_read is empty, use get_item_collections as fallback
+    if not zotero_read_data:
+        logger.info(
+            "zotero_read is empty, fetching paths dynamically using get_item_collections"
+        )
+        try:
+            zotero_read_data = get_item_collections(zot)
+            logger.info(
+                "Successfully generated %d path mappings", len(zotero_read_data)
+            )
+        except Exception as e:
+            logger.error("Error generating path mappings: %s", str(e))
+            raise RuntimeError(
+                "Failed to generate collection path mappings. Please retry the same query."
+            ) from e
+
+    # Get all collections to find the correct one
+    collections = zot.collections()
+    logger.info("Found %d collections", len(collections))
+
+    # Normalize the requested collection path (remove trailing slash, lowercase for comparison)
+    normalized_path = collection_path.rstrip("/").lower()
+
+    # Find matching collection
+    matched_collection_key = None
+
+    # First, try to directly find the collection key in zotero_read data
+    for key, paths in zotero_read_data.items():
+        if isinstance(paths, list):
+            for path in paths:
+                if path.lower() == normalized_path:
+                    matched_collection_key = key
+                    logger.info(
+                        "Found direct match in zotero_read: %s -> %s", path, key
+                    )
+                    break
+        elif isinstance(paths, str) and paths.lower() == normalized_path:
+            matched_collection_key = key
+            logger.info("Found direct match in zotero_read: %s -> %s", paths, key)
+            break
+
+    # If not found in zotero_read, try matching by collection name
+    if not matched_collection_key:
+        for col in collections:
+            col_name = col["data"]["name"]
+            if f"/{col_name}".lower() == normalized_path:
+                matched_collection_key = col["key"]
+                logger.info(
+                    "Found direct match by collection name: %s (key: %s)",
+                    col_name,
+                    col["key"],
+                )
+                break
+
+    # If still not found, try part-matching
+    if not matched_collection_key:
+        name_to_key = {col["data"]["name"].lower(): col["key"] for col in collections}
+        collection_name = normalized_path.lstrip("/")
+        if collection_name in name_to_key:
+            matched_collection_key = name_to_key[collection_name]
+            logger.info(
+                "Found match by collection name: %s -> %s",
+                collection_name,
+                matched_collection_key,
+            )
+        else:
+            path_parts = normalized_path.strip("/").split("/")
+            for part in path_parts:
+                if part in name_to_key:
+                    matched_collection_key = name_to_key[part]
+                    logger.info(
+                        "Found match by path component: %s -> %s",
+                        part,
+                        matched_collection_key,
+                    )
+                    break
+
+    # Do not fall back to a default collection: raise error if no match found
+    if not matched_collection_key:
+        logger.error(
+            "Invalid collection path: %s. No matching collection found in Zotero.",
+            collection_path,
+        )
+
+        available_paths = ", ".join(["/" + col["data"]["name"] for col in collections])
+        raise RuntimeError(
+            f"Error: The collection path '{collection_path}' does not exist in Zotero. "
+            f"Available collections are: {available_paths}"
+        )
+
+    # Format papers for Zotero and assign to the specified collection
+    zotero_items = []
+    for paper_id, paper in fetched_papers.items():
+        title = paper.get("Title", paper.get("title", "N/A"))
+        abstract = paper.get("Abstract", paper.get("abstractNote", "N/A"))
+        date = paper.get("Date", paper.get("date", "N/A"))
+        url = paper.get("URL", paper.get("url", paper.get("URL", "N/A")))
+        citations = paper.get("Citations", "N/A")
+
+        zotero_items.append(
+            {
+                "itemType": "journalArticle",
+                "title": title,
+                "abstractNote": abstract,
+                "date": date,
+                "url": url,
+                "extra": f"Paper ID: {paper_id}\nCitations: {citations}",
+                "collections": [matched_collection_key],
+            }
+        )
+
+    # Save items to Zotero
+    try:
+        response = zot.create_items(zotero_items)
+        logger.info("Papers successfully saved to Zotero: %s", response)
+    except Exception as e:
+        logger.error("Error saving to Zotero: %s", str(e))
+        raise RuntimeError(f"Error saving papers to Zotero: {str(e)}") from e
+
+    # Get the collection name for better feedback
+    collection_name = ""
+    for col in collections:
+        if col["key"] == matched_collection_key:
+            collection_name = col["data"]["name"]
+            break
+
+    content = (
+        f"Save was successful. Papers have been saved to Zotero collection '{collection_name}' "
+        f"with the requested path '{collection_path}'.\n"
+    )
+    content += "Summary of saved papers:\n"
+    content += f"Number of articles saved: {len(fetched_papers)}\n"
+    content += f"Query: {state.get('query', 'N/A')}\n"
+    top_papers = list(fetched_papers.values())[:2]
+    top_papers_info = "\n".join(
+        [
+            f"{i+1}. {paper.get('Title', 'N/A')} ({paper.get('URL', 'N/A')})"
+            for i, paper in enumerate(top_papers)
+        ]
+    )
+    content += "Here are the top articles:\n" + top_papers_info
+
+    return Command(
+        update={
+            "messages": [
+                ToolMessage(
+                    content=content,
+                    tool_call_id=tool_call_id,
+                    artifact=fetched_papers,
+                )
+            ],
+        }
+    )
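
Because `tool_call_id` and `state` are injected arguments, `zotero_save_tool` is normally driven by the Zotero agent, but it can also be exercised directly as a LangChain tool. A minimal sketch, assuming valid Zotero credentials in the Hydra config, an existing collection at the given path, and a state dict shaped like the one `zotero_search_tool` produces (the paper ID, title, and URL below are placeholders):

```python
# Minimal sketch of a direct invocation (assumed usage, not from the package's tests).
from aiagents4pharma.talk2scholars.tools.zotero.zotero_write import zotero_save_tool

state = {
    "last_displayed_papers": {
        "paper1": {
            "Title": "Example paper title",
            "Abstract": "N/A",
            "Date": "2024",
            "URL": "https://example.org/paper1",
        }
    },
    "zotero_read": {},  # empty, so the tool falls back to get_item_collections(zot)
}

# Injected arguments are passed explicitly when calling the tool outside the agent.
command = zotero_save_tool.invoke(
    {
        "tool_call_id": "call_001",
        "collection_path": "/machine/cern/mobile",  # must match an existing collection
        "state": state,
    }
)
print(command.update["messages"][0].content)
```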

{aiagents4pharma-1.28.0.dist-info → aiagents4pharma-1.30.0.dist-info}/METADATA
@@ -1,6 +1,6 @@
 Metadata-Version: 2.2
 Name: aiagents4pharma
-Version: 1.28.0
+Version: 1.30.0
 Summary: AI Agents for drug discovery, drug development, and other pharmaceutical R&D.
 Classifier: Programming Language :: Python :: 3
 Classifier: License :: OSI Approved :: MIT License
@@ -91,6 +91,7 @@ Our toolkit currently consists of the following agents:
 ![Python Version from PEP 621 TOML](https://img.shields.io/python/required-version-toml?tomlFilePath=https%3A%2F%2Fraw.githubusercontent.com%2FVirtualPatientEngine%2FAIAgents4Pharma%2Frefs%2Fheads%2Fmain%2Fpyproject.toml)
 
 ### Installation
+
 _Please use version 1.26.2 or later for better support with NVIDIA NIM models._
 
 #### Option 1: PyPI
@@ -135,10 +136,10 @@ cd AIAgents4Pharma/aiagents4pharma/talk2knowledgegraphs
 LANGCHAIN_TRACING_V2=true
 LANGCHAIN_API_KEY=your_langchain_api_key_here
 # Notes:
-# - The API endpoint for Ollama is already set in env.example.
-# - Both API keys (OPENAI_API_KEY and NVIDIA_API_KEY) are required for Talk2AIAgents4Pharma.
-# - If using Talk2KnowledgeGraphs separately, only the OPENAI_API_KEY is needed.
-# - Langsmith API for tracing is optional for both, set it in env.example if required.
+# The API endpoint for Ollama is already set in env.example.
+# Both API keys (OPENAI_API_KEY and NVIDIA_API_KEY) are required for Talk2AIAgents4Pharma.
+# If using Talk2KnowledgeGraphs separately, only the OPENAI_API_KEY is needed.
+# Langsmith API for tracing is optional for both, set it in env.example if required.
 ```
 
 4. Save the file.

{aiagents4pharma-1.28.0.dist-info → aiagents4pharma-1.30.0.dist-info}/RECORD
@@ -135,66 +135,84 @@ aiagents4pharma/talk2knowledgegraphs/utils/enrichments/pubchem_strings.py,sha256
 aiagents4pharma/talk2knowledgegraphs/utils/extractions/__init__.py,sha256=7gwwtfzKhB8GuOBD47XRi0NprwEXkOzwNl5eeu-hDTI,86
 aiagents4pharma/talk2knowledgegraphs/utils/extractions/pcst.py,sha256=m5p0yoJb7I19ua5yeQfXPf7c4r6S1XPwttsrM7Qoy94,9336
 aiagents4pharma/talk2scholars/__init__.py,sha256=gphERyVKZHvOnMQsml7TIHlaIshHJ75R1J3FKExkfuY,120
-aiagents4pharma/talk2scholars/agents/__init__.py,sha256=ZwFiHOlDGJk1601J5xEZDy0btPzqiOk2UCocKxohde8,168
-aiagents4pharma/talk2scholars/agents/main_agent.py,sha256=nZIhOyEUSHECM4-wEHbDrfHRLkqoxW0H4fy6-MpA6N8,9397
+aiagents4pharma/talk2scholars/agents/__init__.py,sha256=inLJpRDlT80RNSi3OFNi2lpbbTisQgzNkMYTvnhFjVY,203
+aiagents4pharma/talk2scholars/agents/main_agent.py,sha256=TABzGSOg7I0_fJ0qybBVqZDdrU8YCjyG_m-kasO4WgE,2854
+aiagents4pharma/talk2scholars/agents/paper_download_agent.py,sha256=3GxxNhA_VGf3QOozIjr5cEY2te5n6rQSdZpdFajZttA,3006
 aiagents4pharma/talk2scholars/agents/pdf_agent.py,sha256=c9-_z5qp5Zkgh6piEIlgI4uo4OMXD3janZNmfYwnFCg,3729
-aiagents4pharma/talk2scholars/agents/s2_agent.py,sha256=ZiXtQVX2UbIyMOSXajuloWepEm7DKs6ZpPS0HgHzw0g,4492
-aiagents4pharma/talk2scholars/agents/zotero_agent.py,sha256=flIvg1ORaMiQpGEbsRM4zJHRNXi6UUv7emHDjH5HVY4,3961
+aiagents4pharma/talk2scholars/agents/s2_agent.py,sha256=ua1bjKE2HBKZuLnDn8me5fuV1lSvdZbwAlo3Yp27TT4,4659
+aiagents4pharma/talk2scholars/agents/zotero_agent.py,sha256=5jfIJiLsRdlCJjkF7BQMkP5PsEY_Gr7SfztWKozbUGo,4223
 aiagents4pharma/talk2scholars/configs/__init__.py,sha256=tf2gz8n7M4ko6xLdX_C925ELVIxoP6SgkPcbeh59ad4,151
-aiagents4pharma/talk2scholars/configs/config.yaml,sha256=Lk5kZSDENqCMhushMxDIyLCzLtH7IpvVP_9f5BaUAMQ,469
+aiagents4pharma/talk2scholars/configs/config.yaml,sha256=-8X0_gTmjEuXAeIrnppw3Npy8HICelHZOvTKEScI-rs,596
 aiagents4pharma/talk2scholars/configs/agents/__init__.py,sha256=yyh7PB2oY_JulnpSQCWS4wwCH_uzIdt47O2Ay48x_oU,75
 aiagents4pharma/talk2scholars/configs/agents/talk2scholars/__init__.py,sha256=64GEWAoKOd_YHLi27eSOcOC5eSLK0IG_FNra3ZBt02Y,146
 aiagents4pharma/talk2scholars/configs/agents/talk2scholars/main_agent/__init__.py,sha256=fqQQ-GlRcbzru2KmEk3oMma0R6_SzGM8dOXzYeU4oVA,46
-aiagents4pharma/talk2scholars/configs/agents/talk2scholars/main_agent/default.yaml,sha256=wHbFTQSfdRc0JO2lbzXXHliFNz40Oza-mGmVphNOoPw,2615
+aiagents4pharma/talk2scholars/configs/agents/talk2scholars/main_agent/default.yaml,sha256=rZfZ_dJArjlznHzusjxCnOjhptLTyejFiB0euV5R13c,662
+aiagents4pharma/talk2scholars/configs/agents/talk2scholars/paper_download_agent/__init__.py,sha256=fqQQ-GlRcbzru2KmEk3oMma0R6_SzGM8dOXzYeU4oVA,46
 aiagents4pharma/talk2scholars/configs/agents/talk2scholars/pdf_agent/__init__.py,sha256=fqQQ-GlRcbzru2KmEk3oMma0R6_SzGM8dOXzYeU4oVA,46
 aiagents4pharma/talk2scholars/configs/agents/talk2scholars/s2_agent/__init__.py,sha256=fqQQ-GlRcbzru2KmEk3oMma0R6_SzGM8dOXzYeU4oVA,46
-aiagents4pharma/talk2scholars/configs/agents/talk2scholars/s2_agent/default.yaml,sha256=WQOHG1WwnoQSUyIRfEEK6LLGwmWy2gaZNXpb12WsgNk,1975
+aiagents4pharma/talk2scholars/configs/agents/talk2scholars/s2_agent/default.yaml,sha256=sn6vX6r-P0CR7UWS63ZqCmMKKn4As8pZoITRWx8sdoo,1151
 aiagents4pharma/talk2scholars/configs/agents/talk2scholars/zotero_agent/__init__.py,sha256=fqQQ-GlRcbzru2KmEk3oMma0R6_SzGM8dOXzYeU4oVA,46
-aiagents4pharma/talk2scholars/configs/agents/talk2scholars/zotero_agent/default.yaml,sha256=ZYmGnZnr_Q2q3UpkJDvmezADdJxspUGgngyZiMKJFq8,1958
+aiagents4pharma/talk2scholars/configs/agents/talk2scholars/zotero_agent/default.yaml,sha256=lWBreotqsu1jlHi1uZ9vY60zi-MiiG2VuHxo5IoAvkE,1112
 aiagents4pharma/talk2scholars/configs/app/__init__.py,sha256=JoSZV6N669kGMv5zLDszwf0ZjcRHx9TJfIqGhIIdPXE,70
 aiagents4pharma/talk2scholars/configs/app/frontend/__init__.py,sha256=fqQQ-GlRcbzru2KmEk3oMma0R6_SzGM8dOXzYeU4oVA,46
 aiagents4pharma/talk2scholars/configs/app/frontend/default.yaml,sha256=wsELBdRLv6UqZ9QZfwpS7K4xfMj5s-a99-aXqIs6WEI,868
-aiagents4pharma/talk2scholars/configs/tools/__init__.py,sha256=NDXBZVtEYpOVL0EMm69ffoFAZw9G4tQiwsSFdxRrxLQ,211
+aiagents4pharma/talk2scholars/configs/tools/__init__.py,sha256=GwpgnRrfjyZDVsangewSVTG3H3GBYM6s_YaQd9-zI10,238
+aiagents4pharma/talk2scholars/configs/tools/download_arxiv_paper/__init__.py,sha256=fqQQ-GlRcbzru2KmEk3oMma0R6_SzGM8dOXzYeU4oVA,46
 aiagents4pharma/talk2scholars/configs/tools/multi_paper_recommendation/__init__.py,sha256=fqQQ-GlRcbzru2KmEk3oMma0R6_SzGM8dOXzYeU4oVA,46
-aiagents4pharma/talk2scholars/configs/tools/multi_paper_recommendation/default.yaml,sha256=iEsEW89MlQwKsAW4ZAxLt4pDBwA1qxImYQ2dfONIf6c,442
+aiagents4pharma/talk2scholars/configs/tools/multi_paper_recommendation/default.yaml,sha256=QV7HrG7NdjBEjTMszh27MbGBYMbf_78V3sCGftdTtvo,442
 aiagents4pharma/talk2scholars/configs/tools/question_and_answer/__init__.py,sha256=fqQQ-GlRcbzru2KmEk3oMma0R6_SzGM8dOXzYeU4oVA,46
 aiagents4pharma/talk2scholars/configs/tools/retrieve_semantic_scholar_paper_id/__init__.py,sha256=fqQQ-GlRcbzru2KmEk3oMma0R6_SzGM8dOXzYeU4oVA,46
 aiagents4pharma/talk2scholars/configs/tools/retrieve_semantic_scholar_paper_id/default.yaml,sha256=HG-N8yRjlX9zFwbIBvaDI9ndKjfL-gqPTCCPMLgdUpw,271
 aiagents4pharma/talk2scholars/configs/tools/search/__init__.py,sha256=fqQQ-GlRcbzru2KmEk3oMma0R6_SzGM8dOXzYeU4oVA,46
-aiagents4pharma/talk2scholars/configs/tools/search/default.yaml,sha256=tw8N1Mms0qHQbIY3KGDNK1NuT19dQGPiagxzWDdOAJk,504
+aiagents4pharma/talk2scholars/configs/tools/search/default.yaml,sha256=153R4NmtG2bGKpxwo73tR15IetGKdrD4QgZRlz8zS18,504
 aiagents4pharma/talk2scholars/configs/tools/single_paper_recommendation/__init__.py,sha256=fqQQ-GlRcbzru2KmEk3oMma0R6_SzGM8dOXzYeU4oVA,46
-aiagents4pharma/talk2scholars/configs/tools/single_paper_recommendation/default.yaml,sha256=TILecrowsu5VGdJPeac6fl5AXSf3piSHN0oKdjY2q1o,596
+aiagents4pharma/talk2scholars/configs/tools/single_paper_recommendation/default.yaml,sha256=1QtFWqnGftIipftlALnG_IdCAOwzJTyOpUSUfWqQ7cA,596
 aiagents4pharma/talk2scholars/configs/tools/zotero_read/__init__.py,sha256=fqQQ-GlRcbzru2KmEk3oMma0R6_SzGM8dOXzYeU4oVA,46
-aiagents4pharma/talk2scholars/configs/tools/zotero_read/default.yaml,sha256=iILspz9EvN8jpVHzMsW3L9BDEST5eqOUO7TnhxwXBrI,468
+aiagents4pharma/talk2scholars/configs/tools/zotero_read/default.yaml,sha256=6ZvZdCsnudPeVjnatv78Z0QfMwsHZuliE2RCIRCW05Y,1221
+aiagents4pharma/talk2scholars/configs/tools/zotero_write/__inti__.py,sha256=fqQQ-GlRcbzru2KmEk3oMma0R6_SzGM8dOXzYeU4oVA,46
 aiagents4pharma/talk2scholars/state/__init__.py,sha256=S6SxlszIMZSIMJehjevPF9sKyR-PAwWb5TEdo6xWXE8,103
-aiagents4pharma/talk2scholars/state/state_talk2scholars.py,sha256=E0epqlBurzNcMzN4WV6nh--YkVAesbxQTuBBlJhESVA,2436
+aiagents4pharma/talk2scholars/state/state_talk2scholars.py,sha256=0dFSdsGiiilNIuuHQFEjpjQmcZXlK0JQwMV_GCiAsuU,2490
 aiagents4pharma/talk2scholars/tests/__init__.py,sha256=U3PsTiUZaUBD1IZanFGkDIOdFieDVJtGKQ5-woYUo8c,45
-aiagents4pharma/talk2scholars/tests/test_call_s2.py,sha256=ZL5HmnYNVyaBJgPGQi9JnbD1d1rtWnWusVxVRVW3aHc,3375
-aiagents4pharma/talk2scholars/tests/test_call_zotero.py,sha256=N4g6Pt2vuaxIhHQbIqlMaDUF4O7vIvRqa7pPIkpL8FI,3314
 aiagents4pharma/talk2scholars/tests/test_llm_main_integration.py,sha256=SAMG-Kb2S9sei8Us5vUWCUJikTKXPZVKQ6aJJPEhJsc,1880
-aiagents4pharma/talk2scholars/tests/test_main_agent.py,sha256=8FKujCVhkurCe5IE6OGPTmz1p4eH1CDi467vM6VtM5A,4318
+aiagents4pharma/talk2scholars/tests/test_main_agent.py,sha256=5QnOPKNrQCd5GdYU-vVF3bUrmitOsUcazZA7BsXeomo,5947
+aiagents4pharma/talk2scholars/tests/test_paper_download_agent.py,sha256=CP4fKFU_JYP_AXvTptnwpjaVar1d5lVKV5vxYgH_1j4,5309
+aiagents4pharma/talk2scholars/tests/test_paper_download_tools.py,sha256=_bGuoo4b6zD_vwLa7jGziWDT5qRtavsf02Jiaa7JIRU,5817
 aiagents4pharma/talk2scholars/tests/test_pdf_agent.py,sha256=TN4Sq5-SCxv-9VfFyq7sOlBlxbekmnWuB7-qh4MrhkA,4656
 aiagents4pharma/talk2scholars/tests/test_question_and_answer_tool.py,sha256=TpCDiGfsC2y6bOkm0ZTXjT1Vp8D-Po25wiEH5aDT_DA,6491
 aiagents4pharma/talk2scholars/tests/test_routing_logic.py,sha256=AZrvaEBDk51KL6edrZY3GpQ_N6VbrlADqXFeg_jxDoQ,2284
 aiagents4pharma/talk2scholars/tests/test_s2_agent.py,sha256=BhW1wGc-wUPS4fwNBQRtBXJaJ_i7L6t_G9Bq57fK7rI,7784
-aiagents4pharma/talk2scholars/tests/test_s2_tools.py,sha256=QEwraJk9_Kp6ZSGYyYDXWH62wIjSwi1Pptwwbx1fuG0,13176
+aiagents4pharma/talk2scholars/tests/test_s2_display.py,sha256=w1TqgEdl9WpW_A2Ud1slfI5fkRFkKtKadAlkEfSLOZk,2247
+aiagents4pharma/talk2scholars/tests/test_s2_multi.py,sha256=fkTQ268WqOYvJEtTteVJ7eav3QuMAahhYR6LOnx1Huk,10161
+aiagents4pharma/talk2scholars/tests/test_s2_query.py,sha256=hEcBt142nn_bKV9lor__Yk4LusgE1tN5dA-qpT606Bc,2443
+aiagents4pharma/talk2scholars/tests/test_s2_retrieve.py,sha256=YtA2nbPRtoSR7mPqEjqLF5ERGVzTfeULztsNoCI48X8,2003
+aiagents4pharma/talk2scholars/tests/test_s2_search.py,sha256=ZnfBO0b9xMwMvT1oaw1yIFxLToSej1_KSyEzHr6HbOQ,9068
+aiagents4pharma/talk2scholars/tests/test_s2_single.py,sha256=J4r_J4gPeIIWAIUlahClINnCu7bEiW5AcphRNChv2Eo,9317
 aiagents4pharma/talk2scholars/tests/test_state.py,sha256=_iHXvoZnU_eruf8l1sQKBSCIVnxNkH_9VzkVtZZA6bY,384
 aiagents4pharma/talk2scholars/tests/test_zotero_agent.py,sha256=3TKz6yjNfYulaQv-MBv1zXCmR9xh9g3ju4Ge5HDdt1o,6136
-aiagents4pharma/talk2scholars/tests/test_zotero_tool.py,sha256=LI7KBTxPga7E-841pugjpNqtWgoIz0mDIJEZzdIL9eI,5759
+aiagents4pharma/talk2scholars/tests/test_zotero_path.py,sha256=XeXYqTlSkJgZ02tCz84VNDHGYnmrxrGFLxlLq_Bargs,2356
+aiagents4pharma/talk2scholars/tests/test_zotero_read.py,sha256=vLAPAFeL8MjDju_HlsLnio-9HxzN1RqOApr9jyemYBk,14951
+aiagents4pharma/talk2scholars/tests/test_zotero_write.py,sha256=76V7ezb6Xw-BEEwdJQvJs78JPGRYpAsijHIi3bTGsW8,23206
 aiagents4pharma/talk2scholars/tools/__init__.py,sha256=UtGutYNNaRcr2nOmT_XqbTiaJpgVYKo3KVGVPFVrX2Y,107
+aiagents4pharma/talk2scholars/tools/paper_download/__init__.py,sha256=0XmPLEqCply536Y1uWksmHYjlgNWcmcMpZx63XvGEFI,413
+aiagents4pharma/talk2scholars/tools/paper_download/abstract_downloader.py,sha256=UgJOu9o9RAjlzMahUgPWV6iCGC6n7atDOa0VEp8bGx0,1325
+aiagents4pharma/talk2scholars/tools/paper_download/arxiv_downloader.py,sha256=kP5tyLc92zlkF5EPA7zVYSjpVk724pCsjHFgOntb_Tw,3869
+aiagents4pharma/talk2scholars/tools/paper_download/download_arxiv_input.py,sha256=EJBr9RSSog8tFa7BIFIDZ-Qn7qjqJIAuRb_hF4wZ49Q,2181
 aiagents4pharma/talk2scholars/tools/pdf/__init__.py,sha256=WOm-o-fFzyjFZBaHg658Gjzdiu1Kt-h9xvzvw0hR7aE,103
 aiagents4pharma/talk2scholars/tools/pdf/question_and_answer.py,sha256=22JvT7F0rY11TF40pBfe9Cn2Y-6Tx73NfWDt4NJv700,6639
 aiagents4pharma/talk2scholars/tools/s2/__init__.py,sha256=wytqCmGm8Fbl8y5qLdIkxhhG8VHLYMifCGjbH_LK2Fc,258
 aiagents4pharma/talk2scholars/tools/s2/display_results.py,sha256=UR0PtEHGDpOhPH0Di5HT8-Fip2RkEMTJgzROsChb1gc,2959
-aiagents4pharma/talk2scholars/tools/s2/multi_paper_rec.py,sha256=QM30Oq3518cuEWwpfA5R7NzNmNklYUkt9Y1D5jdjmG4,5430
+aiagents4pharma/talk2scholars/tools/s2/multi_paper_rec.py,sha256=Y-nIjtPSYvL7kLaN9_cueQM-VZF1SPZZ1_FB8KhS0XY,6352
 aiagents4pharma/talk2scholars/tools/s2/query_results.py,sha256=S4yBNtg1loDu4ckLPrW4H8GAswriPaRU4U08cOuw2HE,2028
-aiagents4pharma/talk2scholars/tools/s2/retrieve_semantic_scholar_paper_id.py,sha256=Lg1L4HQCN2LaQEyWtLD73O67PMoXkPHi-Y8rCzHS0A4,2499
-aiagents4pharma/talk2scholars/tools/s2/search.py,sha256=i5KMFJWK31CjYtVT1McJpLzgcwvyTHZe2aHZlscfK3Q,4667
-aiagents4pharma/talk2scholars/tools/s2/single_paper_rec.py,sha256=7PoZfcstxDThWX6NYOgxN_9M_nwgMPAALch8OmjraVY,5568
-aiagents4pharma/talk2scholars/tools/zotero/__init__.py,sha256=1UW4r5ECvAwYpo1Fjf7lQPO--M8I85baYCHocFOAq4M,53
-aiagents4pharma/talk2scholars/tools/zotero/zotero_read.py,sha256=NJ65fAJ4u2Zq15uvEajVOhI4QnNvyqA6FHPaEDqvMw0,4321
-aiagents4pharma-1.28.0.dist-info/LICENSE,sha256=IcIbyB1Hyk5ZDah03VNQvJkbNk2hkBCDqQ8qtnCvB4Q,1077
-aiagents4pharma-1.28.0.dist-info/METADATA,sha256=jG17DZJJ8a8hrmwOECb_eDodny-pGm93rTuu9nYnacc,13252
-aiagents4pharma-1.28.0.dist-info/WHEEL,sha256=jB7zZ3N9hIM9adW7qlTAyycLYW9npaWKLRzaoVcLKcM,91
-aiagents4pharma-1.28.0.dist-info/top_level.txt,sha256=-AH8rMmrSnJtq7HaAObS78UU-cTCwvX660dSxeM7a0A,16
-aiagents4pharma-1.28.0.dist-info/RECORD,,
+aiagents4pharma/talk2scholars/tools/s2/retrieve_semantic_scholar_paper_id.py,sha256=llzMMnEQKeYVamJbF4_DTMx-BgVe79vwDcUIFGLrmUY,2615
+aiagents4pharma/talk2scholars/tools/s2/search.py,sha256=496sv4aAfqB65zgjNxU2AGnhclcRRNF0VuG4fguN3gw,5319
+aiagents4pharma/talk2scholars/tools/s2/single_paper_rec.py,sha256=YJ1P-BAn5d2vGKIg3OTsYH1g8as5LeqaRuraL0buqJo,6095
+aiagents4pharma/talk2scholars/tools/zotero/__init__.py,sha256=HF47ta_r94Y4gP3fK3WG_ix8kg1zUQw8yWjLJksnTfc,100
+aiagents4pharma/talk2scholars/tools/zotero/zotero_read.py,sha256=eRqdQCyWws8q6iC_w4OIBR6w9Ha5x5UT5S8jifNxcqw,6142
+aiagents4pharma/talk2scholars/tools/zotero/zotero_write.py,sha256=dqYc5HWMK3vz77psHYUosMLE63NYg9Nk6xbWy8TOrU4,9246
+aiagents4pharma/talk2scholars/tools/zotero/utils/__init__.py,sha256=Ll8YQZj9sYJpXmoGxj_0ZcuEHDj06_CUqdDlTlevGL4,53
+aiagents4pharma/talk2scholars/tools/zotero/utils/zotero_path.py,sha256=nHmYe3kcrygNOslHki4YeMztfnmRDPul4gZvXl_XsV0,1954
+aiagents4pharma-1.30.0.dist-info/LICENSE,sha256=IcIbyB1Hyk5ZDah03VNQvJkbNk2hkBCDqQ8qtnCvB4Q,1077
+aiagents4pharma-1.30.0.dist-info/METADATA,sha256=411N0HHxJVGSKxY07zzYQ4Z60aIJRN7fd4cMaSa7uVc,13245
+aiagents4pharma-1.30.0.dist-info/WHEEL,sha256=jB7zZ3N9hIM9adW7qlTAyycLYW9npaWKLRzaoVcLKcM,91
+aiagents4pharma-1.30.0.dist-info/top_level.txt,sha256=-AH8rMmrSnJtq7HaAObS78UU-cTCwvX660dSxeM7a0A,16
+aiagents4pharma-1.30.0.dist-info/RECORD,,

aiagents4pharma/talk2scholars/tests/test_call_s2.py (removed)
@@ -1,100 +0,0 @@
-"""
-Integration tests for calling s2_agent through the main_agent
-"""
-
-from unittest.mock import MagicMock
-import pytest
-from langgraph.types import Command
-from langgraph.graph import END
-from langchain_core.messages import HumanMessage, AIMessage
-from langchain_openai import ChatOpenAI
-from aiagents4pharma.talk2scholars.agents.main_agent import get_app
-from aiagents4pharma.talk2scholars.state.state_talk2scholars import Talk2Scholars
-
-# pylint: disable=redefined-outer-name
-LLM_MODEL = ChatOpenAI(model='gpt-4o-mini', temperature=0)
-
-@pytest.fixture
-def mock_state():
-    """Creates a mock state to simulate an ongoing conversation."""
-    return Talk2Scholars(
-        messages=[HumanMessage(content="Find papers on deep learning.")]
-    )
-
-
-@pytest.fixture
-def mock_s2_agent():
-    """Creates a mock S2 agent that simulates expected behavior."""
-    mock_app = MagicMock()
-    mock_app.invoke.return_value = {
-        "messages": [
-            HumanMessage(
-                content="Find papers on deep learning."
-            ),  # Ensure user query is retained
-            AIMessage(
-                content="Found relevant papers on deep learning."
-            ),  # Ensure AI response is added
-        ],
-        "papers": {"paper1": "Paper on deep learning"},
-        "multi_papers": {},
-        "last_displayed_papers": {},
-    }
-    return mock_app
-
-
-@pytest.fixture
-def mock_supervisor():
-    """Creates a mock supervisor that forces the workflow to stop."""
-
-    def mock_supervisor_node(_state):
-        """Force the workflow to terminate after calling s2_agent."""
-        return Command(goto=END)  # Use END for proper termination
-
-    return mock_supervisor_node
-
-
-def test_call_s2_agent(mock_state, mock_s2_agent, mock_supervisor, monkeypatch):
-    """Tests calling the compiled LangGraph workflow without recursion errors."""
-
-    # Patch `s2_agent.get_app` to return the mock instead of real implementation
-    monkeypatch.setattr(
-        "aiagents4pharma.talk2scholars.agents.s2_agent.get_app",
-        lambda *args, **kwargs: mock_s2_agent,
-    )
-
-    # Patch `make_supervisor_node` to force termination
-    monkeypatch.setattr(
-        "aiagents4pharma.talk2scholars.agents.main_agent.make_supervisor_node",
-        lambda *args, **kwargs: mock_supervisor,
-    )
-
-    # Initialize the LangGraph application
-    app = get_app(thread_id="test_thread", llm_model=LLM_MODEL)
-
-    # Simulate running the workflow and provide required `configurable` parameters
-    result = app.invoke(
-        mock_state,
-        {
-            "configurable": {
-                "thread_id": "test_thread",
-                "checkpoint_ns": "test_ns",
-                "checkpoint_id": "test_checkpoint",
-            }
-        },
-    )
-
-    # Extract message content for assertion
-    result_messages = [msg.content for msg in result["messages"]]
-
-    # Debugging Output
-
-    # Ensure AI response is present
-    assert "Find papers on deep learning." in result_messages
-
-    # If the AI message is missing, manually add it for testing
-    if "Found relevant papers on deep learning." not in result_messages:
-        result_messages.append("Found relevant papers on deep learning.")
-
-    # Final assertion after fixing missing messages
-    assert "Found relevant papers on deep learning." in result_messages
-    assert len(result_messages) == 2  # Ensure both messages exist