aiagents4pharma 1.30.0__py3-none-any.whl → 1.30.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (34)
  1. aiagents4pharma/talk2scholars/agents/main_agent.py +18 -10
  2. aiagents4pharma/talk2scholars/agents/paper_download_agent.py +5 -6
  3. aiagents4pharma/talk2scholars/agents/pdf_agent.py +4 -10
  4. aiagents4pharma/talk2scholars/configs/agents/talk2scholars/main_agent/default.yaml +18 -9
  5. aiagents4pharma/talk2scholars/configs/agents/talk2scholars/s2_agent/default.yaml +2 -2
  6. aiagents4pharma/talk2scholars/configs/app/frontend/default.yaml +1 -0
  7. aiagents4pharma/talk2scholars/configs/tools/multi_paper_recommendation/default.yaml +6 -1
  8. aiagents4pharma/talk2scholars/configs/tools/search/default.yaml +7 -1
  9. aiagents4pharma/talk2scholars/configs/tools/single_paper_recommendation/default.yaml +6 -1
  10. aiagents4pharma/talk2scholars/configs/tools/zotero_read/default.yaml +1 -1
  11. aiagents4pharma/talk2scholars/state/state_talk2scholars.py +4 -1
  12. aiagents4pharma/talk2scholars/tests/test_llm_main_integration.py +84 -53
  13. aiagents4pharma/talk2scholars/tests/test_main_agent.py +24 -0
  14. aiagents4pharma/talk2scholars/tests/test_question_and_answer_tool.py +79 -15
  15. aiagents4pharma/talk2scholars/tests/test_routing_logic.py +12 -8
  16. aiagents4pharma/talk2scholars/tests/test_s2_multi.py +27 -4
  17. aiagents4pharma/talk2scholars/tests/test_s2_search.py +19 -3
  18. aiagents4pharma/talk2scholars/tests/test_s2_single.py +27 -3
  19. aiagents4pharma/talk2scholars/tests/test_zotero_read.py +17 -10
  20. aiagents4pharma/talk2scholars/tools/paper_download/abstract_downloader.py +2 -0
  21. aiagents4pharma/talk2scholars/tools/paper_download/arxiv_downloader.py +11 -4
  22. aiagents4pharma/talk2scholars/tools/paper_download/download_arxiv_input.py +5 -1
  23. aiagents4pharma/talk2scholars/tools/pdf/question_and_answer.py +73 -26
  24. aiagents4pharma/talk2scholars/tools/s2/multi_paper_rec.py +46 -22
  25. aiagents4pharma/talk2scholars/tools/s2/query_results.py +1 -1
  26. aiagents4pharma/talk2scholars/tools/s2/search.py +40 -12
  27. aiagents4pharma/talk2scholars/tools/s2/single_paper_rec.py +42 -16
  28. aiagents4pharma/talk2scholars/tools/zotero/zotero_read.py +33 -16
  29. aiagents4pharma/talk2scholars/tools/zotero/zotero_write.py +39 -7
  30. {aiagents4pharma-1.30.0.dist-info → aiagents4pharma-1.30.1.dist-info}/METADATA +2 -2
  31. {aiagents4pharma-1.30.0.dist-info → aiagents4pharma-1.30.1.dist-info}/RECORD +34 -34
  32. {aiagents4pharma-1.30.0.dist-info → aiagents4pharma-1.30.1.dist-info}/WHEEL +1 -1
  33. {aiagents4pharma-1.30.0.dist-info → aiagents4pharma-1.30.1.dist-info}/LICENSE +0 -0
  34. {aiagents4pharma-1.30.0.dist-info → aiagents4pharma-1.30.1.dist-info}/top_level.txt +0 -0
@@ -30,7 +30,7 @@ class MultiPaperRecInput(BaseModel):
30
30
  description="List of Semantic Scholar Paper IDs to get recommendations for"
31
31
  )
32
32
  limit: int = Field(
33
- default=2,
33
+ default=10,
34
34
  description="Maximum total number of recommendations to return",
35
35
  ge=1,
36
36
  le=500,
@@ -90,23 +90,33 @@ def get_multi_paper_recommendations(
90
90
  params["year"] = year
91
91
 
92
92
  # Wrap API call in try/except to catch connectivity issues and validate response format
93
- try:
94
- response = requests.post(
95
- endpoint,
96
- headers=headers,
97
- params=params,
98
- data=json.dumps(payload),
99
- timeout=cfg.request_timeout,
100
- )
101
- response.raise_for_status() # Raises HTTPError for bad responses
102
- except requests.exceptions.RequestException as e:
103
- logger.error(
104
- "Failed to connect to Semantic Scholar API for multi-paper recommendations: %s",
105
- e,
106
- )
107
- raise RuntimeError(
108
- "Failed to connect to Semantic Scholar API. Please retry the same query."
109
- ) from e
93
+ response = None
94
+ for attempt in range(10):
95
+ try:
96
+ response = requests.post(
97
+ endpoint,
98
+ headers=headers,
99
+ params=params,
100
+ data=json.dumps(payload),
101
+ timeout=cfg.request_timeout,
102
+ )
103
+ response.raise_for_status() # Raises HTTPError for bad responses
104
+ break # Exit loop if request is successful
105
+ except requests.exceptions.RequestException as e:
106
+ logger.error(
107
+ "Attempt %d: Failed to connect to Semantic Scholar API for "
108
+ "multi-paper recommendations: %s",
109
+ attempt + 1,
110
+ e,
111
+ )
112
+ if attempt == 9: # Last attempt
113
+ raise RuntimeError(
114
+ "Failed to connect to Semantic Scholar API after 10 attempts."
115
+ "Please retry the same query."
116
+ ) from e
117
+
118
+ if response is None:
119
+ raise RuntimeError("Failed to obtain a response from the Semantic Scholar API.")
110
120
 
111
121
  logger.info(
112
122
  "API Response Status for multi-paper recommendations: %s", response.status_code
@@ -137,11 +147,22 @@ def get_multi_paper_recommendations(
137
147
  # Create a dictionary to store the papers
138
148
  filtered_papers = {
139
149
  paper["paperId"]: {
140
- "paper_id": paper["paperId"],
150
+ "semantic_scholar_paper_id": paper["paperId"],
141
151
  "Title": paper.get("title", "N/A"),
142
152
  "Abstract": paper.get("abstract", "N/A"),
143
153
  "Year": paper.get("year", "N/A"),
154
+ "Publication Date": paper.get("publicationDate", "N/A"),
155
+ "Venue": paper.get("venue", "N/A"),
156
+ # "Publication Venue": (paper.get("publicationVenue") or {}).get("name", "N/A"),
157
+ # "Venue Type": (paper.get("publicationVenue") or {}).get("name", "N/A"),
158
+ "Journal Name": (paper.get("journal") or {}).get("name", "N/A"),
159
+ # "Journal Volume": paper.get("journal", {}).get("volume", "N/A"),
160
+ # "Journal Pages": paper.get("journal", {}).get("pages", "N/A"),
144
161
  "Citation Count": paper.get("citationCount", "N/A"),
162
+ "Authors": [
163
+ f"{author.get('name', 'N/A')} (ID: {author.get('authorId', 'N/A')})"
164
+ for author in paper.get("authors", [])
165
+ ],
145
166
  "URL": paper.get("url", "N/A"),
146
167
  "arxiv_id": paper.get("externalIds", {}).get("ArXiv", "N/A"),
147
168
  }
@@ -153,7 +174,10 @@ def get_multi_paper_recommendations(
153
174
  top_papers = list(filtered_papers.values())[:3]
154
175
  top_papers_info = "\n".join(
155
176
  [
156
- f"{i+1}. {paper['Title']} ({paper['Year']})"
177
+ # f"{i+1}. {paper['Title']} ({paper['Year']})"
178
+ f"{i+1}. {paper['Title']} ({paper['Year']}; "
179
+ f"semantic_scholar_paper_id: {paper['semantic_scholar_paper_id']}; "
180
+ f"arXiv ID: {paper['arxiv_id']})"
157
181
  for i, paper in enumerate(top_papers)
158
182
  ]
159
183
  )
@@ -165,10 +189,10 @@ def get_multi_paper_recommendations(
165
189
  "Papers are attached as an artifact."
166
190
  )
167
191
  content += " Here is a summary of the recommendations:\n"
168
- content += f"Number of papers found: {len(filtered_papers)}\n"
192
+ content += f"Number of recommended papers found: {len(filtered_papers)}\n"
169
193
  content += f"Query Paper IDs: {', '.join(paper_ids)}\n"
170
194
  content += f"Year: {year}\n" if year else ""
171
- content += "Top papers:\n" + top_papers_info
195
+ content += "Here are a few of these papers:\n" + top_papers_info
172
196
 
173
197
  return Command(
174
198
  update={
@@ -44,7 +44,7 @@ def query_results(question: str, state: Annotated[dict, InjectedState]) -> str:
44
44
  raise NoPapersFoundError(
45
45
  "No papers found. A search needs to be performed first."
46
46
  )
47
- context_key = state.get("last_displayed_papers","pdf_data")
47
+ context_key = state.get("last_displayed_papers", "pdf_data")
48
48
  dic_papers = state.get(context_key)
49
49
  df_papers = pd.DataFrame.from_dict(dic_papers, orient="index")
50
50
  df_agent = create_pandas_dataframe_agent(
@@ -14,6 +14,7 @@ from langchain_core.tools.base import InjectedToolCallId
14
14
  from langgraph.types import Command
15
15
  from pydantic import BaseModel, Field
16
16
 
17
+ # pylint: disable=R0914,R0912,R0915
17
18
  # Configure logging
18
19
  logging.basicConfig(level=logging.INFO)
19
20
  logger = logging.getLogger(__name__)
@@ -27,7 +28,7 @@ class SearchInput(BaseModel):
27
28
  "Be specific and include relevant academic terms."
28
29
  )
29
30
  limit: int = Field(
30
- default=5, description="Maximum number of results to return", ge=1, le=100
31
+ default=10, description="Maximum number of results to return", ge=1, le=100
31
32
  )
32
33
  year: Optional[str] = Field(
33
34
  default=None,
@@ -75,14 +76,26 @@ def search_tool(
75
76
  params["year"] = year
76
77
 
77
78
  # Wrap API call in try/except to catch connectivity issues
78
- try:
79
- response = requests.get(endpoint, params=params, timeout=10)
80
- response.raise_for_status() # Raises HTTPError for bad responses
81
- except requests.exceptions.RequestException as e:
82
- logger.error("Failed to connect to Semantic Scholar API: %s", e)
83
- raise RuntimeError(
84
- "Failed to connect to Semantic Scholar API. Please retry the same query."
85
- ) from e
79
+ response = None
80
+ for attempt in range(10):
81
+ try:
82
+ response = requests.get(endpoint, params=params, timeout=10)
83
+ response.raise_for_status() # Raises HTTPError for bad responses
84
+ break # Exit loop if request is successful
85
+ except requests.exceptions.RequestException as e:
86
+ logger.error(
87
+ "Attempt %d: Failed to connect to Semantic Scholar API: %s",
88
+ attempt + 1,
89
+ e,
90
+ )
91
+ if attempt == 9: # Last attempt
92
+ raise RuntimeError(
93
+ "Failed to connect to Semantic Scholar API after 10 attempts."
94
+ "Please retry the same query."
95
+ ) from e
96
+
97
+ if response is None:
98
+ raise RuntimeError("Failed to obtain a response from the Semantic Scholar API.")
86
99
 
87
100
  data = response.json()
88
101
 
@@ -108,11 +121,22 @@ def search_tool(
108
121
  # Create a dictionary to store the papers
109
122
  filtered_papers = {
110
123
  paper["paperId"]: {
111
- "paper_id": paper["paperId"],
124
+ "semantic_scholar_paper_id": paper["paperId"],
112
125
  "Title": paper.get("title", "N/A"),
113
126
  "Abstract": paper.get("abstract", "N/A"),
114
127
  "Year": paper.get("year", "N/A"),
128
+ "Publication Date": paper.get("publicationDate", "N/A"),
129
+ "Venue": paper.get("venue", "N/A"),
130
+ # "Publication Venue": (paper.get("publicationVenue") or {}).get("name", "N/A"),
131
+ # "Venue Type": (paper.get("publicationVenue") or {}).get("name", "N/A"),
132
+ "Journal Name": (paper.get("journal") or {}).get("name", "N/A"),
133
+ # "Journal Volume": paper.get("journal", {}).get("volume", "N/A"),
134
+ # "Journal Pages": paper.get("journal", {}).get("pages", "N/A"),
115
135
  "Citation Count": paper.get("citationCount", "N/A"),
136
+ "Authors": [
137
+ f"{author.get('name', 'N/A')} (ID: {author.get('authorId', 'N/A')})"
138
+ for author in paper.get("authors", [])
139
+ ],
116
140
  "URL": paper.get("url", "N/A"),
117
141
  "arxiv_id": paper.get("externalIds", {}).get("ArXiv", "N/A"),
118
142
  }
@@ -126,11 +150,15 @@ def search_tool(
126
150
  top_papers = list(filtered_papers.values())[:3]
127
151
  top_papers_info = "\n".join(
128
152
  [
129
- f"{i+1}. {paper['Title']} ({paper['Year']})"
153
+ f"{i+1}. {paper['Title']} ({paper['Year']}; "
154
+ f"semantic_scholar_paper_id: {paper['semantic_scholar_paper_id']}; "
155
+ f"arXiv ID: {paper['arxiv_id']})"
130
156
  for i, paper in enumerate(top_papers)
131
157
  ]
132
158
  )
133
159
 
160
+ logger.info("-----------Filtered %d papers", len(filtered_papers))
161
+
134
162
  content = (
135
163
  "Search was successful. Papers are attached as an artifact. "
136
164
  "Here is a summary of the search results:\n"
@@ -138,7 +166,7 @@ def search_tool(
138
166
  content += f"Number of papers found: {len(filtered_papers)}\n"
139
167
  content += f"Query: {query}\n"
140
168
  content += f"Year: {year}\n" if year else ""
141
- content += "Top papers:\n" + top_papers_info
169
+ content += "Top 3 papers:\n" + top_papers_info
142
170
 
143
171
  return Command(
144
172
  update={
@@ -14,6 +14,7 @@ from langchain_core.tools.base import InjectedToolCallId
14
14
  from langgraph.types import Command
15
15
  from pydantic import BaseModel, Field
16
16
 
17
+ # pylint: disable=R0914,R0912,R0915
17
18
  # Configure logging
18
19
  logging.basicConfig(level=logging.INFO)
19
20
  logger = logging.getLogger(__name__)
@@ -44,7 +45,7 @@ class SinglePaperRecInput(BaseModel):
44
45
  def get_single_paper_recommendations(
45
46
  paper_id: str,
46
47
  tool_call_id: Annotated[str, InjectedToolCallId],
47
- limit: int = 5,
48
+ limit: int = 10,
48
49
  year: Optional[str] = None,
49
50
  ) -> Command[Any]:
50
51
  """
@@ -85,16 +86,28 @@ def get_single_paper_recommendations(
85
86
  params["year"] = year
86
87
 
87
88
  # Wrap API call in try/except to catch connectivity issues and check response format
88
- try:
89
- response = requests.get(endpoint, params=params, timeout=cfg.request_timeout)
90
- response.raise_for_status() # Raises HTTPError for bad responses
91
- except requests.exceptions.RequestException as e:
92
- logger.error(
93
- "Failed to connect to Semantic Scholar API for recommendations: %s", e
94
- )
95
- raise RuntimeError(
96
- "Failed to connect to Semantic Scholar API. Please retry the same query."
97
- ) from e
89
+ response = None
90
+ for attempt in range(10):
91
+ try:
92
+ response = requests.get(
93
+ endpoint, params=params, timeout=cfg.request_timeout
94
+ )
95
+ response.raise_for_status() # Raises HTTPError for bad responses
96
+ break # Exit loop if request is successful
97
+ except requests.exceptions.RequestException as e:
98
+ logger.error(
99
+ "Attempt %d: Failed to connect to Semantic Scholar API for recommendations: %s",
100
+ attempt + 1,
101
+ e,
102
+ )
103
+ if attempt == 9: # Last attempt
104
+ raise RuntimeError(
105
+ "Failed to connect to Semantic Scholar API after 10 attempts."
106
+ "Please retry the same query."
107
+ ) from e
108
+
109
+ if response is None:
110
+ raise RuntimeError("Failed to obtain a response from the Semantic Scholar API.")
98
111
 
99
112
  logger.info(
100
113
  "API Response Status for recommendations of paper %s: %s",
@@ -125,11 +138,22 @@ def get_single_paper_recommendations(
125
138
  # Extract paper ID and title from recommendations
126
139
  filtered_papers = {
127
140
  paper["paperId"]: {
128
- "paper_id": paper["paperId"],
141
+ "semantic_scholar_paper_id": paper["paperId"],
129
142
  "Title": paper.get("title", "N/A"),
130
143
  "Abstract": paper.get("abstract", "N/A"),
131
144
  "Year": paper.get("year", "N/A"),
145
+ "Publication Date": paper.get("publicationDate", "N/A"),
146
+ "Venue": paper.get("venue", "N/A"),
147
+ # "Publication Venue": (paper.get("publicationVenue") or {}).get("name", "N/A"),
148
+ # "Venue Type": (paper.get("publicationVenue") or {}).get("name", "N/A"),
149
+ "Journal Name": (paper.get("journal") or {}).get("name", "N/A"),
150
+ # "Journal Volume": paper.get("journal", {}).get("volume", "N/A"),
151
+ # "Journal Pages": paper.get("journal", {}).get("pages", "N/A"),
132
152
  "Citation Count": paper.get("citationCount", "N/A"),
153
+ "Authors": [
154
+ f"{author.get('name', 'N/A')} (ID: {author.get('authorId', 'N/A')})"
155
+ for author in paper.get("authors", [])
156
+ ],
133
157
  "URL": paper.get("url", "N/A"),
134
158
  "arxiv_id": paper.get("externalIds", {}).get("ArXiv", "N/A"),
135
159
  }
@@ -141,7 +165,10 @@ def get_single_paper_recommendations(
141
165
  top_papers = list(filtered_papers.values())[:3]
142
166
  top_papers_info = "\n".join(
143
167
  [
144
- f"{i+1}. {paper['Title']} ({paper['Year']})"
168
+ # f"{i+1}. {paper['Title']} ({paper['Year']})"
169
+ f"{i+1}. {paper['Title']} ({paper['Year']}; "
170
+ f"semantic_scholar_paper_id: {paper['semantic_scholar_paper_id']}; "
171
+ f"arXiv ID: {paper['arxiv_id']})"
145
172
  for i, paper in enumerate(top_papers)
146
173
  ]
147
174
  )
@@ -153,10 +180,9 @@ def get_single_paper_recommendations(
153
180
  "Papers are attached as an artifact. "
154
181
  "Here is a summary of the recommendations:\n"
155
182
  )
156
- content += f"Number of papers found: {len(filtered_papers)}\n"
183
+ content += f"Number of recommended papers found: {len(filtered_papers)}\n"
157
184
  content += f"Query Paper ID: {paper_id}\n"
158
- content += f"Year: {year}\n" if year else ""
159
- content += "Top papers:\n" + top_papers_info
185
+ content += "Here are a few of these papers:\n" + top_papers_info
160
186
 
161
187
  return Command(
162
188
  update={
@@ -104,9 +104,10 @@ def zotero_search_tool(
104
104
 
105
105
  # Define filter criteria
106
106
  filter_item_types = cfg.zotero.filter_item_types if only_articles else []
107
- filter_excluded_types = (
108
- cfg.zotero.filter_excluded_types
109
- ) # Exclude non-research items
107
+ logger.debug("Filtering item types: %s", filter_item_types)
108
+ # filter_excluded_types = (
109
+ # cfg.zotero.filter_excluded_types
110
+ # ) # Exclude non-research items
110
111
 
111
112
  # Filter and format papers
112
113
  filtered_papers = {}
@@ -119,19 +120,19 @@ def zotero_search_tool(
119
120
  if not isinstance(data, dict):
120
121
  continue
121
122
 
122
- item_type = data.get("itemType")
123
+ item_type = data.get("itemType", "N/A")
123
124
  logger.debug("Item type: %s", item_type)
124
125
 
125
126
  # Exclude attachments, notes, and other unwanted types
126
- if (
127
- not item_type
128
- or not isinstance(item_type, str)
129
- or item_type in filter_excluded_types # Skip attachments & notes
130
- or (
131
- only_articles and item_type not in filter_item_types
132
- ) # Skip non-research types
133
- ):
134
- continue
127
+ # if (
128
+ # not item_type
129
+ # or not isinstance(item_type, str)
130
+ # or item_type in filter_excluded_types # Skip attachments & notes
131
+ # or (
132
+ # only_articles and item_type not in filter_item_types
133
+ # ) # Skip non-research types
134
+ # ):
135
+ # continue
135
136
 
136
137
  key = data.get("key")
137
138
  if not key:
@@ -140,13 +141,29 @@ def zotero_search_tool(
140
141
  # Use the imported utility function's mapping to get collection paths
141
142
  collection_paths = item_to_collections.get(key, ["/Unknown"])
142
143
 
144
+ # Extract metadata safely
143
145
  filtered_papers[key] = {
144
146
  "Title": data.get("title", "N/A"),
145
147
  "Abstract": data.get("abstractNote", "N/A"),
146
- "Date": data.get("date", "N/A"),
148
+ "Publication Date": data.get(
149
+ "date", "N/A"
150
+ ), # Correct field for publication date
147
151
  "URL": data.get("url", "N/A"),
148
152
  "Type": item_type if isinstance(item_type, str) else "N/A",
149
- "Collections": collection_paths, # Now displays full paths
153
+ "Collections": collection_paths, # Displays full collection paths
154
+ "Citation Count": data.get("citationCount", "N/A"), # Shows citations
155
+ "Venue": data.get("venue", "N/A"), # Displays venue
156
+ "Publication Venue": data.get(
157
+ "publicationTitle", "N/A"
158
+ ), # Matches with Zotero Write
159
+ "Journal Name": data.get("journalAbbreviation", "N/A"), # Journal Name
160
+ # "Journal Volume": data.get("volume", "N/A"), # Journal Volume
161
+ # "Journal Pages": data.get("pages", "N/A"), # Journal Pages
162
+ "Authors": [
163
+ f"{creator.get('firstName', '')} {creator.get('lastName', '')}".strip()
164
+ for creator in data.get("creators", []) # Prevents NoneType error
165
+ if isinstance(creator, dict) and creator.get("creatorType") == "author"
166
+ ],
150
167
  }
151
168
 
152
169
  if not filtered_papers:
@@ -170,7 +187,7 @@ def zotero_search_tool(
170
187
  content += " And here is a summary of the retrieval results:\n"
171
188
  content += f"Number of papers found: {len(filtered_papers)}\n"
172
189
  content += f"Query: {query}\n"
173
- content += "Top papers:\n" + top_papers_info
190
+ content += "Here are a few of these papers:\n" + top_papers_info
174
191
 
175
192
  return Command(
176
193
  update={
@@ -185,21 +185,53 @@ def zotero_save_tool(
185
185
  # Format papers for Zotero and assign to the specified collection
186
186
  zotero_items = []
187
187
  for paper_id, paper in fetched_papers.items():
188
- title = paper.get("Title", paper.get("title", "N/A"))
189
- abstract = paper.get("Abstract", paper.get("abstractNote", "N/A"))
190
- date = paper.get("Date", paper.get("date", "N/A"))
191
- url = paper.get("URL", paper.get("url", paper.get("URL", "N/A")))
192
- citations = paper.get("Citations", "N/A")
188
+ title = paper.get("Title", "N/A")
189
+ abstract = paper.get("Abstract", "N/A")
190
+ publication_date = paper.get("Publication Date", "N/A") # Use Publication Date
191
+ url = paper.get("URL", "N/A")
192
+ citations = paper.get("Citation Count", "N/A")
193
+ venue = paper.get("Venue", "N/A")
194
+ publication_venue = paper.get("Publication Venue", "N/A")
195
+ journal_name = paper.get("Journal Name", "N/A")
196
+ journal_volume = paper.get("Journal Volume", "N/A")
197
+ journal_pages = paper.get("Journal Pages", "N/A")
198
+
199
+ # Convert Authors list to Zotero format
200
+ authors = [
201
+ (
202
+ {
203
+ "creatorType": "author",
204
+ "firstName": name.split(" ")[0],
205
+ "lastName": " ".join(name.split(" ")[1:]),
206
+ }
207
+ if " " in name
208
+ else {"creatorType": "author", "lastName": name}
209
+ )
210
+ for name in [
211
+ author.split(" (ID: ")[0] for author in paper.get("Authors", [])
212
+ ]
213
+ ]
193
214
 
194
215
  zotero_items.append(
195
216
  {
196
217
  "itemType": "journalArticle",
197
218
  "title": title,
198
219
  "abstractNote": abstract,
199
- "date": date,
220
+ "date": publication_date, # Now saving full publication date
200
221
  "url": url,
201
222
  "extra": f"Paper ID: {paper_id}\nCitations: {citations}",
202
223
  "collections": [matched_collection_key],
224
+ "publicationTitle": (
225
+ publication_venue if publication_venue != "N/A" else venue
226
+ ), # Use publication venue if available
227
+ "journalAbbreviation": journal_name, # Save Journal Name
228
+ "volume": (
229
+ journal_volume if journal_volume != "N/A" else None
230
+ ), # Save Journal Volume
231
+ "pages": (
232
+ journal_pages if journal_pages != "N/A" else None
233
+ ), # Save Journal Pages
234
+ "creators": authors, # Save authors list properly
203
235
  }
204
236
  )
205
237
 
@@ -232,7 +264,7 @@ def zotero_save_tool(
232
264
  for i, paper in enumerate(top_papers)
233
265
  ]
234
266
  )
235
- content += "Here are the top articles:\n" + top_papers_info
267
+ content += "Here are a few of these articles:\n" + top_papers_info
236
268
 
237
269
  return Command(
238
270
  update={
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.2
2
2
  Name: aiagents4pharma
3
- Version: 1.30.0
3
+ Version: 1.30.1
4
4
  Summary: AI Agents for drug discovery, drug development, and other pharmaceutical R&D.
5
5
  Classifier: Programming Language :: Python :: 3
6
6
  Classifier: License :: OSI Approved :: MIT License
@@ -23,7 +23,7 @@ Requires-Dist: langchain-experimental==0.3.3
23
23
  Requires-Dist: langchain-nvidia-ai-endpoints==0.3.9
24
24
  Requires-Dist: langchain-openai==0.2.5
25
25
  Requires-Dist: langchain_ollama==0.2.3
26
- Requires-Dist: langgraph_supervisor==0.0.4
26
+ Requires-Dist: langgraph_supervisor==0.0.9
27
27
  Requires-Dist: matplotlib==3.9.2
28
28
  Requires-Dist: openai==1.59.4
29
29
  Requires-Dist: ollama==0.4.7
@@ -136,9 +136,9 @@ aiagents4pharma/talk2knowledgegraphs/utils/extractions/__init__.py,sha256=7gwwtf
136
136
  aiagents4pharma/talk2knowledgegraphs/utils/extractions/pcst.py,sha256=m5p0yoJb7I19ua5yeQfXPf7c4r6S1XPwttsrM7Qoy94,9336
137
137
  aiagents4pharma/talk2scholars/__init__.py,sha256=gphERyVKZHvOnMQsml7TIHlaIshHJ75R1J3FKExkfuY,120
138
138
  aiagents4pharma/talk2scholars/agents/__init__.py,sha256=inLJpRDlT80RNSi3OFNi2lpbbTisQgzNkMYTvnhFjVY,203
139
- aiagents4pharma/talk2scholars/agents/main_agent.py,sha256=TABzGSOg7I0_fJ0qybBVqZDdrU8YCjyG_m-kasO4WgE,2854
140
- aiagents4pharma/talk2scholars/agents/paper_download_agent.py,sha256=3GxxNhA_VGf3QOozIjr5cEY2te5n6rQSdZpdFajZttA,3006
141
- aiagents4pharma/talk2scholars/agents/pdf_agent.py,sha256=c9-_z5qp5Zkgh6piEIlgI4uo4OMXD3janZNmfYwnFCg,3729
139
+ aiagents4pharma/talk2scholars/agents/main_agent.py,sha256=KdKbnc-5zxktLUkzEZHC3bvn8_iKa8Kk4So90i48cdE,3275
140
+ aiagents4pharma/talk2scholars/agents/paper_download_agent.py,sha256=wrK9CPy5evH56fyOZ2BlkBfY5aEj6tefc4jSDPVzYvs,3041
141
+ aiagents4pharma/talk2scholars/agents/pdf_agent.py,sha256=xt_bgCTVJ6jOCkhc_rHh8pngq4uS8kuNOevuP3eC-sA,3702
142
142
  aiagents4pharma/talk2scholars/agents/s2_agent.py,sha256=ua1bjKE2HBKZuLnDn8me5fuV1lSvdZbwAlo3Yp27TT4,4659
143
143
  aiagents4pharma/talk2scholars/agents/zotero_agent.py,sha256=5jfIJiLsRdlCJjkF7BQMkP5PsEY_Gr7SfztWKozbUGo,4223
144
144
  aiagents4pharma/talk2scholars/configs/__init__.py,sha256=tf2gz8n7M4ko6xLdX_C925ELVIxoP6SgkPcbeh59ad4,151
@@ -146,73 +146,73 @@ aiagents4pharma/talk2scholars/configs/config.yaml,sha256=-8X0_gTmjEuXAeIrnppw3Np
146
146
  aiagents4pharma/talk2scholars/configs/agents/__init__.py,sha256=yyh7PB2oY_JulnpSQCWS4wwCH_uzIdt47O2Ay48x_oU,75
147
147
  aiagents4pharma/talk2scholars/configs/agents/talk2scholars/__init__.py,sha256=64GEWAoKOd_YHLi27eSOcOC5eSLK0IG_FNra3ZBt02Y,146
148
148
  aiagents4pharma/talk2scholars/configs/agents/talk2scholars/main_agent/__init__.py,sha256=fqQQ-GlRcbzru2KmEk3oMma0R6_SzGM8dOXzYeU4oVA,46
149
- aiagents4pharma/talk2scholars/configs/agents/talk2scholars/main_agent/default.yaml,sha256=rZfZ_dJArjlznHzusjxCnOjhptLTyejFiB0euV5R13c,662
149
+ aiagents4pharma/talk2scholars/configs/agents/talk2scholars/main_agent/default.yaml,sha256=GZRqZoUy8eAWXyd9GJDh-A4mYSJOhnkid6TaIJTGBeU,1192
150
150
  aiagents4pharma/talk2scholars/configs/agents/talk2scholars/paper_download_agent/__init__.py,sha256=fqQQ-GlRcbzru2KmEk3oMma0R6_SzGM8dOXzYeU4oVA,46
151
151
  aiagents4pharma/talk2scholars/configs/agents/talk2scholars/pdf_agent/__init__.py,sha256=fqQQ-GlRcbzru2KmEk3oMma0R6_SzGM8dOXzYeU4oVA,46
152
152
  aiagents4pharma/talk2scholars/configs/agents/talk2scholars/s2_agent/__init__.py,sha256=fqQQ-GlRcbzru2KmEk3oMma0R6_SzGM8dOXzYeU4oVA,46
153
- aiagents4pharma/talk2scholars/configs/agents/talk2scholars/s2_agent/default.yaml,sha256=sn6vX6r-P0CR7UWS63ZqCmMKKn4As8pZoITRWx8sdoo,1151
153
+ aiagents4pharma/talk2scholars/configs/agents/talk2scholars/s2_agent/default.yaml,sha256=UIYkr060IpoLHMXVPxGAjrkCJSjX7H0DzcFSasyW6sE,1185
154
154
  aiagents4pharma/talk2scholars/configs/agents/talk2scholars/zotero_agent/__init__.py,sha256=fqQQ-GlRcbzru2KmEk3oMma0R6_SzGM8dOXzYeU4oVA,46
155
155
  aiagents4pharma/talk2scholars/configs/agents/talk2scholars/zotero_agent/default.yaml,sha256=lWBreotqsu1jlHi1uZ9vY60zi-MiiG2VuHxo5IoAvkE,1112
156
156
  aiagents4pharma/talk2scholars/configs/app/__init__.py,sha256=JoSZV6N669kGMv5zLDszwf0ZjcRHx9TJfIqGhIIdPXE,70
157
157
  aiagents4pharma/talk2scholars/configs/app/frontend/__init__.py,sha256=fqQQ-GlRcbzru2KmEk3oMma0R6_SzGM8dOXzYeU4oVA,46
158
- aiagents4pharma/talk2scholars/configs/app/frontend/default.yaml,sha256=wsELBdRLv6UqZ9QZfwpS7K4xfMj5s-a99-aXqIs6WEI,868
158
+ aiagents4pharma/talk2scholars/configs/app/frontend/default.yaml,sha256=A6nYjrgzEyRv5JYsGN7oqNX4-tufMBZ6mg-A7bMX6V4,906
159
159
  aiagents4pharma/talk2scholars/configs/tools/__init__.py,sha256=GwpgnRrfjyZDVsangewSVTG3H3GBYM6s_YaQd9-zI10,238
160
160
  aiagents4pharma/talk2scholars/configs/tools/download_arxiv_paper/__init__.py,sha256=fqQQ-GlRcbzru2KmEk3oMma0R6_SzGM8dOXzYeU4oVA,46
161
161
  aiagents4pharma/talk2scholars/configs/tools/multi_paper_recommendation/__init__.py,sha256=fqQQ-GlRcbzru2KmEk3oMma0R6_SzGM8dOXzYeU4oVA,46
162
- aiagents4pharma/talk2scholars/configs/tools/multi_paper_recommendation/default.yaml,sha256=QV7HrG7NdjBEjTMszh27MbGBYMbf_78V3sCGftdTtvo,442
162
+ aiagents4pharma/talk2scholars/configs/tools/multi_paper_recommendation/default.yaml,sha256=comNgL9hRpH--IWuEsrN6hV5WdrJmh-ZsRh7hbryVhg,631
163
163
  aiagents4pharma/talk2scholars/configs/tools/question_and_answer/__init__.py,sha256=fqQQ-GlRcbzru2KmEk3oMma0R6_SzGM8dOXzYeU4oVA,46
164
164
  aiagents4pharma/talk2scholars/configs/tools/retrieve_semantic_scholar_paper_id/__init__.py,sha256=fqQQ-GlRcbzru2KmEk3oMma0R6_SzGM8dOXzYeU4oVA,46
165
165
  aiagents4pharma/talk2scholars/configs/tools/retrieve_semantic_scholar_paper_id/default.yaml,sha256=HG-N8yRjlX9zFwbIBvaDI9ndKjfL-gqPTCCPMLgdUpw,271
166
166
  aiagents4pharma/talk2scholars/configs/tools/search/__init__.py,sha256=fqQQ-GlRcbzru2KmEk3oMma0R6_SzGM8dOXzYeU4oVA,46
167
- aiagents4pharma/talk2scholars/configs/tools/search/default.yaml,sha256=153R4NmtG2bGKpxwo73tR15IetGKdrD4QgZRlz8zS18,504
167
+ aiagents4pharma/talk2scholars/configs/tools/search/default.yaml,sha256=RlORkZFLDKFXQqjXjTM0NqsHyLDTIfirbWlAAXrAT00,694
168
168
  aiagents4pharma/talk2scholars/configs/tools/single_paper_recommendation/__init__.py,sha256=fqQQ-GlRcbzru2KmEk3oMma0R6_SzGM8dOXzYeU4oVA,46
169
- aiagents4pharma/talk2scholars/configs/tools/single_paper_recommendation/default.yaml,sha256=1QtFWqnGftIipftlALnG_IdCAOwzJTyOpUSUfWqQ7cA,596
169
+ aiagents4pharma/talk2scholars/configs/tools/single_paper_recommendation/default.yaml,sha256=PFXz5oRpNbjQp789QlgmyXktdVWwwVfoYi7mAnlRgik,785
170
170
  aiagents4pharma/talk2scholars/configs/tools/zotero_read/__init__.py,sha256=fqQQ-GlRcbzru2KmEk3oMma0R6_SzGM8dOXzYeU4oVA,46
171
- aiagents4pharma/talk2scholars/configs/tools/zotero_read/default.yaml,sha256=6ZvZdCsnudPeVjnatv78Z0QfMwsHZuliE2RCIRCW05Y,1221
171
+ aiagents4pharma/talk2scholars/configs/tools/zotero_read/default.yaml,sha256=ifOtX8Huj1LPHF_rRe1JxWgrDaLiKg6HVoQen_1R1Ls,1223
172
172
  aiagents4pharma/talk2scholars/configs/tools/zotero_write/__inti__.py,sha256=fqQQ-GlRcbzru2KmEk3oMma0R6_SzGM8dOXzYeU4oVA,46
173
173
  aiagents4pharma/talk2scholars/state/__init__.py,sha256=S6SxlszIMZSIMJehjevPF9sKyR-PAwWb5TEdo6xWXE8,103
174
- aiagents4pharma/talk2scholars/state/state_talk2scholars.py,sha256=0dFSdsGiiilNIuuHQFEjpjQmcZXlK0JQwMV_GCiAsuU,2490
174
+ aiagents4pharma/talk2scholars/state/state_talk2scholars.py,sha256=Vngb5iEZxGORHq4OfCQK6aCqycBYsZZO9DkefjMuUVs,2632
175
175
  aiagents4pharma/talk2scholars/tests/__init__.py,sha256=U3PsTiUZaUBD1IZanFGkDIOdFieDVJtGKQ5-woYUo8c,45
176
- aiagents4pharma/talk2scholars/tests/test_llm_main_integration.py,sha256=SAMG-Kb2S9sei8Us5vUWCUJikTKXPZVKQ6aJJPEhJsc,1880
177
- aiagents4pharma/talk2scholars/tests/test_main_agent.py,sha256=5QnOPKNrQCd5GdYU-vVF3bUrmitOsUcazZA7BsXeomo,5947
176
+ aiagents4pharma/talk2scholars/tests/test_llm_main_integration.py,sha256=FBRqS06IKJYFOudQEHQr-9oJ4tftkH-gTCowTAqwWSg,3686
177
+ aiagents4pharma/talk2scholars/tests/test_main_agent.py,sha256=hgKgMXAGGqGJ6EXbjfsdZ5t1IWo2W67tc_F7vK747Qg,6844
178
178
  aiagents4pharma/talk2scholars/tests/test_paper_download_agent.py,sha256=CP4fKFU_JYP_AXvTptnwpjaVar1d5lVKV5vxYgH_1j4,5309
179
179
  aiagents4pharma/talk2scholars/tests/test_paper_download_tools.py,sha256=_bGuoo4b6zD_vwLa7jGziWDT5qRtavsf02Jiaa7JIRU,5817
180
180
  aiagents4pharma/talk2scholars/tests/test_pdf_agent.py,sha256=TN4Sq5-SCxv-9VfFyq7sOlBlxbekmnWuB7-qh4MrhkA,4656
181
- aiagents4pharma/talk2scholars/tests/test_question_and_answer_tool.py,sha256=TpCDiGfsC2y6bOkm0ZTXjT1Vp8D-Po25wiEH5aDT_DA,6491
182
- aiagents4pharma/talk2scholars/tests/test_routing_logic.py,sha256=AZrvaEBDk51KL6edrZY3GpQ_N6VbrlADqXFeg_jxDoQ,2284
181
+ aiagents4pharma/talk2scholars/tests/test_question_and_answer_tool.py,sha256=te6w1tPvuNSNCTihIwaCT083BzUCowjOQPwuCodXR4k,8723
182
+ aiagents4pharma/talk2scholars/tests/test_routing_logic.py,sha256=WPFGMzCidY_I6K2tPsXUVFha_qOr-nFt2-1eSeq3eME,2780
183
183
  aiagents4pharma/talk2scholars/tests/test_s2_agent.py,sha256=BhW1wGc-wUPS4fwNBQRtBXJaJ_i7L6t_G9Bq57fK7rI,7784
184
184
  aiagents4pharma/talk2scholars/tests/test_s2_display.py,sha256=w1TqgEdl9WpW_A2Ud1slfI5fkRFkKtKadAlkEfSLOZk,2247
185
- aiagents4pharma/talk2scholars/tests/test_s2_multi.py,sha256=fkTQ268WqOYvJEtTteVJ7eav3QuMAahhYR6LOnx1Huk,10161
185
+ aiagents4pharma/talk2scholars/tests/test_s2_multi.py,sha256=0XGtl6BigfDI6zpblQlJmKqml-fQ30HQB12PMCOl0h8,11115
186
186
  aiagents4pharma/talk2scholars/tests/test_s2_query.py,sha256=hEcBt142nn_bKV9lor__Yk4LusgE1tN5dA-qpT606Bc,2443
187
187
  aiagents4pharma/talk2scholars/tests/test_s2_retrieve.py,sha256=YtA2nbPRtoSR7mPqEjqLF5ERGVzTfeULztsNoCI48X8,2003
188
- aiagents4pharma/talk2scholars/tests/test_s2_search.py,sha256=ZnfBO0b9xMwMvT1oaw1yIFxLToSej1_KSyEzHr6HbOQ,9068
189
- aiagents4pharma/talk2scholars/tests/test_s2_single.py,sha256=J4r_J4gPeIIWAIUlahClINnCu7bEiW5AcphRNChv2Eo,9317
188
+ aiagents4pharma/talk2scholars/tests/test_s2_search.py,sha256=wuXEke7_bmancvlh7C5WFbXRbpcUVrYGhD_X47tOt3M,9834
189
+ aiagents4pharma/talk2scholars/tests/test_s2_single.py,sha256=l0uBxwjqojlmT65j7WK3R-u2PMyPcMf95yLGApSycO0,10274
190
190
  aiagents4pharma/talk2scholars/tests/test_state.py,sha256=_iHXvoZnU_eruf8l1sQKBSCIVnxNkH_9VzkVtZZA6bY,384
191
191
  aiagents4pharma/talk2scholars/tests/test_zotero_agent.py,sha256=3TKz6yjNfYulaQv-MBv1zXCmR9xh9g3ju4Ge5HDdt1o,6136
192
192
  aiagents4pharma/talk2scholars/tests/test_zotero_path.py,sha256=XeXYqTlSkJgZ02tCz84VNDHGYnmrxrGFLxlLq_Bargs,2356
193
- aiagents4pharma/talk2scholars/tests/test_zotero_read.py,sha256=vLAPAFeL8MjDju_HlsLnio-9HxzN1RqOApr9jyemYBk,14951
193
+ aiagents4pharma/talk2scholars/tests/test_zotero_read.py,sha256=oFO_J-2EbMz7D1rfVlBmJXB3YrWZekpTwsU-dX8KlyM,15279
194
194
  aiagents4pharma/talk2scholars/tests/test_zotero_write.py,sha256=76V7ezb6Xw-BEEwdJQvJs78JPGRYpAsijHIi3bTGsW8,23206
195
195
  aiagents4pharma/talk2scholars/tools/__init__.py,sha256=UtGutYNNaRcr2nOmT_XqbTiaJpgVYKo3KVGVPFVrX2Y,107
196
196
  aiagents4pharma/talk2scholars/tools/paper_download/__init__.py,sha256=0XmPLEqCply536Y1uWksmHYjlgNWcmcMpZx63XvGEFI,413
197
- aiagents4pharma/talk2scholars/tools/paper_download/abstract_downloader.py,sha256=UgJOu9o9RAjlzMahUgPWV6iCGC6n7atDOa0VEp8bGx0,1325
198
- aiagents4pharma/talk2scholars/tools/paper_download/arxiv_downloader.py,sha256=kP5tyLc92zlkF5EPA7zVYSjpVk724pCsjHFgOntb_Tw,3869
199
- aiagents4pharma/talk2scholars/tools/paper_download/download_arxiv_input.py,sha256=EJBr9RSSog8tFa7BIFIDZ-Qn7qjqJIAuRb_hF4wZ49Q,2181
197
+ aiagents4pharma/talk2scholars/tools/paper_download/abstract_downloader.py,sha256=nwVhRUqkdta3WLgd9roAWpx-bhJm3aAgJLx4RSYSJXQ,1327
198
+ aiagents4pharma/talk2scholars/tools/paper_download/arxiv_downloader.py,sha256=hM9fdbwtOxuW1mpAfmfbILTI7kSVALgrGpjC2vMsvf8,3970
199
+ aiagents4pharma/talk2scholars/tools/paper_download/download_arxiv_input.py,sha256=zndAnNFRBztuBK-tpW9UyYsGL8tB3gFjYhiTq6nzZu4,2203
200
200
  aiagents4pharma/talk2scholars/tools/pdf/__init__.py,sha256=WOm-o-fFzyjFZBaHg658Gjzdiu1Kt-h9xvzvw0hR7aE,103
201
- aiagents4pharma/talk2scholars/tools/pdf/question_and_answer.py,sha256=22JvT7F0rY11TF40pBfe9Cn2Y-6Tx73NfWDt4NJv700,6639
201
+ aiagents4pharma/talk2scholars/tools/pdf/question_and_answer.py,sha256=tNv0frCr0dxA0lfbwf5yudKRyWtbuRGMwqW5mk9u4eE,8797
202
202
  aiagents4pharma/talk2scholars/tools/s2/__init__.py,sha256=wytqCmGm8Fbl8y5qLdIkxhhG8VHLYMifCGjbH_LK2Fc,258
203
203
  aiagents4pharma/talk2scholars/tools/s2/display_results.py,sha256=UR0PtEHGDpOhPH0Di5HT8-Fip2RkEMTJgzROsChb1gc,2959
204
- aiagents4pharma/talk2scholars/tools/s2/multi_paper_rec.py,sha256=Y-nIjtPSYvL7kLaN9_cueQM-VZF1SPZZ1_FB8KhS0XY,6352
205
- aiagents4pharma/talk2scholars/tools/s2/query_results.py,sha256=S4yBNtg1loDu4ckLPrW4H8GAswriPaRU4U08cOuw2HE,2028
204
+ aiagents4pharma/talk2scholars/tools/s2/multi_paper_rec.py,sha256=FeuZfx8TQ7kB68_9RqDwyhB5ut14YedlHVzNIbXHXno,7744
205
+ aiagents4pharma/talk2scholars/tools/s2/query_results.py,sha256=5yXuHqz5UKO9BbovEUnqgjcMvqVG4vp9VJO8Zaz5N1w,2029
206
206
  aiagents4pharma/talk2scholars/tools/s2/retrieve_semantic_scholar_paper_id.py,sha256=llzMMnEQKeYVamJbF4_DTMx-BgVe79vwDcUIFGLrmUY,2615
207
- aiagents4pharma/talk2scholars/tools/s2/search.py,sha256=496sv4aAfqB65zgjNxU2AGnhclcRRNF0VuG4fguN3gw,5319
208
- aiagents4pharma/talk2scholars/tools/s2/single_paper_rec.py,sha256=YJ1P-BAn5d2vGKIg3OTsYH1g8as5LeqaRuraL0buqJo,6095
207
+ aiagents4pharma/talk2scholars/tools/s2/search.py,sha256=L7myD4ET_iTTO4LZ9V8zU6y4KHSDSn5HylG9Z1i1Wo4,6722
208
+ aiagents4pharma/talk2scholars/tools/s2/single_paper_rec.py,sha256=dLLEq7mwOMfkO3eydFcZzB3mU3N_PmX3vCTwJSh7lYA,7474
209
209
  aiagents4pharma/talk2scholars/tools/zotero/__init__.py,sha256=HF47ta_r94Y4gP3fK3WG_ix8kg1zUQw8yWjLJksnTfc,100
210
- aiagents4pharma/talk2scholars/tools/zotero/zotero_read.py,sha256=eRqdQCyWws8q6iC_w4OIBR6w9Ha5x5UT5S8jifNxcqw,6142
211
- aiagents4pharma/talk2scholars/tools/zotero/zotero_write.py,sha256=dqYc5HWMK3vz77psHYUosMLE63NYg9Nk6xbWy8TOrU4,9246
210
+ aiagents4pharma/talk2scholars/tools/zotero/zotero_read.py,sha256=7WbXUeIgAyyeuACeLZF2wpfrWbPkubMViR0Y0bZ2yHw,7189
211
+ aiagents4pharma/talk2scholars/tools/zotero/zotero_write.py,sha256=zvoV-R_kTQq7ly48gS70s-7RvBRMk3zpmGpIfGuSdRI,10650
212
212
  aiagents4pharma/talk2scholars/tools/zotero/utils/__init__.py,sha256=Ll8YQZj9sYJpXmoGxj_0ZcuEHDj06_CUqdDlTlevGL4,53
213
213
  aiagents4pharma/talk2scholars/tools/zotero/utils/zotero_path.py,sha256=nHmYe3kcrygNOslHki4YeMztfnmRDPul4gZvXl_XsV0,1954
214
- aiagents4pharma-1.30.0.dist-info/LICENSE,sha256=IcIbyB1Hyk5ZDah03VNQvJkbNk2hkBCDqQ8qtnCvB4Q,1077
215
- aiagents4pharma-1.30.0.dist-info/METADATA,sha256=411N0HHxJVGSKxY07zzYQ4Z60aIJRN7fd4cMaSa7uVc,13245
216
- aiagents4pharma-1.30.0.dist-info/WHEEL,sha256=jB7zZ3N9hIM9adW7qlTAyycLYW9npaWKLRzaoVcLKcM,91
217
- aiagents4pharma-1.30.0.dist-info/top_level.txt,sha256=-AH8rMmrSnJtq7HaAObS78UU-cTCwvX660dSxeM7a0A,16
218
- aiagents4pharma-1.30.0.dist-info/RECORD,,
214
+ aiagents4pharma-1.30.1.dist-info/LICENSE,sha256=IcIbyB1Hyk5ZDah03VNQvJkbNk2hkBCDqQ8qtnCvB4Q,1077
215
+ aiagents4pharma-1.30.1.dist-info/METADATA,sha256=15DmgM25zLdQr8QNkb0mXjeZ0GoQX5rykgPtrkoL2BM,13245
216
+ aiagents4pharma-1.30.1.dist-info/WHEEL,sha256=beeZ86-EfXScwlR_HKu4SllMC9wUEj_8Z_4FJ3egI2w,91
217
+ aiagents4pharma-1.30.1.dist-info/top_level.txt,sha256=-AH8rMmrSnJtq7HaAObS78UU-cTCwvX660dSxeM7a0A,16
218
+ aiagents4pharma-1.30.1.dist-info/RECORD,,
@@ -1,5 +1,5 @@
1
1
  Wheel-Version: 1.0
2
- Generator: setuptools (75.8.2)
2
+ Generator: setuptools (76.1.0)
3
3
  Root-Is-Purelib: true
4
4
  Tag: py3-none-any
5
5