ursa-ai 0.2.9__py3-none-any.whl → 0.2.11__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Note: this version of ursa-ai has been flagged as a potentially problematic release.

ursa/agents/__init__.py CHANGED
@@ -1,9 +1,17 @@
- from .arxiv_agent import ArxivAgent, PaperMetadata, PaperState
- from .base import BaseAgent, BaseChatModel
- from .code_review_agent import CodeReviewAgent, CodeReviewState
- from .execution_agent import ExecutionAgent, ExecutionState
- from .hypothesizer_agent import HypothesizerAgent, HypothesizerState
- from .mp_agent import MaterialsProjectAgent
- from .planning_agent import PlanningAgent, PlanningState
- from .recall_agent import RecallAgent
- from .websearch_agent import WebSearchAgent, WebSearchState
+ from .arxiv_agent import ArxivAgent as ArxivAgent
+ from .arxiv_agent import PaperMetadata as PaperMetadata
+ from .arxiv_agent import PaperState as PaperState
+ from .base import BaseAgent as BaseAgent
+ from .base import BaseChatModel as BaseChatModel
+ from .code_review_agent import CodeReviewAgent as CodeReviewAgent
+ from .code_review_agent import CodeReviewState as CodeReviewState
+ from .execution_agent import ExecutionAgent as ExecutionAgent
+ from .execution_agent import ExecutionState as ExecutionState
+ from .hypothesizer_agent import HypothesizerAgent as HypothesizerAgent
+ from .hypothesizer_agent import HypothesizerState as HypothesizerState
+ from .mp_agent import MaterialsProjectAgent as MaterialsProjectAgent
+ from .planning_agent import PlanningAgent as PlanningAgent
+ from .planning_agent import PlanningState as PlanningState
+ from .recall_agent import RecallAgent as RecallAgent
+ from .websearch_agent import WebSearchAgent as WebSearchAgent
+ from .websearch_agent import WebSearchState as WebSearchState
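The rewritten `__init__.py` switches every import to the redundant-alias form (`from .m import X as X`). This is the explicit re-export idiom: the `X as X` spelling tells type checkers and linters (for example mypy with `implicit_reexport` disabled, or Ruff's unused-import rule) that the name is intentionally part of the package's public API rather than an accidentally unused import. A minimal sketch of the idiom with a hypothetical module, not ursa-ai's actual layout:

    # pkg/_impl.py (hypothetical private submodule)
    class Widget:
        """A public class defined in a private submodule."""

    # pkg/__init__.py
    # The redundant alias marks Widget as an intentional re-export, so strict
    # type checkers and linters do not flag it as an unused import.
    from ._impl import Widget as Widget

    # Listing the name in __all__ is the older, equivalent declaration:
    # __all__ = ["Widget"]

Either spelling makes `from pkg import Widget` a declared, type-checker-visible part of the package surface.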
ursa/agents/arxiv_agent.py CHANGED
@@ -211,12 +211,10 @@ class ArxivAgent(BaseAgent):
              except Exception as e:
                  full_text = f"Error loading paper: {e}"
 
-             papers.append(
-                 {
-                     "arxiv_id": arxiv_id,
-                     "full_text": full_text,
-                 }
-             )
+             papers.append({
+                 "arxiv_id": arxiv_id,
+                 "full_text": full_text,
+             })
 
          return papers
 
@@ -279,28 +277,23 @@ class ArxivAgent(BaseAgent):
                  )
 
                  if relevant_docs_with_scores:
-                     score = sum(
-                         [s for _, s in relevant_docs_with_scores]
-                     ) / len(relevant_docs_with_scores)
+                     score = sum([
+                         s for _, s in relevant_docs_with_scores
+                     ]) / len(relevant_docs_with_scores)
                      relevancy_scores[i] = abs(1.0 - score)
                  else:
                      relevancy_scores[i] = 0.0
 
-                 retrieved_content = "\n\n".join(
-                     [
-                         doc.page_content
-                         for doc, _ in relevant_docs_with_scores
-                     ]
-                 )
+                 retrieved_content = "\n\n".join([
+                     doc.page_content for doc, _ in relevant_docs_with_scores
+                 ])
              else:
                  retrieved_content = cleaned_text
 
-             summary = chain.invoke(
-                 {
-                     "retrieved_content": retrieved_content,
-                     "context": state["context"],
-                 }
-             )
+             summary = chain.invoke({
+                 "retrieved_content": retrieved_content,
+                 "context": state["context"],
+             })
 
          except Exception as e:
              summary = f"Error summarizing paper: {e}"
@@ -376,9 +369,10 @@ class ArxivAgent(BaseAgent):
 
          chain = prompt | self.llm | StrOutputParser()
 
-         final_summary = chain.invoke(
-             {"Summaries": combined, "context": state["context"]}
-         )
+         final_summary = chain.invoke({
+             "Summaries": combined,
+             "context": state["context"],
+         })
 
          with open(self.summaries_path + "/final_summary.txt", "w") as f:
              f.write(final_summary)
@@ -406,9 +400,10 @@ class ArxivAgent(BaseAgent):
          return graph
 
      def run(self, arxiv_search_query: str, context: str) -> str:
-         result = self.graph.invoke(
-             {"query": arxiv_search_query, "context": context}
-         )
+         result = self.graph.invoke({
+             "query": arxiv_search_query,
+             "context": context,
+         })
 
          if self.summarize:
              return result.get("final_summary", "No summary generated.")
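The `arxiv_agent.py` hunks above are formatting-only: the dict argument to `invoke(...)` now opens on the same line as the call, most likely the result of a formatter-style change rather than a behavioral one. For orientation, a minimal, self-contained sketch of the `prompt | llm | StrOutputParser()` pipeline being invoked in these hunks; the model class and template text are assumptions for illustration, not ursa-ai's actual code:

    from langchain_core.output_parsers import StrOutputParser
    from langchain_core.prompts import ChatPromptTemplate
    from langchain_openai import ChatOpenAI  # any chat model can stand in here

    prompt = ChatPromptTemplate.from_template(
        "Summarize the text below for the given context.\n\n"
        "Text:\n{retrieved_content}\n\nContext: {context}"
    )
    llm = ChatOpenAI(model="gpt-4o-mini")

    # prompt | llm | parser composes a single Runnable; .invoke() takes one
    # dict whose keys fill the template variables, exactly as in the diff.
    chain = prompt | llm | StrOutputParser()
    summary = chain.invoke({
        "retrieved_content": "Example paper text...",
        "context": "high-entropy alloys",
    })
    print(summary)

Because only whitespace and brace placement change, the behavior of these functions is unchanged.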
ursa/agents/mp_agent.py CHANGED
@@ -141,9 +141,10 @@ You are a materials-science assistant. Given the following metadata about a mate
  {context}
  """)
          chain = prompt | self.llm | StrOutputParser()
-         final = chain.invoke(
-             {"summaries": combined, "context": state["context"]}
-         )
+         final = chain.invoke({
+             "summaries": combined,
+             "context": state["context"],
+         })
          return {**state, "final_summary": final}
 
      def _build_graph(self):
ursa/util/memory_logger.py CHANGED
@@ -26,20 +26,30 @@ class AgentMemory:
      * Requires `langchain-chroma`, and `chromadb`.
      """
 
+     @classmethod
+     def get_db_path(cls, path: Optional[str | Path]) -> Path:
+         match path:
+             case None:
+                 return Path.home() / ".cache" / "ursa" / "rag" / "db"
+             case str():
+                 return Path(str)
+             case Path():
+                 return path
+             case _:
+                 raise TypeError(
+                     f"Type of path is `{type(path)}` "
+                     "but `Optional[str | Path]` was expected."
+                 )
+
      def __init__(
          self,
          embedding_model,
          path: Optional[str | Path] = None,
          collection_name: str = "agent_memory",
      ) -> None:
-         self.path = (
-             Path(path)
-             if path
-             else Path(__file__).resolve().parent / "agent_memory_db"
-         )
+         self.path = self.get_db_path(path)
          self.collection_name = collection_name
          self.path.mkdir(parents=True, exist_ok=True)
-
          self.embeddings = embedding_model
 
          # If a DB already exists, load it; otherwise defer creation until `build_index`.
@@ -165,8 +175,7 @@ def delete_database(path: Optional[str | Path] = None):
          Where the on-disk Chroma DB is for deleting. If *None*, a folder called
          ``agent_memory_db`` is created in the package’s base directory.
      """
-
-     db_path = Path(path) if path else Path("~/.cache/ursa/rag/db/")
+     db_path = AgentMemory.get_db_path(path)
      if os.path.exists(db_path):
          shutil.rmtree(db_path)
      print(f"Database: {db_path} has been deleted.")
ursa_ai-0.2.9.dist-info/METADATA → ursa_ai-0.2.11.dist-info/METADATA CHANGED
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: ursa-ai
- Version: 0.2.9
+ Version: 0.2.11
  Summary: Agents for science at LANL
  Author-email: Mike Grosskopf <mikegros@lanl.gov>, Nathan Debardeleben <ndebard@lanl.gov>, Rahul Somasundaram <rsomasundaram@lanl.gov>, Isaac Michaud <imichaud@lanl.gov>, Avanish Mishra <avanish@lanl.gov>, Arthur Lui <alui@lanl.gov>, Russell Bent <rbent@lanl.gov>, Earl Lawrence <earl@lanl.gov>
  License-Expression: BSD-3-Clause
ursa_ai-0.2.9.dist-info/RECORD → ursa_ai-0.2.11.dist-info/RECORD CHANGED
@@ -1,10 +1,10 @@
- ursa/agents/__init__.py,sha256=PAPw6gjnmdA74HpiiYwtO5c6hYLgG_Yu22eu_nuGYKw,498
- ursa/agents/arxiv_agent.py,sha256=A_HVbmYOtQfalntOKSsUIkFu10oN9T6kBAvCuAz2evg,14273
+ ursa/agents/__init__.py,sha256=IdwVBLlz7QydHiFSok06McIfEJed6rxrT01hZgpQig0,1008
+ ursa/agents/arxiv_agent.py,sha256=6pycpm1hkUNjwXF6lJ4DCF5CxOiSnS5dvpW2Qes3ZD8,14125
  ursa/agents/base.py,sha256=uFhRLVzqhFbTZVA7IePKbUi03ATCXuvga7rzwaHy1B0,1321
  ursa/agents/code_review_agent.py,sha256=aUDq5gT-jdl9Qs-Wewj2oz1d60xov9sN-DOYRfGNTU0,11550
  ursa/agents/execution_agent.py,sha256=-At1EcKRHP9lYQ80jpqTPtQyPuQV-sIpf9J8LlEfWdA,16618
  ursa/agents/hypothesizer_agent.py,sha256=rSLohNQz3xvEcL_DGTFivf9q5BlX1cqlLUcts4GJIjM,23309
- ursa/agents/mp_agent.py,sha256=HTMAnv1yGs8vgRLGFFYHSbwOz24qdnB-if_JQSH3urQ,6002
+ ursa/agents/mp_agent.py,sha256=UyJSheMGHZpWQJL3EgYgPPqArfv6F8sndN05q4CPtyo,6015
  ursa/agents/planning_agent.py,sha256=AKWQJ848RLPiwQGrvDNdN9lBlf3YI5qWmt2hqXnRGj8,5426
  ursa/agents/recall_agent.py,sha256=bQk7ZJtiO5pj89A50OBDzAJ4G2F7ZdsMwmKnp1WWR7g,813
  ursa/agents/websearch_agent.py,sha256=rCv4AWbqe5Us4FmuypM6jptri21nKoNg044ncsu9u3E,8014
@@ -17,10 +17,10 @@ ursa/prompt_library/websearch_prompts.py,sha256=n4DJaYn_lIYAVtdy00CCJjT-dLWhn2JN
  ursa/tools/run_command.py,sha256=sQRuHtRyJYWEyL9dpW_Ukc-xQ5vmKKJK1i_6z3uKEfA,690
  ursa/tools/write_code.py,sha256=DtCsUMZegYm0mk-HMPG5Zo3Ba1gbGfnXHsv1NZTdDs8,1220
  ursa/util/diff_renderer.py,sha256=1L1q2qWWb8gLhR532-LgJn2TrqXDx0gUpPVOWD_sqeU,4086
- ursa/util/memory_logger.py,sha256=-4jZkMFXLnABj9x_DMGEUySVPLZaI47HrLgK69Naxw0,5731
+ ursa/util/memory_logger.py,sha256=9tNPF3Fgth4ImpNbs7Qg5Ci6Hb1ziMWsDTsqjkDi1iY,6096
  ursa/util/parse.py,sha256=M0cjyQWmjatxX4WbVmDRUiirTLyW-t_Aemlrlrsc5nA,2811
- ursa_ai-0.2.9.dist-info/licenses/LICENSE,sha256=4Vr6_u2zTHIUvYjoOBg9ztDbfpV3hyCFv3mTCS87gYU,1482
- ursa_ai-0.2.9.dist-info/METADATA,sha256=tPMX8nDJxo5ZbaOnpScf9wQrkTsUD73XrStyHxs81Eg,6848
- ursa_ai-0.2.9.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
- ursa_ai-0.2.9.dist-info/top_level.txt,sha256=OjA1gRYSUAeiXGnpqPC8iOOGfcjFO1IlP848qMnYSdY,5
- ursa_ai-0.2.9.dist-info/RECORD,,
+ ursa_ai-0.2.11.dist-info/licenses/LICENSE,sha256=4Vr6_u2zTHIUvYjoOBg9ztDbfpV3hyCFv3mTCS87gYU,1482
+ ursa_ai-0.2.11.dist-info/METADATA,sha256=apeOKu1ZbVz8Fbi4p41DsdGLkasx8yZ0uuE2BjUAmek,6849
+ ursa_ai-0.2.11.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+ ursa_ai-0.2.11.dist-info/top_level.txt,sha256=OjA1gRYSUAeiXGnpqPC8iOOGfcjFO1IlP848qMnYSdY,5
+ ursa_ai-0.2.11.dist-info/RECORD,,