mb-rag 1.1.12__tar.gz → 1.1.17__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of mb-rag might be problematic; see the package registry's advisory page for more details.

@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.2
2
2
  Name: mb_rag
3
- Version: 1.1.12
3
+ Version: 1.1.17
4
4
  Summary: RAG function file
5
5
  Author: ['Malav Bateriwala']
6
6
  Requires-Python: >=3.8
@@ -145,6 +145,7 @@ class ModelFactory:
145
145
  raise ImportError("Langchain Community package not found. Please install it using: pip install langchain_ollama")
146
146
 
147
147
  from langchain_ollama import ChatOllama
148
+ print(f"Current Ollama serve model is {os.system('ollama ps')}")
148
149
  kwargs["model"] = model_name
149
150
  return ChatOllama(**kwargs)
150
151
 
@@ -264,8 +265,10 @@ class ModelFactory:
264
265
  print("Continuing without structured output")
265
266
  message= HumanMessage(content=prompt_new,)
266
267
  response = self.model.invoke([message])
267
- return response.content
268
-
268
+ try:
269
+ return response.content
270
+ except Exception:
271
+ return response
269
272
 
270
273
  class ConversationModel:
271
274
  """
@@ -458,3 +461,105 @@ class IPythonStreamHandler(StreamingStdOutCallbackHandler):
458
461
  """Handle new token"""
459
462
  self.output += token
460
463
  display(HTML(self.output), clear=True)
464
+
465
+
466
class AgentFactory:
    """Factory for constructing chat agents of different kinds.

    Supported types are 'basic' (a ``prompt | llm`` runnable) and
    'langgraph' (a compiled LangGraph message graph). The created agent
    is stored on ``self.agent``.
    """

    def __init__(self, agent_type: str = 'basic', model_name: str = "gpt-4o", **kwargs) -> None:
        """
        Create an agent of the requested type and store it on ``self.agent``.

        Args:
            agent_type (str): Type of agent to create. Default is 'basic'.
            model_name (str): Name of the underlying chat model.
            **kwargs: Additional arguments forwarded to the agent creator.

        Raises:
            ValueError: If ``agent_type`` is unsupported or creation fails.
        """
        # Dispatch table: agent type name -> creator classmethod.
        creators = {
            'basic': self.create_basic_agent,
            'langgraph': self.create_langgraph_agent,
        }

        creator = creators.get(agent_type)
        if creator is None:
            raise ValueError(f"Unsupported agent type: {agent_type}")

        try:
            self.agent = creator(model_name, **kwargs)
        except Exception as e:
            # Chain the original exception so the root cause is preserved
            # in the traceback instead of being swallowed.
            raise ValueError(f"Error creating {agent_type} agent: {str(e)}") from e

    @classmethod
    def create_basic_agent(cls, model_name: str = "gpt-4o", **kwargs) -> Any:
        """
        Create a basic agent: a system prompt piped into the chat model.

        Args:
            model_name (str): Name of the underlying chat model.
            **kwargs: Additional arguments forwarded to ModelFactory.

        Returns:
            Runnable: ``prompt | llm`` runnable expecting a "messages" input.
        """
        llm = ModelFactory(model_name=model_name, **kwargs).model
        from langchain_core.prompts import ChatPromptTemplate, MessagesPlaceholder
        prompt = ChatPromptTemplate.from_messages([
            ("system", "You are a helpful AI assistant"),
            MessagesPlaceholder(variable_name="messages")
        ])
        return prompt | llm

    @classmethod
    def create_langgraph_agent(cls, model_name: str = "gpt-4o", **kwargs) -> Any:
        """
        Create a LangGraph agent backed by a MessageGraph.

        Args:
            model_name (str): Name of the underlying chat model.
            **kwargs: Additional arguments forwarded to ModelFactory.

        Returns:
            Any: Compiled LangGraph graph.

        Raises:
            ImportError: If the ``langgraph`` package is not installed.
        """
        if not check_package("langgraph"):
            raise ImportError("LangGraph package not found. Please install it using: pip install langgraph")

        from langgraph.graph import MessageGraph
        from langchain_core.prompts import ChatPromptTemplate, MessagesPlaceholder
        from langchain_core.messages import BaseMessage

        llm = ModelFactory(model_name=model_name, **kwargs).model

        # State container used only to annotate node signatures below.
        class GraphState:
            messages: List[BaseMessage]
            agent_state: Dict[str, Any]

        def agent(state: GraphState):
            # Render the running message history through the system prompt
            # and invoke the model on it.
            prompt = ChatPromptTemplate.from_messages([
                ("system", "You are a helpful AI assistant"),
                MessagesPlaceholder(variable_name="messages")
            ])
            return (prompt | llm).invoke({"messages": state.messages})

        def user(state: GraphState, input: str):
            # NOTE(review): MessageGraph invokes nodes with a single state
            # argument; the extra ``input`` parameter looks incompatible —
            # confirm against the langgraph version in use.
            return HumanMessage(content=input)

        graph = MessageGraph()
        graph.add_node("agent", agent)
        graph.add_node("user", user)
        graph.set_entry_point("user")

        # NOTE(review): user -> agent -> user forms a cycle with no
        # terminating edge; verify the intended stop condition before use.
        graph.add_edge("user", "agent")
        graph.add_edge("agent", "user")

        return graph.compile()
@@ -1,5 +1,5 @@
1
1
  MAJOR_VERSION = 1
2
2
  MINOR_VERSION = 1
3
- PATCH_VERSION = 12
3
+ PATCH_VERSION = 17
4
4
  version = '{}.{}.{}'.format(MAJOR_VERSION, MINOR_VERSION, PATCH_VERSION)
5
5
  __all__ = ['MAJOR_VERSION', 'MINOR_VERSION', 'PATCH_VERSION', 'version']
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.2
2
2
  Name: mb_rag
3
- Version: 1.1.12
3
+ Version: 1.1.17
4
4
  Summary: RAG function file
5
5
  Author: ['Malav Bateriwala']
6
6
  Requires-Python: >=3.8
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes