pixie-examples 0.1.0__tar.gz → 0.1.1.dev5__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (39) hide show
  1. pixie_examples-0.1.1.dev5/LICENSE +21 -0
  2. pixie_examples-0.1.1.dev5/PKG-INFO +118 -0
  3. pixie_examples-0.1.1.dev5/README.md +78 -0
  4. {pixie_examples-0.1.0 → pixie_examples-0.1.1.dev5}/examples/langchain/basic_agent.py +18 -12
  5. {pixie_examples-0.1.0 → pixie_examples-0.1.1.dev5}/examples/langchain/customer_support.py +10 -9
  6. {pixie_examples-0.1.0 → pixie_examples-0.1.1.dev5}/examples/langchain/personal_assistant.py +14 -11
  7. {pixie_examples-0.1.0 → pixie_examples-0.1.1.dev5}/examples/langchain/sql_agent.py +17 -14
  8. {pixie_examples-0.1.0/examples/langchain → pixie_examples-0.1.1.dev5/examples/langgraph}/langgraph_rag.py +44 -9
  9. {pixie_examples-0.1.0/examples/langchain → pixie_examples-0.1.1.dev5/examples/langgraph}/langgraph_sql_agent.py +22 -8
  10. {pixie_examples-0.1.0 → pixie_examples-0.1.1.dev5}/examples/openai_agents_sdk/README.md +3 -3
  11. {pixie_examples-0.1.0 → pixie_examples-0.1.1.dev5}/examples/openai_agents_sdk/customer_service.py +5 -6
  12. {pixie_examples-0.1.0 → pixie_examples-0.1.1.dev5}/examples/openai_agents_sdk/financial_research_agent.py +5 -3
  13. {pixie_examples-0.1.0 → pixie_examples-0.1.1.dev5}/examples/openai_agents_sdk/llm_as_a_judge.py +3 -3
  14. {pixie_examples-0.1.0 → pixie_examples-0.1.1.dev5}/examples/openai_agents_sdk/routing.py +11 -10
  15. {pixie_examples-0.1.0 → pixie_examples-0.1.1.dev5}/examples/pydantic_ai/README.md +2 -2
  16. pixie_examples-0.1.1.dev5/examples/pydantic_ai/__init__.py +0 -0
  17. {pixie_examples-0.1.0 → pixie_examples-0.1.1.dev5}/examples/pydantic_ai/bank_support.py +5 -22
  18. {pixie_examples-0.1.0 → pixie_examples-0.1.1.dev5}/examples/pydantic_ai/flight_booking.py +5 -30
  19. {pixie_examples-0.1.0 → pixie_examples-0.1.1.dev5}/examples/pydantic_ai/question_graph.py +4 -22
  20. {pixie_examples-0.1.0 → pixie_examples-0.1.1.dev5}/examples/pydantic_ai/sql_gen.py +4 -5
  21. {pixie_examples-0.1.0 → pixie_examples-0.1.1.dev5}/examples/pydantic_ai/structured_output.py +4 -6
  22. pixie_examples-0.1.1.dev5/examples/quickstart/__init__.py +0 -0
  23. pixie_examples-0.1.1.dev5/examples/quickstart/chatbot.py +25 -0
  24. pixie_examples-0.1.1.dev5/examples/quickstart/sleepy_poet.py +96 -0
  25. {pixie_examples-0.1.0 → pixie_examples-0.1.1.dev5}/examples/quickstart/weather_agent.py +28 -54
  26. pixie_examples-0.1.1.dev5/examples/sql_utils.py +241 -0
  27. pixie_examples-0.1.1.dev5/pyproject.toml +46 -0
  28. pixie_examples-0.1.0/PKG-INFO +0 -29
  29. pixie_examples-0.1.0/README.md +0 -1
  30. pixie_examples-0.1.0/examples/openai_agents_sdk/__init__.py +0 -27
  31. pixie_examples-0.1.0/examples/quickstart/chatbot.py +0 -28
  32. pixie_examples-0.1.0/examples/quickstart/sleepy_haiku_agent.py +0 -79
  33. pixie_examples-0.1.0/pyproject.toml +0 -30
  34. {pixie_examples-0.1.0 → pixie_examples-0.1.1.dev5}/examples/__init__.py +0 -0
  35. {pixie_examples-0.1.0 → pixie_examples-0.1.1.dev5}/examples/langchain/README.md +0 -0
  36. {pixie_examples-0.1.0 → pixie_examples-0.1.1.dev5}/examples/langchain/__init__.py +0 -0
  37. {pixie_examples-0.1.0/examples/pydantic_ai → pixie_examples-0.1.1.dev5/examples/langgraph}/__init__.py +0 -0
  38. {pixie_examples-0.1.0/examples/quickstart → pixie_examples-0.1.1.dev5/examples/openai_agents_sdk}/__init__.py +0 -0
  39. {pixie_examples-0.1.0 → pixie_examples-0.1.1.dev5}/examples/pydantic_ai/.env.example +0 -0
@@ -0,0 +1,21 @@
1
+ MIT License
2
+
3
+ Copyright (c) 2022 Finto Technologies
4
+
5
+ Permission is hereby granted, free of charge, to any person obtaining a copy
6
+ of this software and associated documentation files (the "Software"), to deal
7
+ in the Software without restriction, including without limitation the rights
8
+ to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9
+ copies of the Software, and to permit persons to whom the Software is
10
+ furnished to do so, subject to the following conditions:
11
+
12
+ The above copyright notice and this permission notice shall be included in all
13
+ copies or substantial portions of the Software.
14
+
15
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16
+ IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17
+ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18
+ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19
+ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20
+ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21
+ SOFTWARE.
@@ -0,0 +1,118 @@
1
+ Metadata-Version: 2.4
2
+ Name: pixie-examples
3
+ Version: 0.1.1.dev5
4
+ Summary: examples for using Pixie
5
+ License: MIT
6
+ License-File: LICENSE
7
+ Author: Yiou Li
8
+ Author-email: yol@gopixie.ai
9
+ Requires-Python: >=3.11,<3.14
10
+ Classifier: License :: OSI Approved :: MIT License
11
+ Classifier: Programming Language :: Python :: 3
12
+ Classifier: Programming Language :: Python :: 3.11
13
+ Classifier: Programming Language :: Python :: 3.12
14
+ Classifier: Programming Language :: Python :: 3.13
15
+ Requires-Dist: beautifulsoup4 (>=4.14.3,<5.0.0)
16
+ Requires-Dist: langchain (>=1.2.3,<2.0.0)
17
+ Requires-Dist: langchain-openai (>=1.1.7,<2.0.0)
18
+ Requires-Dist: langchain-text-splitters (>=1.1.0,<2.0.0)
19
+ Requires-Dist: langgraph (>=1.0.5,<2.0.0)
20
+ Requires-Dist: lxml (>=6.0.2,<7.0.0)
21
+ Requires-Dist: numpy (>=2.4.1,<3.0.0)
22
+ Requires-Dist: openai-agents (>=0.6.5,<0.7.0)
23
+ Requires-Dist: openinference-instrumentation-crewai (>=0.1.17,<0.2.0)
24
+ Requires-Dist: openinference-instrumentation-dspy (>=0.1.33,<0.2.0)
25
+ Requires-Dist: openinference-instrumentation-google-adk (>=0.1.8,<0.2.0)
26
+ Requires-Dist: openinference-instrumentation-openai-agents (>=1.4.0,<2.0.0)
27
+ Requires-Dist: pixie-sdk (>=0.1.1.dev13,<0.2.0)
28
+ Requires-Dist: pydantic (>=2.7.4,<3.0.0)
29
+ Requires-Dist: pydantic-ai-slim (>=1.39.0,<2.0.0)
30
+ Requires-Dist: pymarkdownlnt (>=0.9.34,<0.10.0)
31
+ Requires-Dist: requests (>=2.32.5,<3.0.0)
32
+ Requires-Dist: sqlalchemy (>=2.0.45,<3.0.0)
33
+ Project-URL: Changelog, https://github.com/yiouli/pixie-examples/commits/main/
34
+ Project-URL: Documentation, https://yiouli.github.io/pixie-sdk-py/
35
+ Project-URL: Homepage, https://gopixie.ai/?url=https%3A%2F%2Fdemo.yiouli.us%2Fgraphql
36
+ Project-URL: Issues, https://github.com/yiouli/pixie-examples/issues
37
+ Project-URL: Repository, https://github.com/yiouli/pixie-examples
38
+ Description-Content-Type: text/markdown
39
+
40
+ # Pixie Examples
41
+
42
+ [![MIT License](https://img.shields.io/badge/License-MIT-red.svg?style=flat-square)](https://opensource.org/licenses/MIT)
43
+ [![Python Version](https://img.shields.io/badge/python-3.11%2B-blue?style=flat-square)](https://www.python.org/downloads/)
44
+ [![Discord](https://img.shields.io/discord/1459772566528069715?style=flat-square&logo=Discord&logoColor=white&label=Discord&color=%23434EE4)](https://discord.gg/YMNYu6Z3)
45
+
46
+ This repository contains a collection of example applications integrated with [**Pixie SDK**](https://github.com/yiouli/pixie-sdk-py) for interactive debugging.
47
+
48
+ ## Get Started
49
+
50
+ > You can play with the demo site [here](https://gopixie.ai/?url=https://demo.yiouli.us/graphql) without any setup.
51
+
52
+ ### 1. Setup
53
+
54
+ Clone this repository:
55
+
56
+ Install `pixie-examples` python package:
57
+
58
+ ```bash
59
+ pip install pixie-examples
60
+ ```
61
+
62
+ Create a `.env` file with your API keys:
63
+
64
+ ```ini
65
+ # .env
66
+ OPENAI_API_KEY=...
67
+ # Add other API keys as needed for specific examples
68
+ ```
69
+
70
+ Start the Pixie server:
71
+
72
+ ```bash
73
+ pixie
74
+ ```
75
+
76
+ ### 2. Debug with Web UI
77
+
78
+ Visit [gopixie.ai](https://gopixie.ai) to interact with and debug your applications through the web interface.
79
+
80
+ ## Important Links
81
+
82
+ - [**Pixie SDK**](https://github.com/yiouli/pixie-sdk-py)
83
+ - [**Documentation**](https://yiouli.github.io/pixie-sdk-py/)
84
+ - [**Discord**](https://discord.gg/YMNYu6Z3)
85
+
86
+ ## Examples Catalog
87
+
88
+ ### Quickstart
89
+
90
+ - **Basic Example**: Simple hello world application to get started with Pixie SDK
91
+
92
+ ### Pydantic AI Examples
93
+
94
+ - **Bank Support**: Multi-turn chatbot for banking customer support
95
+ - **Flight Booking**: Multi-agent system for flight booking
96
+ - **Question Graph**: Graph-based question answering system
97
+ - **SQL Generation**: Multi-step workflow for generating SQL queries
98
+ - **Structured Output**: Examples of structured data handling
99
+
100
+ ### OpenAI Agents SDK Examples
101
+
102
+ - **Customer Service**: Multi-agent customer service system
103
+ - **Financial Research Agent**: Multi-step financial research workflow
104
+ - **LLM-as-a-Judge**: Evaluation and judging patterns
105
+ - **Routing**: Agent routing and handoffs
106
+
107
+ ### LangChain Examples
108
+
109
+ - **Basic Agent**: Simple LangChain agent integration
110
+ - **Customer Support**: Customer support chatbot
111
+ - **Personal Assistant**: Multi-agent personal assistant
112
+ - **SQL Agent**: SQL query generation with LangChain
113
+
114
+ ### LangGraph Examples
115
+
116
+ - **RAG System**: Retrieval-augmented generation with LangGraph
117
+ - **SQL Agent**: SQL agent built with LangGraph state machines
118
+
@@ -0,0 +1,78 @@
1
+ # Pixie Examples
2
+
3
+ [![MIT License](https://img.shields.io/badge/License-MIT-red.svg?style=flat-square)](https://opensource.org/licenses/MIT)
4
+ [![Python Version](https://img.shields.io/badge/python-3.11%2B-blue?style=flat-square)](https://www.python.org/downloads/)
5
+ [![Discord](https://img.shields.io/discord/1459772566528069715?style=flat-square&logo=Discord&logoColor=white&label=Discord&color=%23434EE4)](https://discord.gg/YMNYu6Z3)
6
+
7
+ This repository contains a collection of example applications integrated with [**Pixie SDK**](https://github.com/yiouli/pixie-sdk-py) for interactive debugging.
8
+
9
+ ## Get Started
10
+
11
+ > You can play with the demo site [here](https://gopixie.ai/?url=https://demo.yiouli.us/graphql) without any setup.
12
+
13
+ ### 1. Setup
14
+
15
+ Clone this repository:
16
+
17
+ Install `pixie-examples` python package:
18
+
19
+ ```bash
20
+ pip install pixie-examples
21
+ ```
22
+
23
+ Create a `.env` file with your API keys:
24
+
25
+ ```ini
26
+ # .env
27
+ OPENAI_API_KEY=...
28
+ # Add other API keys as needed for specific examples
29
+ ```
30
+
31
+ Start the Pixie server:
32
+
33
+ ```bash
34
+ pixie
35
+ ```
36
+
37
+ ### 2. Debug with Web UI
38
+
39
+ Visit [gopixie.ai](https://gopixie.ai) to interact with and debug your applications through the web interface.
40
+
41
+ ## Important Links
42
+
43
+ - [**Pixie SDK**](https://github.com/yiouli/pixie-sdk-py)
44
+ - [**Documentation**](https://yiouli.github.io/pixie-sdk-py/)
45
+ - [**Discord**](https://discord.gg/YMNYu6Z3)
46
+
47
+ ## Examples Catalog
48
+
49
+ ### Quickstart
50
+
51
+ - **Basic Example**: Simple hello world application to get started with Pixie SDK
52
+
53
+ ### Pydantic AI Examples
54
+
55
+ - **Bank Support**: Multi-turn chatbot for banking customer support
56
+ - **Flight Booking**: Multi-agent system for flight booking
57
+ - **Question Graph**: Graph-based question answering system
58
+ - **SQL Generation**: Multi-step workflow for generating SQL queries
59
+ - **Structured Output**: Examples of structured data handling
60
+
61
+ ### OpenAI Agents SDK Examples
62
+
63
+ - **Customer Service**: Multi-agent customer service system
64
+ - **Financial Research Agent**: Multi-step financial research workflow
65
+ - **LLM-as-a-Judge**: Evaluation and judging patterns
66
+ - **Routing**: Agent routing and handoffs
67
+
68
+ ### LangChain Examples
69
+
70
+ - **Basic Agent**: Simple LangChain agent integration
71
+ - **Customer Support**: Customer support chatbot
72
+ - **Personal Assistant**: Multi-agent personal assistant
73
+ - **SQL Agent**: SQL query generation with LangChain
74
+
75
+ ### LangGraph Examples
76
+
77
+ - **RAG System**: Retrieval-augmented generation with LangGraph
78
+ - **SQL Agent**: SQL agent built with LangGraph state machines
@@ -5,10 +5,14 @@ This example demonstrates a simple agent that can answer questions and call tool
5
5
  Based on: https://docs.langchain.com/oss/python/langchain/quickstart
6
6
  """
7
7
 
8
- from types import NoneType
9
8
  from langchain.agents import create_agent
10
9
  from langchain.chat_models import init_chat_model
11
- from pixie import pixie_app, PixieGenerator, UserInputRequirement
10
+
11
+ from langfuse.langchain import CallbackHandler
12
+ import pixie
13
+
14
+
15
+ langfuse_handler = CallbackHandler()
12
16
 
13
17
 
14
18
  def get_weather(city: str) -> str:
@@ -16,8 +20,8 @@ def get_weather(city: str) -> str:
16
20
  return f"It's always sunny in {city}!"
17
21
 
18
22
 
19
- @pixie_app
20
- async def basic_weather_agent(query: str) -> str:
23
+ @pixie.app
24
+ async def langchain_basic_weather_agent(query: str) -> str:
21
25
  """A simple weather agent that can answer questions using tools.
22
26
 
23
27
  Args:
@@ -37,21 +41,21 @@ async def basic_weather_agent(query: str) -> str:
37
41
  )
38
42
 
39
43
  # Run the agent
40
- result = agent.invoke({"messages": [{"role": "user", "content": query}]})
44
+ result = agent.invoke(
45
+ {"messages": [{"role": "user", "content": query}]},
46
+ config={"callbacks": [langfuse_handler]},
47
+ )
41
48
 
42
49
  # Return the final response
43
50
  return result["messages"][-1].content
44
51
 
45
52
 
46
- @pixie_app
47
- async def interactive_weather_agent(_: NoneType) -> PixieGenerator[str, str]:
53
+ @pixie.app
54
+ async def langchain_interactive_weather_agent() -> pixie.PixieGenerator[str, str]:
48
55
  """An interactive weather chatbot that maintains conversation.
49
56
 
50
57
  This agent can have multi-turn conversations with the user.
51
58
 
52
- Args:
53
- _: No initial input required
54
-
55
59
  Yields:
56
60
  AI responses to user questions
57
61
  """
@@ -73,7 +77,7 @@ async def interactive_weather_agent(_: NoneType) -> PixieGenerator[str, str]:
73
77
 
74
78
  while True:
75
79
  # Get user input
76
- user_query = yield UserInputRequirement(str)
80
+ user_query = yield pixie.InputRequired(str)
77
81
 
78
82
  # Check for exit commands
79
83
  if user_query.lower() in {"exit", "quit", "bye", "goodbye"}:
@@ -84,7 +88,9 @@ async def interactive_weather_agent(_: NoneType) -> PixieGenerator[str, str]:
84
88
  messages.append({"role": "user", "content": user_query})
85
89
 
86
90
  # Run agent with full conversation history
87
- result = agent.invoke({"messages": messages})
91
+ result = agent.invoke(
92
+ {"messages": messages}, config={"callbacks": [langfuse_handler]}
93
+ )
88
94
 
89
95
  # Update history with AI response
90
96
  messages = result["messages"]
@@ -7,7 +7,6 @@ as it moves through different states of a workflow.
7
7
  Based on: https://docs.langchain.com/oss/python/langchain/multi-agent/handoffs-customer-support
8
8
  """
9
9
 
10
- from types import NoneType
11
10
  from typing import Literal, NotRequired
12
11
  from langchain.agents import create_agent, AgentState
13
12
  from langchain.chat_models import init_chat_model
@@ -16,7 +15,12 @@ from langchain.agents.middleware import wrap_model_call, ModelRequest, ModelResp
16
15
  from langgraph.checkpoint.memory import InMemorySaver
17
16
  from langgraph.types import Command
18
17
  from typing import Callable
19
- from pixie import pixie_app, PixieGenerator, UserInputRequirement
18
+
19
+ from langfuse.langchain import CallbackHandler
20
+ import pixie
21
+
22
+
23
+ langfuse_handler = CallbackHandler()
20
24
 
21
25
 
22
26
  # Define the possible workflow steps
@@ -168,8 +172,8 @@ def apply_step_config(
168
172
  return handler(request)
169
173
 
170
174
 
171
- @pixie_app
172
- async def customer_support(_: NoneType) -> PixieGenerator[str, str]:
175
+ @pixie.app
176
+ async def langchain_customer_support() -> pixie.PixieGenerator[str, str]:
173
177
  """Customer support agent with state machine workflow.
174
178
 
175
179
  The agent progresses through three stages:
@@ -177,9 +181,6 @@ async def customer_support(_: NoneType) -> PixieGenerator[str, str]:
177
181
  2. Issue classification (hardware/software)
178
182
  3. Resolution (solution or escalation)
179
183
 
180
- Args:
181
- _: No initial input required
182
-
183
184
  Yields:
184
185
  AI responses guiding the support workflow
185
186
  """
@@ -208,11 +209,11 @@ async def customer_support(_: NoneType) -> PixieGenerator[str, str]:
208
209
 
209
210
  # Initialize conversation
210
211
  thread_id = "support_thread"
211
- config = {"configurable": {"thread_id": thread_id}}
212
+ config = {"configurable": {"thread_id": thread_id}, "callbacks": [langfuse_handler]}
212
213
 
213
214
  while True:
214
215
  # Get user input
215
- user_message = yield UserInputRequirement(str)
216
+ user_message = yield pixie.InputRequired(str)
216
217
 
217
218
  # Check for exit
218
219
  if user_message.lower() in {"exit", "quit", "bye"}:
@@ -7,12 +7,16 @@ coordinates specialized worker agents (calendar and email agents).
7
7
  Based on: https://docs.langchain.com/oss/python/langchain/multi-agent/subagents-personal-assistant
8
8
  """
9
9
 
10
- from types import NoneType
11
10
  from langchain.agents import create_agent
12
11
  from langchain.chat_models import init_chat_model
13
12
  from langchain.tools import tool
14
13
  from langgraph.checkpoint.memory import InMemorySaver
15
- from pixie import pixie_app, PixieGenerator, UserInputRequirement
14
+
15
+ from langfuse.langchain import CallbackHandler
16
+ import pixie
17
+
18
+
19
+ langfuse_handler = CallbackHandler()
16
20
 
17
21
 
18
22
  # Define calendar tools (stubs for demonstration)
@@ -68,17 +72,14 @@ SUPERVISOR_PROMPT = (
68
72
  )
69
73
 
70
74
 
71
- @pixie_app
72
- async def personal_assistant(_: NoneType) -> PixieGenerator[str, str]:
75
+ @pixie.app
76
+ async def langchain_personal_assistant() -> pixie.PixieGenerator[str, str]:
73
77
  """Multi-agent personal assistant with calendar and email subagents.
74
78
 
75
79
  The supervisor coordinates specialized worker agents:
76
80
  - Calendar agent: handles scheduling and availability
77
81
  - Email agent: manages communication and drafts
78
82
 
79
- Args:
80
- _: No initial input required
81
-
82
83
  Yields:
83
84
  AI responses to user requests
84
85
  """
@@ -108,7 +109,8 @@ async def personal_assistant(_: NoneType) -> PixieGenerator[str, str]:
108
109
  Handles date/time parsing, availability checking, and event creation.
109
110
  """
110
111
  result = calendar_agent.invoke(
111
- {"messages": [{"role": "user", "content": request}]}
112
+ {"messages": [{"role": "user", "content": request}]},
113
+ config={"callbacks": [langfuse_handler]},
112
114
  )
113
115
  return result["messages"][-1].content
114
116
 
@@ -120,7 +122,8 @@ async def personal_assistant(_: NoneType) -> PixieGenerator[str, str]:
120
122
  communication. Handles recipient extraction, subject generation, and email composition.
121
123
  """
122
124
  result = email_agent.invoke(
123
- {"messages": [{"role": "user", "content": request}]}
125
+ {"messages": [{"role": "user", "content": request}]},
126
+ config={"callbacks": [langfuse_handler]},
124
127
  )
125
128
  return result["messages"][-1].content
126
129
 
@@ -140,11 +143,11 @@ async def personal_assistant(_: NoneType) -> PixieGenerator[str, str]:
140
143
 
141
144
  # Initialize conversation
142
145
  thread_id = "personal_assistant_thread"
143
- config = {"configurable": {"thread_id": thread_id}}
146
+ config = {"configurable": {"thread_id": thread_id}, "callbacks": [langfuse_handler]}
144
147
 
145
148
  while True:
146
149
  # Get user request
147
- user_request = yield UserInputRequirement(str)
150
+ user_request = yield pixie.InputRequired(str)
148
151
 
149
152
  # Check for exit
150
153
  if user_request.lower() in {"exit", "quit", "bye", "goodbye"}:
@@ -17,13 +17,16 @@ queries. Make sure database connection permissions are scoped as narrowly as pos
17
17
 
18
18
  import pathlib
19
19
  import requests
20
- from types import NoneType
21
20
  from langchain.agents import create_agent
22
21
  from langchain.chat_models import init_chat_model
23
- from langchain_community.utilities import SQLDatabase
24
- from langchain_community.agent_toolkits import SQLDatabaseToolkit
25
22
  from langgraph.checkpoint.memory import InMemorySaver
26
- from pixie import pixie_app, PixieGenerator, UserInputRequirement
23
+
24
+ from langfuse.langchain import CallbackHandler
25
+ import pixie
26
+ from ..sql_utils import SQLDatabase, SQLDatabaseToolkit
27
+
28
+
29
+ langfuse_handler = CallbackHandler()
27
30
 
28
31
 
29
32
  # System prompt for SQL agent
@@ -72,8 +75,8 @@ def setup_database():
72
75
  return SQLDatabase.from_uri("sqlite:///Chinook.db")
73
76
 
74
77
 
75
- @pixie_app
76
- async def sql_query_agent(question: str) -> str:
78
+ @pixie.app
79
+ async def langchain_sql_query_agent(question: str) -> str:
77
80
  """SQL database query agent that can answer questions about the Chinook database.
78
81
 
79
82
  The Chinook database represents a digital media store with tables for artists,
@@ -102,21 +105,21 @@ async def sql_query_agent(question: str) -> str:
102
105
  agent = create_agent(model, tools, system_prompt=system_prompt)
103
106
 
104
107
  # Run the agent
105
- result = agent.invoke({"messages": [{"role": "user", "content": question}]})
108
+ result = agent.invoke(
109
+ {"messages": [{"role": "user", "content": question}]},
110
+ config={"callbacks": [langfuse_handler]},
111
+ )
106
112
 
107
113
  # Return the final answer
108
114
  return result["messages"][-1].content
109
115
 
110
116
 
111
- @pixie_app
112
- async def interactive_sql_agent(_: NoneType) -> PixieGenerator[str, str]:
117
+ @pixie.app
118
+ async def langchain_interactive_sql_agent() -> pixie.PixieGenerator[str, str]:
113
119
  """Interactive SQL database query agent with multi-turn conversation.
114
120
 
115
121
  This agent maintains conversation history and can handle follow-up questions.
116
122
 
117
- Args:
118
- _: No initial input required
119
-
120
123
  Yields:
121
124
  AI responses to database queries
122
125
  """
@@ -153,11 +156,11 @@ Ask me any question about the data!"""
153
156
 
154
157
  # Initialize conversation
155
158
  thread_id = "sql_thread"
156
- config = {"configurable": {"thread_id": thread_id}}
159
+ config = {"configurable": {"thread_id": thread_id}, "callbacks": [langfuse_handler]}
157
160
 
158
161
  while True:
159
162
  # Get user question
160
- user_question = yield UserInputRequirement(str)
163
+ user_question = yield pixie.InputRequired(str)
161
164
 
162
165
  # Check for exit
163
166
  if user_question.lower() in {"exit", "quit", "bye"}:
@@ -15,13 +15,36 @@ from typing import Literal
15
15
  from langchain.chat_models import init_chat_model
16
16
  from langchain.tools import tool
17
17
  from langchain.messages import HumanMessage
18
- from langchain_community.document_loaders import WebBaseLoader
19
18
  from langchain_text_splitters import RecursiveCharacterTextSplitter
20
19
  from langchain_core.vectorstores import InMemoryVectorStore
20
+ from langchain_core.documents import Document
21
21
  from langchain_openai import OpenAIEmbeddings
22
22
  from langgraph.graph import END, START, MessagesState, StateGraph
23
23
  from langgraph.prebuilt import ToolNode, tools_condition
24
- from pixie import pixie_app
24
+
25
+ from langfuse.langchain import CallbackHandler
26
+ import pixie
27
+ import requests
28
+ from bs4 import BeautifulSoup
29
+
30
+
31
+ langfuse_handler = CallbackHandler()
32
+
33
+
34
+ def load_web_page(url: str) -> list[Document]:
35
+ """Simple web page loader using requests and BeautifulSoup.
36
+
37
+ Replaces langchain_community.document_loaders.WebBaseLoader
38
+ to avoid the langchain-community dependency.
39
+ """
40
+ response = requests.get(url)
41
+ response.raise_for_status()
42
+ soup = BeautifulSoup(response.content, "html.parser")
43
+
44
+ # Extract text from the page
45
+ text = soup.get_text(separator="\n", strip=True)
46
+
47
+ return [Document(page_content=text, metadata={"source": url})]
25
48
 
26
49
 
27
50
  def setup_vectorstore():
@@ -34,7 +57,7 @@ def setup_vectorstore():
34
57
  "https://lilianweng.github.io/posts/2024-04-12-diffusion-video/",
35
58
  ]
36
59
 
37
- docs = [WebBaseLoader(url).load() for url in urls]
60
+ docs = [load_web_page(url) for url in urls]
38
61
  docs_list = [item for sublist in docs for item in sublist]
39
62
 
40
63
  print("Splitting documents...")
@@ -66,7 +89,9 @@ def create_rag_graph(retriever, model):
66
89
  # Node: Generate query or respond
67
90
  def generate_query_or_respond(state: MessagesState):
68
91
  """Call the model to generate a response or use retrieval tool."""
69
- response = model.bind_tools([retriever_tool]).invoke(state["messages"])
92
+ response = model.bind_tools([retriever_tool]).invoke(
93
+ state["messages"], config={"callbacks": [langfuse_handler]}
94
+ )
70
95
  return {"messages": [response]}
71
96
 
72
97
  # Grade documents schema
@@ -97,7 +122,8 @@ def create_rag_graph(retriever, model):
97
122
 
98
123
  prompt = GRADE_PROMPT.format(question=question, context=context)
99
124
  response = grader_model.with_structured_output(GradeDocuments).invoke(
100
- [{"role": "user", "content": prompt}]
125
+ [{"role": "user", "content": prompt}],
126
+ config={"callbacks": [langfuse_handler]},
101
127
  )
102
128
 
103
129
  score = response.binary_score # type: ignore
@@ -121,7 +147,10 @@ def create_rag_graph(retriever, model):
121
147
  messages = state["messages"]
122
148
  question = messages[0].content
123
149
  prompt = REWRITE_PROMPT.format(question=question)
124
- response = model.invoke([{"role": "user", "content": prompt}])
150
+ response = model.invoke(
151
+ [{"role": "user", "content": prompt}],
152
+ config={"callbacks": [langfuse_handler]},
153
+ )
125
154
  return {"messages": [HumanMessage(content=response.content)]}
126
155
 
127
156
  # Node: Generate answer
@@ -139,7 +168,10 @@ def create_rag_graph(retriever, model):
139
168
  question = state["messages"][0].content
140
169
  context = state["messages"][-1].content
141
170
  prompt = GENERATE_PROMPT.format(question=question, context=context)
142
- response = model.invoke([{"role": "user", "content": prompt}])
171
+ response = model.invoke(
172
+ [{"role": "user", "content": prompt}],
173
+ config={"callbacks": [langfuse_handler]},
174
+ )
143
175
  return {"messages": [response]}
144
176
 
145
177
  # Build graph
@@ -172,7 +204,7 @@ def create_rag_graph(retriever, model):
172
204
  return workflow.compile()
173
205
 
174
206
 
175
- @pixie_app
207
+ @pixie.app
176
208
  async def langgraph_rag_agent(question: str) -> str:
177
209
  """Agentic RAG system that can answer questions about Lilian Weng's blog posts.
178
210
 
@@ -200,7 +232,10 @@ async def langgraph_rag_agent(question: str) -> str:
200
232
  print(f"Processing question: {question}")
201
233
 
202
234
  # Run the graph
203
- result = graph.invoke({"messages": [{"role": "user", "content": question}]}) # type: ignore
235
+ result = graph.invoke(
236
+ {"messages": [{"role": "user", "content": question}]}, # type: ignore
237
+ config={"callbacks": [langfuse_handler]},
238
+ )
204
239
 
205
240
  # Return the final answer
206
241
  return result["messages"][-1].content
@@ -13,12 +13,16 @@ import requests
13
13
  from typing import Literal
14
14
  from langchain.chat_models import init_chat_model
15
15
  from langchain.messages import AIMessage
16
- from langchain_community.utilities import SQLDatabase
17
- from langchain_community.agent_toolkits import SQLDatabaseToolkit
18
16
  from langgraph.graph import START, MessagesState, StateGraph
17
+ from ..sql_utils import SQLDatabase, SQLDatabaseToolkit
19
18
  from langgraph.prebuilt import ToolNode
20
19
  from langgraph.checkpoint.memory import InMemorySaver
21
- from pixie import pixie_app
20
+
21
+ from langfuse.langchain import CallbackHandler
22
+ import pixie
23
+
24
+
25
+ langfuse_handler = CallbackHandler()
22
26
 
23
27
 
24
28
  def setup_database():
@@ -74,7 +78,9 @@ def create_sql_graph(db: SQLDatabase, model):
74
78
  # Node: Force model to call get_schema
75
79
  def call_get_schema(state: MessagesState):
76
80
  llm_with_tools = model.bind_tools([get_schema_tool], tool_choice="any")
77
- response = llm_with_tools.invoke(state["messages"])
81
+ response = llm_with_tools.invoke(
82
+ state["messages"], config={"callbacks": [langfuse_handler]}
83
+ )
78
84
  return {"messages": [response]}
79
85
 
80
86
  # Node: Generate query
@@ -95,7 +101,10 @@ DO NOT make any DML statements (INSERT, UPDATE, DELETE, DROP etc.) to the databa
95
101
  def generate_query(state: MessagesState):
96
102
  system_message = {"role": "system", "content": generate_query_prompt}
97
103
  llm_with_tools = model.bind_tools([run_query_tool])
98
- response = llm_with_tools.invoke([system_message] + state["messages"])
104
+ response = llm_with_tools.invoke(
105
+ [system_message] + state["messages"],
106
+ config={"callbacks": [langfuse_handler]},
107
+ )
99
108
  return {"messages": [response]}
100
109
 
101
110
  # Node: Check query
@@ -130,7 +139,9 @@ You will call the appropriate tool to execute the query after running this check
130
139
  # Fallback if no tool calls
131
140
  user_message = {"role": "user", "content": "Please check the query"}
132
141
  llm_with_tools = model.bind_tools([run_query_tool], tool_choice="any")
133
- response = llm_with_tools.invoke([system_message, user_message])
142
+ response = llm_with_tools.invoke(
143
+ [system_message, user_message], config={"callbacks": [langfuse_handler]}
144
+ )
134
145
  if isinstance(last_message, AI):
135
146
  response.id = last_message.id
136
147
  return {"messages": [response]}
@@ -167,7 +178,7 @@ You will call the appropriate tool to execute the query after running this check
167
178
  return builder.compile(checkpointer=InMemorySaver())
168
179
 
169
180
 
170
- @pixie_app
181
+ @pixie.app
171
182
  async def langgraph_sql_agent(question: str) -> str:
172
183
  """Custom SQL agent built with LangGraph primitives.
173
184
 
@@ -197,7 +208,10 @@ async def langgraph_sql_agent(question: str) -> str:
197
208
  # Run the graph
198
209
  result = graph.invoke(
199
210
  {"messages": [{"role": "user", "content": question}]}, # type: ignore
200
- {"configurable": {"thread_id": "langgraph_sql"}},
211
+ {
212
+ "configurable": {"thread_id": "langgraph_sql"},
213
+ "callbacks": [langfuse_handler],
214
+ },
201
215
  )
202
216
 
203
217
  # Return the final message