pydantic-ai-examples 0.3.3__tar.gz → 0.3.5__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of pydantic-ai-examples might be problematic.
- {pydantic_ai_examples-0.3.3 → pydantic_ai_examples-0.3.5}/PKG-INFO +4 -3
- pydantic_ai_examples-0.3.5/pydantic_ai_examples/slack_lead_qualifier/__init__.py +0 -0
- pydantic_ai_examples-0.3.5/pydantic_ai_examples/slack_lead_qualifier/agent.py +47 -0
- pydantic_ai_examples-0.3.5/pydantic_ai_examples/slack_lead_qualifier/app.py +36 -0
- pydantic_ai_examples-0.3.5/pydantic_ai_examples/slack_lead_qualifier/functions.py +85 -0
- pydantic_ai_examples-0.3.5/pydantic_ai_examples/slack_lead_qualifier/modal.py +66 -0
- pydantic_ai_examples-0.3.5/pydantic_ai_examples/slack_lead_qualifier/models.py +46 -0
- pydantic_ai_examples-0.3.5/pydantic_ai_examples/slack_lead_qualifier/slack.py +30 -0
- pydantic_ai_examples-0.3.5/pydantic_ai_examples/slack_lead_qualifier/store.py +31 -0
- {pydantic_ai_examples-0.3.3 → pydantic_ai_examples-0.3.5}/pyproject.toml +2 -1
- {pydantic_ai_examples-0.3.3 → pydantic_ai_examples-0.3.5}/.gitignore +0 -0
- {pydantic_ai_examples-0.3.3 → pydantic_ai_examples-0.3.5}/LICENSE +0 -0
- {pydantic_ai_examples-0.3.3 → pydantic_ai_examples-0.3.5}/README.md +0 -0
- {pydantic_ai_examples-0.3.3 → pydantic_ai_examples-0.3.5}/pydantic_ai_examples/__main__.py +0 -0
- {pydantic_ai_examples-0.3.3 → pydantic_ai_examples-0.3.5}/pydantic_ai_examples/bank_support.py +0 -0
- {pydantic_ai_examples-0.3.3 → pydantic_ai_examples-0.3.5}/pydantic_ai_examples/chat_app.html +0 -0
- {pydantic_ai_examples-0.3.3 → pydantic_ai_examples-0.3.5}/pydantic_ai_examples/chat_app.py +0 -0
- {pydantic_ai_examples-0.3.3 → pydantic_ai_examples-0.3.5}/pydantic_ai_examples/chat_app.ts +0 -0
- {pydantic_ai_examples-0.3.3 → pydantic_ai_examples-0.3.5}/pydantic_ai_examples/evals/__init__.py +0 -0
- {pydantic_ai_examples-0.3.3 → pydantic_ai_examples-0.3.5}/pydantic_ai_examples/evals/agent.py +0 -0
- {pydantic_ai_examples-0.3.3 → pydantic_ai_examples-0.3.5}/pydantic_ai_examples/evals/custom_evaluators.py +0 -0
- {pydantic_ai_examples-0.3.3 → pydantic_ai_examples-0.3.5}/pydantic_ai_examples/evals/datasets/time_range_v1.yaml +0 -0
- {pydantic_ai_examples-0.3.3 → pydantic_ai_examples-0.3.5}/pydantic_ai_examples/evals/datasets/time_range_v1_schema.json +0 -0
- {pydantic_ai_examples-0.3.3 → pydantic_ai_examples-0.3.5}/pydantic_ai_examples/evals/datasets/time_range_v2.yaml +0 -0
- {pydantic_ai_examples-0.3.3 → pydantic_ai_examples-0.3.5}/pydantic_ai_examples/evals/datasets/time_range_v2_schema.json +0 -0
- {pydantic_ai_examples-0.3.3 → pydantic_ai_examples-0.3.5}/pydantic_ai_examples/evals/example_01_generate_dataset.py +0 -0
- {pydantic_ai_examples-0.3.3 → pydantic_ai_examples-0.3.5}/pydantic_ai_examples/evals/example_02_add_custom_evaluators.py +0 -0
- {pydantic_ai_examples-0.3.3 → pydantic_ai_examples-0.3.5}/pydantic_ai_examples/evals/example_03_unit_testing.py +0 -0
- {pydantic_ai_examples-0.3.3 → pydantic_ai_examples-0.3.5}/pydantic_ai_examples/evals/example_04_compare_models.py +0 -0
- {pydantic_ai_examples-0.3.3 → pydantic_ai_examples-0.3.5}/pydantic_ai_examples/evals/models.py +0 -0
- {pydantic_ai_examples-0.3.3 → pydantic_ai_examples-0.3.5}/pydantic_ai_examples/flight_booking.py +0 -0
- {pydantic_ai_examples-0.3.3 → pydantic_ai_examples-0.3.5}/pydantic_ai_examples/pydantic_model.py +0 -0
- {pydantic_ai_examples-0.3.3 → pydantic_ai_examples-0.3.5}/pydantic_ai_examples/question_graph.py +0 -0
- {pydantic_ai_examples-0.3.3 → pydantic_ai_examples-0.3.5}/pydantic_ai_examples/rag.py +0 -0
- {pydantic_ai_examples-0.3.3 → pydantic_ai_examples-0.3.5}/pydantic_ai_examples/roulette_wheel.py +0 -0
- {pydantic_ai_examples-0.3.3 → pydantic_ai_examples-0.3.5}/pydantic_ai_examples/sql_gen.py +0 -0
- {pydantic_ai_examples-0.3.3 → pydantic_ai_examples-0.3.5}/pydantic_ai_examples/stream_markdown.py +0 -0
- {pydantic_ai_examples-0.3.3 → pydantic_ai_examples-0.3.5}/pydantic_ai_examples/stream_whales.py +0 -0
- {pydantic_ai_examples-0.3.3 → pydantic_ai_examples-0.3.5}/pydantic_ai_examples/weather_agent.py +0 -0
- {pydantic_ai_examples-0.3.3 → pydantic_ai_examples-0.3.5}/pydantic_ai_examples/weather_agent_gradio.py +0 -0

{pydantic_ai_examples-0.3.3 → pydantic_ai_examples-0.3.5}/PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: pydantic-ai-examples
-Version: 0.3.3
+Version: 0.3.5
 Summary: Examples of how to use PydanticAI and what it can do.
 Author-email: Samuel Colvin <samuel@pydantic.dev>
 License-Expression: MIT
@@ -31,8 +31,9 @@ Requires-Dist: fastapi>=0.115.4
 Requires-Dist: gradio>=5.9.0; python_version > '3.9'
 Requires-Dist: logfire[asyncpg,fastapi,httpx,sqlite3]>=2.6
 Requires-Dist: mcp[cli]>=1.4.1; python_version >= '3.10'
-Requires-Dist: pydantic-ai-slim[anthropic,groq,openai,vertexai]==0.3.3
-Requires-Dist: pydantic-evals==0.3.3
+Requires-Dist: modal>=1.0.4
+Requires-Dist: pydantic-ai-slim[anthropic,groq,openai,vertexai]==0.3.5
+Requires-Dist: pydantic-evals==0.3.5
 Requires-Dist: python-multipart>=0.0.17
 Requires-Dist: rich>=13.9.2
 Requires-Dist: uvicorn>=0.32.0

pydantic_ai_examples-0.3.5/pydantic_ai_examples/slack_lead_qualifier/__init__.py
New empty file.

pydantic_ai_examples-0.3.5/pydantic_ai_examples/slack_lead_qualifier/agent.py
@@ -0,0 +1,47 @@
+from textwrap import dedent
+from types import NoneType
+
+import logfire
+
+### [imports]
+from pydantic_ai import Agent, NativeOutput
+from pydantic_ai.common_tools.duckduckgo import duckduckgo_search_tool ### [/imports]
+
+from .models import Analysis, Profile
+
+### [agent]
+agent = Agent(
+    'openai:gpt-4o',
+    instructions=dedent(
+        """
+        When a new person joins our public Slack, please put together a brief snapshot so we can be most useful to them.
+
+        **What to include**
+
+        1. **Who they are:** Any details about their professional role or projects (e.g. LinkedIn, GitHub, company bio).
+        2. **Where they work:** Name of the organisation and its domain.
+        3. **How we can help:** On a scale of 1–5, estimate how likely they are to benefit from **Pydantic Logfire**
+           (our paid observability tool) based on factors such as company size, product maturity, or AI usage.
+           *1 = probably not relevant, 5 = very strong fit.*
+
+        **Our products (for context only)**
+        • **Pydantic Validation** – Python data-validation (open source)
+        • **Pydantic AI** – Python agent framework (open source)
+        • **Pydantic Logfire** – Observability for traces, logs & metrics with first-class AI support (commercial)
+
+        **How to research**
+
+        • Use the provided DuckDuckGo search tool to research the person and the organization they work for, based on the email domain or what you find on e.g. LinkedIn and GitHub.
+        • If you can't find enough to form a reasonable view, return **None**.
+        """
+    ),
+    tools=[duckduckgo_search_tool()],
+    output_type=NativeOutput([Analysis, NoneType]),
+) ### [/agent]
+
+
+### [analyze_profile]
+@logfire.instrument('Analyze profile')
+async def analyze_profile(profile: Profile) -> Analysis | None:
+    result = await agent.run(profile.as_prompt())
+    return result.output ### [/analyze_profile]
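
For orientation, here is a minimal sketch of how `analyze_profile` could be exercised outside Slack. It is not part of the package: it assumes the package is installed with the OpenAI and DuckDuckGo extras and that an OpenAI API key is available at run time, and the profile values are made up.

import asyncio

from pydantic_ai_examples.slack_lead_qualifier.agent import analyze_profile
from pydantic_ai_examples.slack_lead_qualifier.models import Profile


async def main() -> None:
    # Hypothetical new member; only `email` is required by the Profile model.
    profile = Profile(display_name='Ada L.', email='ada@example.com')
    analysis = await analyze_profile(profile)
    # `analysis` is an Analysis instance, or None if the agent couldn't form a view.
    print(analysis)


if __name__ == '__main__':
    asyncio.run(main())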

pydantic_ai_examples-0.3.5/pydantic_ai_examples/slack_lead_qualifier/app.py
@@ -0,0 +1,36 @@
+from typing import Any
+
+import logfire
+from fastapi import FastAPI, HTTPException, status
+from logfire.propagate import get_context
+
+from .models import Profile
+
+
+### [process_slack_member]
+def process_slack_member(profile: Profile):
+    from .modal import process_slack_member as _process_slack_member
+
+    _process_slack_member.spawn(
+        profile.model_dump(), logfire_ctx=get_context()
+    ) ### [/process_slack_member]
+
+
+### [app]
+app = FastAPI()
+logfire.instrument_fastapi(app, capture_headers=True)
+
+
+@app.post('/')
+async def process_webhook(payload: dict[str, Any]) -> dict[str, Any]:
+    if payload['type'] == 'url_verification':
+        return {'challenge': payload['challenge']}
+    elif (
+        payload['type'] == 'event_callback' and payload['event']['type'] == 'team_join'
+    ):
+        profile = Profile.model_validate(payload['event']['user']['profile'])
+
+        process_slack_member(profile)
+        return {'status': 'OK'}
+
+    raise HTTPException(status_code=status.HTTP_422_UNPROCESSABLE_ENTITY) ### [/app]
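
As a quick illustration of the webhook above (again, not part of the package), the Slack URL-verification handshake can be simulated with FastAPI's test client, assuming the module imports succeed locally (Logfire may only warn if it is not configured); the challenge value is invented.

from fastapi.testclient import TestClient

from pydantic_ai_examples.slack_lead_qualifier.app import app

client = TestClient(app)
# Slack POSTs {'type': 'url_verification', 'challenge': ...} when the webhook URL is registered.
resp = client.post('/', json={'type': 'url_verification', 'challenge': 'abc123'})
assert resp.json() == {'challenge': 'abc123'}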

pydantic_ai_examples-0.3.5/pydantic_ai_examples/slack_lead_qualifier/functions.py
@@ -0,0 +1,85 @@
+import logfire
+
+### [imports]
+from .agent import analyze_profile
+from .models import Profile
+
+### [imports-daily_summary]
+from .slack import send_slack_message
+from .store import AnalysisStore ### [/imports,/imports-daily_summary]
+
+### [constant-new_lead_channel]
+NEW_LEAD_CHANNEL = '#new-slack-leads'
+### [/constant-new_lead_channel]
+### [constant-daily_summary_channel]
+DAILY_SUMMARY_CHANNEL = '#daily-slack-leads-summary'
+### [/constant-daily_summary_channel]
+
+
+### [process_slack_member]
+@logfire.instrument('Process Slack member')
+async def process_slack_member(profile: Profile):
+    analysis = await analyze_profile(profile)
+    logfire.info('Analysis', analysis=analysis)
+
+    if analysis is None:
+        return
+
+    await AnalysisStore().add(analysis)
+
+    await send_slack_message(
+        NEW_LEAD_CHANNEL,
+        [
+            {
+                'type': 'header',
+                'text': {
+                    'type': 'plain_text',
+                    'text': f'New Slack member with score {analysis.relevance}/5',
+                },
+            },
+            {
+                'type': 'divider',
+            },
+            *analysis.as_slack_blocks(),
+        ],
+    ) ### [/process_slack_member]
+
+
+### [send_daily_summary]
+@logfire.instrument('Send daily summary')
+async def send_daily_summary():
+    analyses = await AnalysisStore().list()
+    logfire.info('Analyses', analyses=analyses)
+
+    if len(analyses) == 0:
+        return
+
+    sorted_analyses = sorted(analyses, key=lambda x: x.relevance, reverse=True)
+    top_analyses = sorted_analyses[:5]
+
+    blocks = [
+        {
+            'type': 'header',
+            'text': {
+                'type': 'plain_text',
+                'text': f'Top {len(top_analyses)} new Slack members from the last 24 hours',
+            },
+        },
+    ]
+
+    for analysis in top_analyses:
+        blocks.extend(
+            [
+                {
+                    'type': 'divider',
+                },
+                *analysis.as_slack_blocks(include_relevance=True),
+            ]
+        )
+
+    await send_slack_message(
+        DAILY_SUMMARY_CHANNEL,
+        blocks,
+    )
+
+    await AnalysisStore().clear() ### [/send_daily_summary]

pydantic_ai_examples-0.3.5/pydantic_ai_examples/slack_lead_qualifier/modal.py
@@ -0,0 +1,66 @@
+from typing import Any
+
+### [setup_modal]
+import modal
+
+image = modal.Image.debian_slim(python_version='3.13').pip_install(
+    'pydantic',
+    'pydantic_ai_slim[openai,duckduckgo]',
+    'logfire[httpx,fastapi]',
+    'fastapi[standard]',
+    'httpx',
+)
+app = modal.App(
+    name='slack-lead-qualifier',
+    image=image,
+    secrets=[
+        modal.Secret.from_name('logfire'),
+        modal.Secret.from_name('openai'),
+        modal.Secret.from_name('slack'),
+    ],
+) ### [/setup_modal]
+
+
+### [setup_logfire]
+def setup_logfire():
+    import logfire
+
+    logfire.configure(service_name=app.name)
+    logfire.instrument_pydantic_ai()
+    logfire.instrument_httpx(capture_all=True) ### [/setup_logfire]
+
+
+### [web_app]
+@app.function(min_containers=1)
+@modal.asgi_app()  # type: ignore
+def web_app():
+    setup_logfire()
+
+    from .app import app as _app
+
+    return _app ### [/web_app]
+
+
+### [process_slack_member]
+@app.function()
+async def process_slack_member(profile_raw: dict[str, Any], logfire_ctx: Any):
+    setup_logfire()
+
+    from logfire.propagate import attach_context
+
+    from .functions import process_slack_member as _process_slack_member
+    from .models import Profile
+
+    with attach_context(logfire_ctx):
+        profile = Profile.model_validate(profile_raw)
+        await _process_slack_member(profile) ### [/process_slack_member]
+
+
+### [send_daily_summary]
+@app.function(schedule=modal.Cron('0 8 * * *'))  # Every day at 8am UTC
+async def send_daily_summary():
+    setup_logfire()
+
+    from .functions import send_daily_summary as _send_daily_summary
+
+    await _send_daily_summary() ### [/send_daily_summary]
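
A hedged sketch of triggering the scheduled job once from a local machine rather than waiting for the cron schedule. It assumes the Modal CLI is authenticated and that the 'logfire', 'openai', and 'slack' secrets referenced above already exist in the Modal workspace; in normal use the app would simply be deployed with Modal's deploy command.

from pydantic_ai_examples.slack_lead_qualifier.modal import app, send_daily_summary

if __name__ == '__main__':
    # Runs the function once in an ephemeral Modal app context.
    with app.run():
        send_daily_summary.remote()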

pydantic_ai_examples-0.3.5/pydantic_ai_examples/slack_lead_qualifier/models.py
@@ -0,0 +1,46 @@
+from typing import Annotated, Any
+
+from annotated_types import Ge, Le
+from pydantic import BaseModel
+
+### [import-format_as_xml]
+from pydantic_ai import format_as_xml ### [/import-format_as_xml]
+
+
+### [profile,profile-intro]
+class Profile(BaseModel): ### [/profile-intro]
+    first_name: str | None = None
+    last_name: str | None = None
+    display_name: str | None = None
+    email: str ### [/profile]
+
+    ### [profile-as_prompt]
+    def as_prompt(self) -> str:
+        return format_as_xml(self, root_tag='profile') ### [/profile-as_prompt]
+
+
+### [analysis,analysis-intro]
+class Analysis(BaseModel): ### [/analysis-intro]
+    profile: Profile
+    organization_name: str
+    organization_domain: str
+    job_title: str
+    relevance: Annotated[int, Ge(1), Le(5)]
+    """Estimated fit for Pydantic Logfire: 1 = low, 5 = high"""
+    summary: str
+    """One-sentence welcome note summarising who they are and how we might help""" ### [/analysis]
+
+    ### [analysis-as_slack_blocks]
+    def as_slack_blocks(self, include_relevance: bool = False) -> list[dict[str, Any]]:
+        profile = self.profile
+        relevance = f'({self.relevance}/5)' if include_relevance else ''
+        return [
+            {
+                'type': 'markdown',
+                'text': f'[{profile.display_name}](mailto:{profile.email}), {self.job_title} at [**{self.organization_name}**](https://{self.organization_domain}) {relevance}',
+            },
+            {
+                'type': 'markdown',
+                'text': self.summary,
+            },
+        ] ### [/analysis-as_slack_blocks]
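
To make the two models above concrete, a small illustrative snippet (field values invented) showing the prompt text `as_prompt` builds and the Slack blocks `as_slack_blocks` returns:

from pydantic_ai_examples.slack_lead_qualifier.models import Analysis, Profile

profile = Profile(display_name='Ada L.', email='ada@example.com')
print(profile.as_prompt())  # the profile serialised as XML under a <profile> root tag

analysis = Analysis(
    profile=profile,
    organization_name='Example Corp',          # hypothetical values throughout
    organization_domain='example.com',
    job_title='Platform Engineer',
    relevance=4,
    summary='Leads a growing Python platform team; observability is likely on their radar.',
)
print(analysis.as_slack_blocks(include_relevance=True))  # two markdown blocks, relevance appended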

pydantic_ai_examples-0.3.5/pydantic_ai_examples/slack_lead_qualifier/slack.py
@@ -0,0 +1,30 @@
+import os
+from typing import Any
+
+import httpx
+import logfire
+
+### [send_slack_message]
+API_KEY = os.getenv('SLACK_API_KEY')
+assert API_KEY, 'SLACK_API_KEY is not set'
+
+
+@logfire.instrument('Send Slack message')
+async def send_slack_message(channel: str, blocks: list[dict[str, Any]]):
+    client = httpx.AsyncClient()
+    response = await client.post(
+        'https://slack.com/api/chat.postMessage',
+        json={
+            'channel': channel,
+            'blocks': blocks,
+        },
+        headers={
+            'Authorization': f'Bearer {API_KEY}',
+        },
+        timeout=5,
+    )
+    response.raise_for_status()
+    result = response.json()
+    if not result.get('ok', False):
+        error = result.get('error', 'Unknown error')
+        raise Exception(f'Failed to send to Slack: {error}') ### [/send_slack_message]
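
For completeness, a hedged manual test of the Slack helper above. It assumes SLACK_API_KEY is exported before the module is imported, that the bot has been invited to the (hypothetical) channel, and that the payload follows Slack's Block Kit format.

import asyncio

from pydantic_ai_examples.slack_lead_qualifier.slack import send_slack_message

blocks = [
    {'type': 'section', 'text': {'type': 'mrkdwn', 'text': 'Hello from the lead qualifier!'}},
]
asyncio.run(send_slack_message('#bot-playground', blocks))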

pydantic_ai_examples-0.3.5/pydantic_ai_examples/slack_lead_qualifier/store.py
@@ -0,0 +1,31 @@
+import logfire
+
+### [import-modal]
+import modal ### [/import-modal]
+
+from .models import Analysis
+
+
+### [analysis_store]
+class AnalysisStore:
+    @classmethod
+    @logfire.instrument('Add analysis to store')
+    async def add(cls, analysis: Analysis):
+        await cls._get_store().put.aio(analysis.profile.email, analysis.model_dump())
+
+    @classmethod
+    @logfire.instrument('List analyses from store')
+    async def list(cls) -> list[Analysis]:
+        return [
+            Analysis.model_validate(analysis)
+            async for analysis in cls._get_store().values.aio()
+        ]
+
+    @classmethod
+    @logfire.instrument('Clear analyses from store')
+    async def clear(cls):
+        await cls._get_store().clear.aio()
+
+    @classmethod
+    def _get_store(cls) -> modal.Dict:
+        return modal.Dict.from_name('analyses', create_if_missing=True)  # type: ignore ### [/analysis_store]

{pydantic_ai_examples-0.3.3 → pydantic_ai_examples-0.3.5}/pyproject.toml
@@ -52,7 +52,8 @@ dependencies = [
     "uvicorn>=0.32.0",
     "devtools>=0.12.2",
     "gradio>=5.9.0; python_version>'3.9'",
-    "mcp[cli]>=1.4.1; python_version >= '3.10'"
+    "mcp[cli]>=1.4.1; python_version >= '3.10'",
+    "modal>=1.0.4",
 ]
 
 [tool.hatch.build.targets.wheel]

All remaining files listed above (.gitignore, LICENSE, README.md, __main__.py, and the other example modules) were renamed from pydantic_ai_examples-0.3.3 to pydantic_ai_examples-0.3.5 with no content changes.