opensmith 0.1.0__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- opensmith-0.1.0/PKG-INFO +159 -0
- opensmith-0.1.0/README.md +136 -0
- opensmith-0.1.0/opensmith/__init__.py +14 -0
- opensmith-0.1.0/opensmith/cli.py +99 -0
- opensmith-0.1.0/opensmith/models.py +51 -0
- opensmith-0.1.0/opensmith/patcher.py +263 -0
- opensmith-0.1.0/opensmith/server.py +78 -0
- opensmith-0.1.0/opensmith/storage.py +281 -0
- opensmith-0.1.0/opensmith/tokens.py +61 -0
- opensmith-0.1.0/opensmith/tracer.py +154 -0
- opensmith-0.1.0/opensmith/ui/index.html +624 -0
- opensmith-0.1.0/pyproject.toml +41 -0
- opensmith-0.1.0/tests/.gitkeep +0 -0
- opensmith-0.1.0/tests/conftest.py +41 -0
- opensmith-0.1.0/tests/test_models.py +44 -0
- opensmith-0.1.0/tests/test_storage.py +107 -0
- opensmith-0.1.0/tests/test_tokens.py +34 -0
- opensmith-0.1.0/tests/test_tracer.py +131 -0
opensmith-0.1.0/PKG-INFO
ADDED
|
@@ -0,0 +1,159 @@
|
|
|
1
|
+
Metadata-Version: 2.4
|
|
2
|
+
Name: opensmith
|
|
3
|
+
Version: 0.1.0
|
|
4
|
+
Summary: Local-first LLM pipeline tracer. No cloud. No setup.
|
|
5
|
+
Project-URL: Homepage, https://github.com/shivnathtathe/opensmith
|
|
6
|
+
License: MIT
|
|
7
|
+
Keywords: agent,debug,langsmith,llm,local,observability,tracing
|
|
8
|
+
Classifier: Development Status :: 3 - Alpha
|
|
9
|
+
Classifier: Intended Audience :: Developers
|
|
10
|
+
Classifier: Programming Language :: Python :: 3.10
|
|
11
|
+
Classifier: Topic :: Software Development :: Debuggers
|
|
12
|
+
Requires-Python: >=3.10
|
|
13
|
+
Requires-Dist: click>=8.0.0
|
|
14
|
+
Requires-Dist: fastapi>=0.100.0
|
|
15
|
+
Requires-Dist: pydantic>=2.0
|
|
16
|
+
Requires-Dist: rich>=13.0.0
|
|
17
|
+
Requires-Dist: uvicorn>=0.20.0
|
|
18
|
+
Provides-Extra: dev
|
|
19
|
+
Requires-Dist: httpx; extra == 'dev'
|
|
20
|
+
Requires-Dist: pytest; extra == 'dev'
|
|
21
|
+
Requires-Dist: pytest-asyncio; extra == 'dev'
|
|
22
|
+
Description-Content-Type: text/markdown
|
|
23
|
+
|
|
24
|
+
<div align="center">
|
|
25
|
+
|
|
26
|
+
<pre style="font-family: monospace; font-size: 18px;
|
|
27
|
+
line-height: 1.2; color: #ededec; background: #0a0a0a;
|
|
28
|
+
padding: 20px; display: inline-block;">
|
|
29
|
+
██████ ██████ ███████ ███ ██ ███████ ███ ███ ██ ████████ ██ ██
|
|
30
|
+
██ ██ ██ ██ ██ ████ ██ ██ ████ ████ ██ ██ ██ ██
|
|
31
|
+
██ ██ ██████ █████ ██ ██ ██ ███████ ██ ████ ██ ██ ██ ███████
|
|
32
|
+
██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██
|
|
33
|
+
██████ ██ ███████ ██ ████ ███████ ██ ██ ██ ██ ██ ██
|
|
34
|
+
</pre>
|
|
35
|
+
|
|
36
|
+
**Local-first LLM pipeline tracer. No cloud. No setup.**
|
|
37
|
+
|
|
38
|
+

|
|
39
|
+

|
|
40
|
+

|
|
41
|
+
|
|
42
|
+
</div>
|
|
43
|
+
|
|
44
|
+
# opensmith
|
|
45
|
+
|
|
46
|
+
Local-first LLM pipeline tracer. No cloud. No setup.
|
|
47
|
+
|
|
48
|
+
## Why opensmith
|
|
49
|
+
|
|
50
|
+
LangSmith is powerful, but it is built around cloud-hosted tracing and is most natural inside the LangChain ecosystem. opensmith is a local-first alternative: install it with `pip`, use it with any Python LLM pipeline, and inspect traces on your machine without accounts, hosted services, Docker, or configuration. No trace data leaves your machine.
|
|
51
|
+
|
|
52
|
+
## Install
|
|
53
|
+
|
|
54
|
+
```bash
|
|
55
|
+
pip install opensmith
|
|
56
|
+
```
|
|
57
|
+
|
|
58
|
+
## Quickstart
|
|
59
|
+
|
|
60
|
+
### Example 1: `@trace` decorator
|
|
61
|
+
|
|
62
|
+
```python
|
|
63
|
+
from opensmith import trace
|
|
64
|
+
|
|
65
|
+
|
|
66
|
+
@trace
|
|
67
|
+
def call_llm(prompt: str):
|
|
68
|
+
return openai.chat.completions.create(
|
|
69
|
+
model="gpt-4o-mini",
|
|
70
|
+
messages=[{"role": "user", "content": prompt}],
|
|
71
|
+
)
|
|
72
|
+
|
|
73
|
+
|
|
74
|
+
@trace
|
|
75
|
+
def my_pipeline(question: str):
|
|
76
|
+
# search_docs is your own retrieval function
|
|
77
|
+
docs = search_docs(question)
|
|
78
|
+
return call_llm(docs + question)
|
|
79
|
+
```
|
|
80
|
+
|
|
81
|
+
### Example 2: context manager
|
|
82
|
+
|
|
83
|
+
```python
|
|
84
|
+
from opensmith import trace
|
|
85
|
+
|
|
86
|
+
|
|
87
|
+
with trace("my_pipeline") as t:
|
|
88
|
+
t.log("query", query)
|
|
89
|
+
response = openai.chat.completions.create(
|
|
90
|
+
model="gpt-4o-mini",
|
|
91
|
+
messages=[{"role": "user", "content": query}],
|
|
92
|
+
)
|
|
93
|
+
t.log("response", response)
|
|
94
|
+
```
|
|
95
|
+
|
|
96
|
+
### Example 3: `autopatch()` zero code changes
|
|
97
|
+
|
|
98
|
+
```python
|
|
99
|
+
from opensmith import autopatch
|
|
100
|
+
|
|
101
|
+
|
|
102
|
+
autopatch()
|
|
103
|
+
```
|
|
104
|
+
|
|
105
|
+
Patch only selected backends:
|
|
106
|
+
|
|
107
|
+
```python
|
|
108
|
+
from opensmith import autopatch
|
|
109
|
+
|
|
110
|
+
|
|
111
|
+
autopatch(only=["openai"])
|
|
112
|
+
```
|
|
113
|
+
|
|
114
|
+
Patch everything except selected backends:
|
|
115
|
+
|
|
116
|
+
```python
|
|
117
|
+
from opensmith import autopatch
|
|
118
|
+
|
|
119
|
+
|
|
120
|
+
autopatch(exclude=["chromadb"])
|
|
121
|
+
```
|
|
122
|
+
|
|
123
|
+
## Dashboard
|
|
124
|
+
|
|
125
|
+
```bash
|
|
126
|
+
opensmith ui
|
|
127
|
+
```
|
|
128
|
+
|
|
129
|
+
Open `http://localhost:7823`.
|
|
130
|
+
|
|
131
|
+

|
|
132
|
+
|
|
133
|
+
## CLI reference
|
|
134
|
+
|
|
135
|
+
| Command | Description |
|
|
136
|
+
| --- | --- |
|
|
137
|
+
| `opensmith ui` | Start the local dashboard at `localhost:7823`. |
|
|
138
|
+
| `opensmith traces` | List recent traces in the terminal. |
|
|
139
|
+
| `opensmith stats` | Show aggregate trace, step, token, and cost statistics. |
|
|
140
|
+
| `opensmith clear` | Delete all locally stored traces after confirmation. |
|
|
141
|
+
|
|
142
|
+
## Supported backends
|
|
143
|
+
|
|
144
|
+
| Backend | Package | Status |
|
|
145
|
+
|---------|---------|--------|
|
|
146
|
+
| openai | openai | ✅ |
|
|
147
|
+
| anthropic | anthropic | ✅ |
|
|
148
|
+
| litellm | litellm | ✅ |
|
|
149
|
+
| qdrant | qdrant-client | ✅ |
|
|
150
|
+
| chromadb | chromadb | ✅ |
|
|
151
|
+
| pinecone | pinecone-client | ✅ |
|
|
152
|
+
|
|
153
|
+
## Storage
|
|
154
|
+
|
|
155
|
+
Traces are stored locally at `~/.opensmith/traces.db`.
|
|
156
|
+
|
|
157
|
+
## License
|
|
158
|
+
|
|
159
|
+
MIT
|
|
@@ -0,0 +1,136 @@
|
|
|
1
|
+
<div align="center">
|
|
2
|
+
|
|
3
|
+
<pre style="font-family: monospace; font-size: 18px;
|
|
4
|
+
line-height: 1.2; color: #ededec; background: #0a0a0a;
|
|
5
|
+
padding: 20px; display: inline-block;">
|
|
6
|
+
██████ ██████ ███████ ███ ██ ███████ ███ ███ ██ ████████ ██ ██
|
|
7
|
+
██ ██ ██ ██ ██ ████ ██ ██ ████ ████ ██ ██ ██ ██
|
|
8
|
+
██ ██ ██████ █████ ██ ██ ██ ███████ ██ ████ ██ ██ ██ ███████
|
|
9
|
+
██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██
|
|
10
|
+
██████ ██ ███████ ██ ████ ███████ ██ ██ ██ ██ ██ ██
|
|
11
|
+
</pre>
|
|
12
|
+
|
|
13
|
+
**Local-first LLM pipeline tracer. No cloud. No setup.**
|
|
14
|
+
|
|
15
|
+

|
|
16
|
+

|
|
17
|
+

|
|
18
|
+
|
|
19
|
+
</div>
|
|
20
|
+
|
|
21
|
+
# opensmith
|
|
22
|
+
|
|
23
|
+
Local-first LLM pipeline tracer. No cloud. No setup.
|
|
24
|
+
|
|
25
|
+
## Why opensmith
|
|
26
|
+
|
|
27
|
+
LangSmith is powerful, but it is built around cloud-hosted tracing and is most natural inside the LangChain ecosystem. opensmith is a local-first alternative: install it with `pip`, use it with any Python LLM pipeline, and inspect traces on your machine without accounts, hosted services, Docker, or configuration. No trace data leaves your machine.
|
|
28
|
+
|
|
29
|
+
## Install
|
|
30
|
+
|
|
31
|
+
```bash
|
|
32
|
+
pip install opensmith
|
|
33
|
+
```
|
|
34
|
+
|
|
35
|
+
## Quickstart
|
|
36
|
+
|
|
37
|
+
### Example 1: `@trace` decorator
|
|
38
|
+
|
|
39
|
+
```python
|
|
40
|
+
from opensmith import trace
|
|
41
|
+
|
|
42
|
+
|
|
43
|
+
@trace
|
|
44
|
+
def call_llm(prompt: str):
|
|
45
|
+
return openai.chat.completions.create(
|
|
46
|
+
model="gpt-4o-mini",
|
|
47
|
+
messages=[{"role": "user", "content": prompt}],
|
|
48
|
+
)
|
|
49
|
+
|
|
50
|
+
|
|
51
|
+
@trace
|
|
52
|
+
def my_pipeline(question: str):
|
|
53
|
+
# search_docs is your own retrieval function
|
|
54
|
+
docs = search_docs(question)
|
|
55
|
+
return call_llm(docs + question)
|
|
56
|
+
```
|
|
57
|
+
|
|
58
|
+
### Example 2: context manager
|
|
59
|
+
|
|
60
|
+
```python
|
|
61
|
+
from opensmith import trace
|
|
62
|
+
|
|
63
|
+
|
|
64
|
+
with trace("my_pipeline") as t:
|
|
65
|
+
t.log("query", query)
|
|
66
|
+
response = openai.chat.completions.create(
|
|
67
|
+
model="gpt-4o-mini",
|
|
68
|
+
messages=[{"role": "user", "content": query}],
|
|
69
|
+
)
|
|
70
|
+
t.log("response", response)
|
|
71
|
+
```
|
|
72
|
+
|
|
73
|
+
### Example 3: `autopatch()` zero code changes
|
|
74
|
+
|
|
75
|
+
```python
|
|
76
|
+
from opensmith import autopatch
|
|
77
|
+
|
|
78
|
+
|
|
79
|
+
autopatch()
|
|
80
|
+
```
|
|
81
|
+
|
|
82
|
+
Patch only selected backends:
|
|
83
|
+
|
|
84
|
+
```python
|
|
85
|
+
from opensmith import autopatch
|
|
86
|
+
|
|
87
|
+
|
|
88
|
+
autopatch(only=["openai"])
|
|
89
|
+
```
|
|
90
|
+
|
|
91
|
+
Patch everything except selected backends:
|
|
92
|
+
|
|
93
|
+
```python
|
|
94
|
+
from opensmith import autopatch
|
|
95
|
+
|
|
96
|
+
|
|
97
|
+
autopatch(exclude=["chromadb"])
|
|
98
|
+
```
|
|
99
|
+
|
|
100
|
+
## Dashboard
|
|
101
|
+
|
|
102
|
+
```bash
|
|
103
|
+
opensmith ui
|
|
104
|
+
```
|
|
105
|
+
|
|
106
|
+
Open `http://localhost:7823`.
|
|
107
|
+
|
|
108
|
+

|
|
109
|
+
|
|
110
|
+
## CLI reference
|
|
111
|
+
|
|
112
|
+
| Command | Description |
|
|
113
|
+
| --- | --- |
|
|
114
|
+
| `opensmith ui` | Start the local dashboard at `localhost:7823`. |
|
|
115
|
+
| `opensmith traces` | List recent traces in the terminal. |
|
|
116
|
+
| `opensmith stats` | Show aggregate trace, step, token, and cost statistics. |
|
|
117
|
+
| `opensmith clear` | Delete all locally stored traces after confirmation. |
|
|
118
|
+
|
|
119
|
+
## Supported backends
|
|
120
|
+
|
|
121
|
+
| Backend | Package | Status |
|
|
122
|
+
|---------|---------|--------|
|
|
123
|
+
| openai | openai | ✅ |
|
|
124
|
+
| anthropic | anthropic | ✅ |
|
|
125
|
+
| litellm | litellm | ✅ |
|
|
126
|
+
| qdrant | qdrant-client | ✅ |
|
|
127
|
+
| chromadb | chromadb | ✅ |
|
|
128
|
+
| pinecone | pinecone-client | ✅ |
|
|
129
|
+
|
|
130
|
+
## Storage
|
|
131
|
+
|
|
132
|
+
Traces are stored locally at `~/.opensmith/traces.db`.
|
|
133
|
+
|
|
134
|
+
## License
|
|
135
|
+
|
|
136
|
+
MIT
|
|
@@ -0,0 +1,14 @@
|
|
|
1
|
+
"""Public API surface for the opensmith package.

Re-exports the tracing entry points (`trace`, `autopatch`, `unpatch`),
the storage layer, and the pydantic data models so callers can write
``from opensmith import trace`` instead of importing submodules.
"""

from opensmith.tracer import TraceCallable, trace
from opensmith.patcher import autopatch, unpatch
from opensmith.storage import Storage
from opensmith.models import Trace, Step, Run

# NOTE(review): TraceCallable is imported above but not listed here —
# confirm whether it is meant to be part of the public API.
__all__ = [
    "trace",
    "autopatch",
    "unpatch",
    "Storage",
    "Trace",
    "Step",
    "Run",
]
|
|
@@ -0,0 +1,99 @@
|
|
|
1
|
+
from __future__ import annotations
|
|
2
|
+
|
|
3
|
+
import time
|
|
4
|
+
from typing import Any
|
|
5
|
+
|
|
6
|
+
import click
|
|
7
|
+
import uvicorn
|
|
8
|
+
from rich.console import Console
|
|
9
|
+
from rich.table import Table
|
|
10
|
+
|
|
11
|
+
from opensmith.storage import Storage
|
|
12
|
+
|
|
13
|
+
|
|
14
|
+
console = Console()
|
|
15
|
+
|
|
16
|
+
|
|
17
|
+
# Root command group; subcommands (ui, traces, stats, clear) attach via @cli.command().
@click.group(name="opensmith")
def cli() -> None:
    """Local-first LLM pipeline tracer."""
|
|
20
|
+
|
|
21
|
+
|
|
22
|
+
@cli.command()
@click.option("--port", default=7823, show_default=True, type=int)
@click.option("--host", default="127.0.0.1", show_default=True)
def ui(port: int, host: str) -> None:
    """Start the local dashboard."""
    # Announce the address up front: uvicorn.run() blocks until Ctrl+C.
    address = f"http://{host}:{port}"
    click.echo(f"opensmith UI running at {address}")
    click.echo("Press Ctrl+C to stop")
    uvicorn.run("opensmith.server:app", host=host, port=port)
|
|
30
|
+
|
|
31
|
+
|
|
32
|
+
@cli.command()
def clear() -> None:
    """Clear all traces."""
    # Destructive operation: do nothing unless explicitly confirmed.
    confirmed = click.confirm("Clear all traces?", default=False)
    if confirmed:
        Storage().delete_all()
        click.echo("Cleared all traces.")
|
|
40
|
+
|
|
41
|
+
|
|
42
|
+
@cli.command()
def stats() -> None:
    """Show trace statistics."""
    summary = Storage().get_stats()

    table = Table(title="opensmith stats")
    table.add_column("Metric")
    table.add_column("Value", justify="right")

    # Each aggregate metric becomes one (label, formatted value) row.
    metric_rows = [
        ("Total traces", str(summary["total_traces"])),
        ("Total steps", str(summary["total_steps"])),
        ("Total tokens", str(summary["total_tokens"])),
        ("Total cost (USD)", f"${float(summary['total_cost_usd']):.6f}"),
    ]
    for label, value in metric_rows:
        table.add_row(label, value)

    console.print(table)
|
|
57
|
+
|
|
58
|
+
|
|
59
|
+
@cli.command()
@click.option("--limit", default=20, show_default=True, type=int)
def traces(limit: int) -> None:
    """List recent traces."""
    records = Storage().get_traces(limit=limit)

    table = Table(title="opensmith traces")
    table.add_column("id")
    table.add_column("name")
    table.add_column("latency_ms", justify="right")
    table.add_column("error")
    table.add_column("created_at")

    for record in records:
        # Truncate the id to its first 8 chars for a compact listing.
        short_id = str(record.get("id", ""))[:8]
        name = str(record.get("name") or "")
        latency = _format_latency(record.get("latency_ms"))
        had_error = "yes" if record.get("error") else "no"
        created = _format_timestamp(record.get("created_at"))
        table.add_row(short_id, name, latency, had_error, created)

    console.print(table)
|
|
82
|
+
|
|
83
|
+
|
|
84
|
+
def _format_latency(value: Any) -> str:
|
|
85
|
+
if value is None:
|
|
86
|
+
return ""
|
|
87
|
+
try:
|
|
88
|
+
return f"{float(value):.2f}"
|
|
89
|
+
except (TypeError, ValueError):
|
|
90
|
+
return ""
|
|
91
|
+
|
|
92
|
+
|
|
93
|
+
def _format_timestamp(value: Any) -> str:
|
|
94
|
+
if value is None:
|
|
95
|
+
return ""
|
|
96
|
+
try:
|
|
97
|
+
return time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(float(value)))
|
|
98
|
+
except (TypeError, ValueError, OSError):
|
|
99
|
+
return ""
|
|
@@ -0,0 +1,51 @@
|
|
|
1
|
+
from __future__ import annotations
|
|
2
|
+
|
|
3
|
+
import time
|
|
4
|
+
from typing import Any, Literal
|
|
5
|
+
from uuid import uuid4
|
|
6
|
+
|
|
7
|
+
from pydantic import BaseModel, Field
|
|
8
|
+
|
|
9
|
+
|
|
10
|
+
# Closed set of step categories the tracer records; anything else is rejected by pydantic.
StepType = Literal["llm", "retrieval", "tool", "custom"]
|
|
11
|
+
|
|
12
|
+
|
|
13
|
+
class Step(BaseModel):
    """One unit of work (LLM call, retrieval, tool call, ...) recorded inside a trace."""

    id: str = Field(default_factory=lambda: str(uuid4()))
    trace_id: str | None = None  # id of the owning Trace, when attached
    name: str
    input: Any | None = None
    output: Any | None = None
    error: str | None = None  # error message when the step failed; None on success
    start_time: float | None = None  # presumably epoch seconds — confirm against tracer
    end_time: float | None = None
    latency_ms: float | None = None
    tokens_input: int | None = None
    tokens_output: int | None = None
    tokens_total: int | None = None
    model: str | None = None  # model identifier for LLM steps
    cost_usd: float | None = None
    step_type: StepType | None = None  # "llm" | "retrieval" | "tool" | "custom"
    metadata: dict[str, Any] | None = None
|
|
30
|
+
|
|
31
|
+
|
|
32
|
+
class Trace(BaseModel):
    """A recorded pipeline execution, holding its overall I/O plus an ordered list of steps."""

    id: str = Field(default_factory=lambda: str(uuid4()))
    name: str
    input: Any | None = None
    output: Any | None = None
    error: str | None = None  # error message when the traced call failed
    start_time: float | None = None  # presumably epoch seconds — confirm against tracer
    end_time: float | None = None
    latency_ms: float | None = None
    parent_id: str | None = None  # id of an enclosing Trace for nested calls
    run_id: str | None = None  # id of the Run this trace belongs to, if any
    metadata: dict[str, Any] | None = None
    steps: list[Step] = Field(default_factory=list)
|
|
45
|
+
|
|
46
|
+
|
|
47
|
+
class Run(BaseModel):
    """A named grouping of traces (e.g. one experiment or batch execution)."""

    id: str = Field(default_factory=lambda: str(uuid4()))
    name: str | None = None
    tags: list[str] = Field(default_factory=list)
    # Creation timestamp in epoch seconds; time.time is already a zero-arg callable.
    created_at: float = Field(default_factory=time.time)
|