peekr 0.1.0__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
peekr-0.1.0/PKG-INFO ADDED
@@ -0,0 +1,221 @@
1
+ Metadata-Version: 2.4
2
+ Name: peekr
3
+ Version: 0.1.0
4
+ Summary: Zero-config observability for AI agents
5
+ License: MIT
6
+ Project-URL: Homepage, https://ashwanijha04.github.io/peekr
7
+ Project-URL: Repository, https://github.com/ashwanijha04/peekr
8
+ Requires-Python: >=3.9
9
+ Description-Content-Type: text/markdown
10
+ Provides-Extra: openai
11
+ Requires-Dist: openai>=1.0; extra == "openai"
12
+ Provides-Extra: anthropic
13
+ Requires-Dist: anthropic>=0.20; extra == "anthropic"
14
+ Provides-Extra: all
15
+ Requires-Dist: openai>=1.0; extra == "all"
16
+ Requires-Dist: anthropic>=0.20; extra == "all"
17
+ Provides-Extra: dev
18
+ Requires-Dist: pytest>=8.0; extra == "dev"
19
+
20
+ # peekr
21
+
22
+ [![PyPI](https://img.shields.io/pypi/v/peekr)](https://pypi.org/project/peekr/)
23
+ [![CI](https://github.com/ashwanijha04/peekr/actions/workflows/ci.yml/badge.svg)](https://github.com/ashwanijha04/peekr/actions/workflows/ci.yml)
24
+ [![License: MIT](https://img.shields.io/badge/License-MIT-blue.svg)](LICENSE)
25
+ [![Python 3.9+](https://img.shields.io/badge/python-3.9+-blue.svg)](https://www.python.org/)
26
+
27
+ Zero-config observability for AI agents. Auto-instruments OpenAI and Anthropic SDKs — no code changes needed.
28
+
29
+ ```
30
+ pip install peekr
31
+ ```
32
+
33
+ ---
34
+
35
+ ## Quickstart
36
+
37
+ ```python
38
+ import peekr
39
+ peekr.instrument()
40
+
41
+ # Your existing agent code — zero changes
42
+ import openai
43
+ openai.chat.completions.create(model="gpt-4o", messages=[...])
44
+ ```
45
+
46
+ Every LLM call is automatically captured. View your traces:
47
+
48
+ ```bash
49
+ peekr view traces.jsonl
50
+ ```
51
+
52
+ ```
53
+ Trace a3f2b1c0  1243ms 891 tokens
54
+ ────────────────────────────────────────────────
55
+ agent.run 1243ms
56
+ └─ openai.chat.completions [gpt-4o] 821ms 312tok
57
+ └─ tool.search_web 12ms
58
+ └─ openai.chat.completions [gpt-4o] 410ms 579tok
59
+ ```
60
+
61
+ ---
62
+
63
+ ## Installation
64
+
65
+ ```bash
66
+ # Base (no LLM SDK required)
67
+ pip install peekr
68
+
69
+ # With OpenAI
70
+ pip install "peekr[openai]"
71
+
72
+ # With Anthropic
73
+ pip install "peekr[anthropic]"
74
+
75
+ # Both
76
+ pip install "peekr[all]"
77
+ ```
78
+
79
+ ---
80
+
81
+ ## Usage
82
+
83
+ ### Auto-instrument LLM SDKs
84
+
85
+ ```python
86
+ import peekr
87
+
88
+ peekr.instrument()
89
+ # That's it. All OpenAI and Anthropic calls are now traced.
90
+ ```
91
+
92
+ Options:
93
+
94
+ ```python
95
+ peekr.instrument(
96
+ console=True, # print spans as they happen (default: True)
97
+ jsonl_path="traces.jsonl", # write to file (default: traces.jsonl)
98
+     # ...or pass jsonl_path=None instead to disable file output entirely
99
+ )
100
+ ```
101
+
102
+ ### Trace your own functions
103
+
104
+ ```python
105
+ from peekr import trace
106
+
107
+ @trace
108
+ def search_web(query: str) -> list[str]:
109
+ ...
110
+
111
+ @trace(name="tool.calculator")
112
+ def calculate(expression: str) -> float:
113
+ ...
114
+
115
+ # Async works too
116
+ @trace
117
+ async def fetch_data(url: str) -> dict:
118
+ ...
119
+ ```
120
+
121
+ Decorated functions automatically become child spans of whatever called them.
122
+
123
+ ### Capture or hide inputs/outputs
124
+
125
+ ```python
126
+ @trace # captures args and return value by default
127
+ def search_web(query): ...
128
+
129
+ @trace(capture_io=False) # opt out for sensitive data
130
+ def get_api_key(): ...
131
+ ```
132
+
133
+ ### Manual spans
134
+
135
+ For cases where a decorator doesn't fit:
136
+
137
+ ```python
138
+ from peekr import start_span, end_span
139
+
140
+ span, token = start_span("my.operation")
141
+ span.attributes["custom_key"] = "custom_value"
142
+ try:
143
+ do_work()
144
+ span.status = "ok"
145
+ except Exception as e:
146
+ span.status = "error"
147
+ span.attributes["error"] = str(e)
148
+ raise
149
+ finally:
150
+ end_span(span, token)
151
+ ```
152
+
153
+ ### Viewing traces
154
+
155
+ ```bash
156
+ # Basic tree view
157
+ peekr view traces.jsonl
158
+
159
+ # Show inputs and outputs
160
+ peekr view --io traces.jsonl
161
+ ```
162
+
163
+ ---
164
+
165
+ ## What gets captured
166
+
167
+ | Field | Description |
168
+ |---|---|
169
+ | `name` | Span name (function name or custom) |
170
+ | `duration_ms` | Wall-clock time |
171
+ | `status` | `ok` or `error` |
172
+ | `model` | LLM model name (auto) |
173
+ | `tokens_input` | Prompt tokens (auto) |
174
+ | `tokens_output` | Completion tokens (auto) |
175
+ | `tokens_total` | Total tokens (auto) |
176
+ | `input` | Serialized function args (truncated) |
177
+ | `output` | Serialized return value (truncated) |
178
+ | `error` | Exception message if status is `error` |
179
+
180
+ ---
181
+
182
+ ## Custom exporters
183
+
184
+ ```python
185
+ from peekr.exporters import add_exporter
186
+
187
+ class MyExporter:
188
+ def export(self, span):
189
+ # send to your backend
190
+ requests.post("https://my-backend.com/spans", json=span.to_dict())
191
+
192
+ peekr.instrument()
193
+ add_exporter(MyExporter())
194
+ ```
195
+
196
+ ---
197
+
198
+ ## How it works
199
+
200
+ `instrument()` monkey-patches the OpenAI and Anthropic SDK methods before your code runs. Python looks up function references at call time, so every subsequent call hits the wrapper instead of the original — with zero changes to your code.
201
+
202
+ Span context (parent/child relationships) is tracked via Python's `contextvars.ContextVar`, which propagates correctly across `async/await` without any manual passing.
203
+
204
+ ---
205
+
206
+ ## Contributing
207
+
208
+ ```bash
209
+ git clone https://github.com/ashwanijha04/peekr
210
+ cd peekr
211
+ pip install -e ".[dev]"
212
+ pytest
213
+ ```
214
+
215
+ PRs welcome. Open an issue first for large changes.
216
+
217
+ ---
218
+
219
+ ## License
220
+
221
+ MIT
peekr-0.1.0/README.md ADDED
@@ -0,0 +1,202 @@
1
+ # peekr
2
+
3
+ [![PyPI](https://img.shields.io/pypi/v/peekr)](https://pypi.org/project/peekr/)
4
+ [![CI](https://github.com/ashwanijha04/peekr/actions/workflows/ci.yml/badge.svg)](https://github.com/ashwanijha04/peekr/actions/workflows/ci.yml)
5
+ [![License: MIT](https://img.shields.io/badge/License-MIT-blue.svg)](LICENSE)
6
+ [![Python 3.9+](https://img.shields.io/badge/python-3.9+-blue.svg)](https://www.python.org/)
7
+
8
+ Zero-config observability for AI agents. Auto-instruments OpenAI and Anthropic SDKs — no code changes needed.
9
+
10
+ ```
11
+ pip install peekr
12
+ ```
13
+
14
+ ---
15
+
16
+ ## Quickstart
17
+
18
+ ```python
19
+ import peekr
20
+ peekr.instrument()
21
+
22
+ # Your existing agent code — zero changes
23
+ import openai
24
+ openai.chat.completions.create(model="gpt-4o", messages=[...])
25
+ ```
26
+
27
+ Every LLM call is automatically captured. View your traces:
28
+
29
+ ```bash
30
+ peekr view traces.jsonl
31
+ ```
32
+
33
+ ```
34
+ Trace a3f2b1c0  1243ms 891 tokens
35
+ ────────────────────────────────────────────────
36
+ agent.run 1243ms
37
+ └─ openai.chat.completions [gpt-4o] 821ms 312tok
38
+ └─ tool.search_web 12ms
39
+ └─ openai.chat.completions [gpt-4o] 410ms 579tok
40
+ ```
41
+
42
+ ---
43
+
44
+ ## Installation
45
+
46
+ ```bash
47
+ # Base (no LLM SDK required)
48
+ pip install peekr
49
+
50
+ # With OpenAI
51
+ pip install "peekr[openai]"
52
+
53
+ # With Anthropic
54
+ pip install "peekr[anthropic]"
55
+
56
+ # Both
57
+ pip install "peekr[all]"
58
+ ```
59
+
60
+ ---
61
+
62
+ ## Usage
63
+
64
+ ### Auto-instrument LLM SDKs
65
+
66
+ ```python
67
+ import peekr
68
+
69
+ peekr.instrument()
70
+ # That's it. All OpenAI and Anthropic calls are now traced.
71
+ ```
72
+
73
+ Options:
74
+
75
+ ```python
76
+ peekr.instrument(
77
+ console=True, # print spans as they happen (default: True)
78
+ jsonl_path="traces.jsonl", # write to file (default: traces.jsonl)
79
+     # ...or pass jsonl_path=None instead to disable file output entirely
80
+ )
81
+ ```
82
+
83
+ ### Trace your own functions
84
+
85
+ ```python
86
+ from peekr import trace
87
+
88
+ @trace
89
+ def search_web(query: str) -> list[str]:
90
+ ...
91
+
92
+ @trace(name="tool.calculator")
93
+ def calculate(expression: str) -> float:
94
+ ...
95
+
96
+ # Async works too
97
+ @trace
98
+ async def fetch_data(url: str) -> dict:
99
+ ...
100
+ ```
101
+
102
+ Decorated functions automatically become child spans of whatever called them.
103
+
104
+ ### Capture or hide inputs/outputs
105
+
106
+ ```python
107
+ @trace # captures args and return value by default
108
+ def search_web(query): ...
109
+
110
+ @trace(capture_io=False) # opt out for sensitive data
111
+ def get_api_key(): ...
112
+ ```
113
+
114
+ ### Manual spans
115
+
116
+ For cases where a decorator doesn't fit:
117
+
118
+ ```python
119
+ from peekr import start_span, end_span
120
+
121
+ span, token = start_span("my.operation")
122
+ span.attributes["custom_key"] = "custom_value"
123
+ try:
124
+ do_work()
125
+ span.status = "ok"
126
+ except Exception as e:
127
+ span.status = "error"
128
+ span.attributes["error"] = str(e)
129
+ raise
130
+ finally:
131
+ end_span(span, token)
132
+ ```
133
+
134
+ ### Viewing traces
135
+
136
+ ```bash
137
+ # Basic tree view
138
+ peekr view traces.jsonl
139
+
140
+ # Show inputs and outputs
141
+ peekr view --io traces.jsonl
142
+ ```
143
+
144
+ ---
145
+
146
+ ## What gets captured
147
+
148
+ | Field | Description |
149
+ |---|---|
150
+ | `name` | Span name (function name or custom) |
151
+ | `duration_ms` | Wall-clock time |
152
+ | `status` | `ok` or `error` |
153
+ | `model` | LLM model name (auto) |
154
+ | `tokens_input` | Prompt tokens (auto) |
155
+ | `tokens_output` | Completion tokens (auto) |
156
+ | `tokens_total` | Total tokens (auto) |
157
+ | `input` | Serialized function args (truncated) |
158
+ | `output` | Serialized return value (truncated) |
159
+ | `error` | Exception message if status is `error` |
160
+
161
+ ---
162
+
163
+ ## Custom exporters
164
+
165
+ ```python
166
+ from peekr.exporters import add_exporter
167
+
168
+ class MyExporter:
169
+ def export(self, span):
170
+ # send to your backend
171
+ requests.post("https://my-backend.com/spans", json=span.to_dict())
172
+
173
+ peekr.instrument()
174
+ add_exporter(MyExporter())
175
+ ```
176
+
177
+ ---
178
+
179
+ ## How it works
180
+
181
+ `instrument()` monkey-patches the OpenAI and Anthropic SDK methods before your code runs. Python looks up function references at call time, so every subsequent call hits the wrapper instead of the original — with zero changes to your code.
182
+
183
+ Span context (parent/child relationships) is tracked via Python's `contextvars.ContextVar`, which propagates correctly across `async/await` without any manual passing.
184
+
185
+ ---
186
+
187
+ ## Contributing
188
+
189
+ ```bash
190
+ git clone https://github.com/ashwanijha04/peekr
191
+ cd peekr
192
+ pip install -e ".[dev]"
193
+ pytest
194
+ ```
195
+
196
+ PRs welcome. Open an issue first for large changes.
197
+
198
+ ---
199
+
200
+ ## License
201
+
202
+ MIT
@@ -0,0 +1,26 @@
1
+ from __future__ import annotations
2
+ from .exporters import add_exporter, JSONLExporter, ConsoleExporter
3
+ from .context import start_span, end_span, get_current_span
4
+ from .decorators import trace
5
+ from .patches.openai_patch import patch_openai
6
+ from .patches.anthropic_patch import patch_anthropic
7
+
8
+
9
def instrument(exporter=None, console: bool = True, jsonl_path: str | None = "traces.jsonl"):
    """Auto-instrument the OpenAI and Anthropic SDKs.

    Call this once, before any LLM calls are made.

    Args:
        exporter: custom exporter instance; when given it replaces the
            default console/JSONL exporters entirely.
        console: if no custom exporter is given, print spans to stdout.
        jsonl_path: if no custom exporter is given, append spans to this
            JSONL file; pass ``None`` to disable file output.

    NOTE(review): calling this twice registers duplicate exporters —
    confirm patch_openai()/patch_anthropic() are idempotent before
    relying on repeated calls.
    """
    sinks = [exporter] if exporter else []
    if not sinks:
        # Default sinks: live console echo plus an append-only JSONL file.
        if console:
            sinks.append(ConsoleExporter())
        if jsonl_path:
            sinks.append(JSONLExporter(jsonl_path))
    for sink in sinks:
        add_exporter(sink)

    patch_openai()
    patch_anthropic()
24
+
25
+
26
# Public API: tracing entry points plus the built-in exporters.
__all__ = ["instrument", "trace", "start_span", "end_span", "get_current_span", "JSONLExporter", "ConsoleExporter"]
@@ -0,0 +1,71 @@
1
+ from __future__ import annotations
2
+ import json
3
+ import sys
4
+ from collections import defaultdict
5
+
6
+
7
def main():
    """CLI entry point: ``peekr view [--io] <traces.jsonl>``."""
    argv = sys.argv
    if len(argv) < 2:
        print("Usage: peekr view [--io] <traces.jsonl>")
        sys.exit(1)

    command = argv[1]
    if command != "view":
        print(f"Unknown command: {command}")
        sys.exit(1)

    rest = argv[2:]
    show_io = "--io" in rest
    # Anything that is not a --flag is treated as the traces path.
    positional = [arg for arg in rest if not arg.startswith("--")]
    path = positional[0] if positional else "traces.jsonl"
    view_traces(path, show_io=show_io)
22
+
23
+
24
def view_traces(path: str, show_io: bool = False):
    """Render each trace in *path* (JSONL, one span dict per line) as a tree.

    Args:
        path: path to a JSONL traces file written by JSONLExporter.
        show_io: when True, also print captured inputs/outputs per span.

    Prints a short message and returns if the file does not exist.
    """
    try:
        with open(path) as f:
            spans = [json.loads(line) for line in f if line.strip()]
    except FileNotFoundError:
        print(f"No traces file at {path}")
        return

    # Group spans by trace so each trace renders as its own tree.
    traces = defaultdict(list)
    for span in spans:
        traces[span["trace_id"]].append(span)

    for i, (trace_id, trace_spans) in enumerate(traces.items()):
        if i > 0:
            print()
        # Fix: use .get("parent_id") like _print_span does, so a span
        # missing the key doesn't raise KeyError here.
        roots = [s for s in trace_spans if s.get("parent_id") is None]
        total_ms = sum(s.get("duration_ms") or 0 for s in roots)
        total_tokens = sum(s.get("attributes", {}).get("tokens_total", 0) for s in trace_spans)
        token_str = f" {total_tokens} tokens" if total_tokens else ""
        print(f"Trace {trace_id[:8]}  {total_ms:.0f}ms{token_str}")
        print("─" * 48)
        for root in roots:
            _print_span(root, trace_spans, indent=0, show_io=show_io)
47
+
48
+
49
+ def _print_span(span, all_spans, indent, show_io):
50
+ duration = f"{span['duration_ms']:.0f}ms" if span.get("duration_ms") else " ?"
51
+ attrs = span.get("attributes", {})
52
+ model = f" [{attrs['model']}]" if "model" in attrs else ""
53
+ tokens = f" {attrs['tokens_total']}tok" if "tokens_total" in attrs else ""
54
+ error = " \033[31mERROR\033[0m" if span["status"] == "error" else ""
55
+
56
+ connector = "└─ " if indent > 0 else ""
57
+ prefix = " " * indent + connector
58
+ print(f"{prefix}\033[1m{span['name']}\033[0m{model} {duration}{tokens}{error}")
59
+
60
+ if show_io:
61
+ io_prefix = " " * (indent + 1)
62
+ if "input" in attrs:
63
+ print(f"{io_prefix}\033[2min: {attrs['input'][:120]}\033[0m")
64
+ if "output" in attrs:
65
+ print(f"{io_prefix}\033[2mout: {attrs['output'][:120]}\033[0m")
66
+ if "error" in attrs and span["status"] == "error":
67
+ print(f"{io_prefix}\033[31merr: {attrs['error']}\033[0m")
68
+
69
+ children = [s for s in all_spans if s.get("parent_id") == span["span_id"]]
70
+ for child in children:
71
+ _print_span(child, all_spans, indent + 1, show_io)
@@ -0,0 +1,38 @@
1
+ from __future__ import annotations
2
+ import uuid
3
+ from contextvars import ContextVar
4
+ from typing import Optional
5
+
6
+ from .span import Span
7
+
8
# Ambient tracing state. ContextVar gives each execution context its own
# current span / trace id, so parent/child linkage propagates across
# async/await without any explicit argument passing.
_current_span: ContextVar[Optional[Span]] = ContextVar("current_span", default=None)
_current_trace_id: ContextVar[Optional[str]] = ContextVar("current_trace_id", default=None)
10
+
11
+
12
def get_current_span() -> Optional[Span]:
    """Return the span active in the current context, or None if outside any span."""
    return _current_span.get()
14
+
15
+
16
def get_or_create_trace_id() -> str:
    """Return this context's trace id, minting a fresh one on first use."""
    existing = _current_trace_id.get()
    if existing is not None:
        return existing
    fresh = uuid.uuid4().hex
    _current_trace_id.set(fresh)
    return fresh
22
+
23
+
24
def start_span(name: str) -> tuple[Span, object]:
    """Open a new span as a child of the current one and make it current.

    Args:
        name: span name (e.g. function qualname or "openai.chat.completions").

    Returns:
        (span, token): the new Span plus the ContextVar reset token.
        Pass both to end_span() to finish the span and restore the
        previously-current one.

    Note: the return annotation previously declared a 3-tuple while the
    function returns a 2-tuple; fixed to match the actual value.
    """
    trace_id = get_or_create_trace_id()
    parent = get_current_span()
    span = Span(
        name=name,
        trace_id=trace_id,
        # Root spans have no parent; children link to the ambient span.
        parent_id=parent.span_id if parent else None,
    )
    span_token = _current_span.set(span)
    return span, span_token
34
+
35
+
36
def end_span(span: Span, span_token: object) -> None:
    """Finalize *span* (via Span.finish) and restore the previously-current span.

    `span_token` must be the token returned by the matching start_span() call.
    """
    span.finish()
    _current_span.reset(span_token)
@@ -0,0 +1,81 @@
1
+ from __future__ import annotations
2
+ import asyncio
3
+ import functools
4
+ import json
5
+ from typing import Callable
6
+
7
+ from .context import start_span, end_span
8
+ from .exporters import export_span
9
+
10
+ _TRUNCATE = 500
11
+
12
+
13
+ def _serialize(value) -> str:
14
+ try:
15
+ s = json.dumps(value, default=str)
16
+ except Exception:
17
+ s = str(value)
18
+ return s if len(s) <= _TRUNCATE else s[:_TRUNCATE] + "…"
19
+
20
+
21
def trace(_func=None, *, name: str | None = None, capture_io: bool = True):
    """Decorator that records any sync or async function call as a span.

    Usage:
        @trace
        def my_tool(query): ...

        @trace(name="tool.search")
        def search(query): ...

        @trace(name="tool.search", capture_io=False)
        def search(query): ...

    Args:
        name: span name; defaults to "<module>.<qualname>" of the function.
        capture_io: record serialized args/kwargs and return value on the span.
    """
    def decorator(func: Callable) -> Callable:
        label = name or f"{func.__module__}.{func.__qualname__}"

        def _open(args, kwargs):
            # Start a child span of whatever is currently active.
            span, token = start_span(label)
            if capture_io:
                span.attributes["input"] = _serialize({"args": args, "kwargs": kwargs})
            return span, token

        def _record_ok(span, result):
            span.status = "ok"
            if capture_io:
                span.attributes["output"] = _serialize(result)

        def _record_err(span, exc):
            span.status = "error"
            span.attributes["error"] = str(exc)

        def _close(span, token):
            # Always close and export, whether the call succeeded or raised.
            end_span(span, token)
            export_span(span)

        if asyncio.iscoroutinefunction(func):
            @functools.wraps(func)
            async def async_wrapper(*args, **kwargs):
                span, token = _open(args, kwargs)
                try:
                    result = await func(*args, **kwargs)
                    _record_ok(span, result)
                    return result
                except Exception as exc:
                    _record_err(span, exc)
                    raise
                finally:
                    _close(span, token)
            return async_wrapper

        @functools.wraps(func)
        def sync_wrapper(*args, **kwargs):
            span, token = _open(args, kwargs)
            try:
                result = func(*args, **kwargs)
                _record_ok(span, result)
                return result
            except Exception as exc:
                _record_err(span, exc)
                raise
            finally:
                _close(span, token)
        return sync_wrapper

    # Support both bare @trace and parameterized @trace(...) forms.
    if _func is not None:
        return decorator(_func)
    return decorator
@@ -0,0 +1,36 @@
1
+ import json
2
+ import os
3
+ from .span import Span
4
+
5
+
6
class JSONLExporter:
    """Appends each finished span as one JSON object per line to a file."""

    def __init__(self, path: str = "traces.jsonl"):
        # Target file; created on first export if it does not exist.
        self.path = path

    def export(self, span: Span) -> None:
        """Serialize *span* and append it to the JSONL file."""
        record = json.dumps(span.to_dict())
        with open(self.path, "a") as sink:
            sink.write(record + "\n")
13
+
14
+
15
class ConsoleExporter:
    """Prints a one-line summary of each finished span to stdout."""

    def export(self, span: Span) -> None:
        """Print "[name] duration model=... tokens=..." for *span*."""
        duration = f"{span.duration_ms:.1f}ms" if span.duration_ms else "?"
        # Child spans are visually nested one level under their parent.
        indent = "  " if span.parent_id else ""
        parts = [f"{indent}[{span.name}] {duration}"]
        if "model" in span.attributes:
            parts.append(f"model={span.attributes['model']}")
        if "tokens_total" in span.attributes:
            parts.append(f"tokens={span.attributes['tokens_total']}")
        print(" ".join(parts))
25
+
26
+
27
# Module-level registry of active exporters; populated via add_exporter().
_exporters: list = []


def add_exporter(exporter) -> None:
    """Register *exporter* to receive every finished span.

    Any object with an ``export(span)`` method qualifies.
    """
    _exporters.append(exporter)


def export_span(span: Span) -> None:
    """Fan *span* out to every registered exporter, in registration order."""
    for sink in _exporters:
        sink.export(span)
File without changes