rewind-agent 0.1.0__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
|
@@ -0,0 +1,26 @@
|
|
|
1
|
+
# Build
|
|
2
|
+
/target/
|
|
3
|
+
*.swp
|
|
4
|
+
*.swo
|
|
5
|
+
|
|
6
|
+
# macOS
|
|
7
|
+
.DS_Store
|
|
8
|
+
|
|
9
|
+
# IDE
|
|
10
|
+
.cursor/
|
|
11
|
+
.idea/
|
|
12
|
+
.vscode/
|
|
13
|
+
|
|
14
|
+
# Rewind data (user's local recordings)
|
|
15
|
+
.rewind/
|
|
16
|
+
|
|
17
|
+
# Python
|
|
18
|
+
__pycache__/
|
|
19
|
+
*.pyc
|
|
20
|
+
*.egg-info/
|
|
21
|
+
dist/
|
|
22
|
+
build/
|
|
23
|
+
|
|
24
|
+
# Env
|
|
25
|
+
.env
|
|
26
|
+
.env.local
|
|
@@ -0,0 +1,96 @@
|
|
|
1
|
+
Metadata-Version: 2.4
|
|
2
|
+
Name: rewind-agent
|
|
3
|
+
Version: 0.1.0
|
|
4
|
+
Summary: Chrome DevTools for AI agents — record, inspect, fork, replay, diff.
|
|
5
|
+
Project-URL: Homepage, https://github.com/risjai/rewind
|
|
6
|
+
Project-URL: Repository, https://github.com/risjai/rewind
|
|
7
|
+
Project-URL: Issues, https://github.com/risjai/rewind/issues
|
|
8
|
+
Project-URL: Changelog, https://github.com/risjai/rewind/blob/main/CHANGELOG.md
|
|
9
|
+
Author: Rewind Contributors
|
|
10
|
+
License-Expression: MIT
|
|
11
|
+
Keywords: agents,ai,anthropic,crewai,debugging,langgraph,llm,observability,openai,time-travel
|
|
12
|
+
Classifier: Development Status :: 3 - Alpha
|
|
13
|
+
Classifier: Intended Audience :: Developers
|
|
14
|
+
Classifier: License :: OSI Approved :: MIT License
|
|
15
|
+
Classifier: Programming Language :: Python :: 3
|
|
16
|
+
Classifier: Programming Language :: Python :: 3.9
|
|
17
|
+
Classifier: Programming Language :: Python :: 3.10
|
|
18
|
+
Classifier: Programming Language :: Python :: 3.11
|
|
19
|
+
Classifier: Programming Language :: Python :: 3.12
|
|
20
|
+
Classifier: Programming Language :: Python :: 3.13
|
|
21
|
+
Classifier: Topic :: Software Development :: Debuggers
|
|
22
|
+
Classifier: Topic :: Software Development :: Libraries :: Python Modules
|
|
23
|
+
Requires-Python: >=3.9
|
|
24
|
+
Provides-Extra: all
|
|
25
|
+
Requires-Dist: anthropic>=0.18; extra == 'all'
|
|
26
|
+
Requires-Dist: openai>=1.0; extra == 'all'
|
|
27
|
+
Provides-Extra: anthropic
|
|
28
|
+
Requires-Dist: anthropic>=0.18; extra == 'anthropic'
|
|
29
|
+
Provides-Extra: openai
|
|
30
|
+
Requires-Dist: openai>=1.0; extra == 'openai'
|
|
31
|
+
Description-Content-Type: text/markdown
|
|
32
|
+
|
|
33
|
+
# rewind-agent
|
|
34
|
+
|
|
35
|
+
**Python SDK for [Rewind](https://github.com/risjai/rewind) — the time-travel debugger for AI agents.**
|
|
36
|
+
|
|
37
|
+
Record every LLM call. See the exact context window. Fork, fix, replay — without re-running.
|
|
38
|
+
|
|
39
|
+
## Install
|
|
40
|
+
|
|
41
|
+
```bash
|
|
42
|
+
pip install rewind-agent
|
|
43
|
+
```
|
|
44
|
+
|
|
45
|
+
Requires the Rewind CLI for recording. Install with:
|
|
46
|
+
|
|
47
|
+
```bash
|
|
48
|
+
curl -fsSL https://raw.githubusercontent.com/risjai/rewind/main/install.sh | sh
|
|
49
|
+
```
|
|
50
|
+
|
|
51
|
+
## Quick Start
|
|
52
|
+
|
|
53
|
+
```python
|
|
54
|
+
import rewind_agent
|
|
55
|
+
|
|
56
|
+
# Auto-patches OpenAI/Anthropic clients to route through the Rewind proxy
|
|
57
|
+
rewind_agent.init()
|
|
58
|
+
|
|
59
|
+
# Your existing agent code runs unchanged — all LLM calls are recorded
|
|
60
|
+
client = openai.OpenAI()
|
|
61
|
+
client.chat.completions.create(model="gpt-4o", messages=[...])
|
|
62
|
+
```
|
|
63
|
+
|
|
64
|
+
## Agent Hooks
|
|
65
|
+
|
|
66
|
+
Enrich recordings with semantic labels:
|
|
67
|
+
|
|
68
|
+
```python
|
|
69
|
+
@rewind_agent.step("search")
|
|
70
|
+
def search(query: str) -> str:
|
|
71
|
+
return client.chat.completions.create(...)
|
|
72
|
+
|
|
73
|
+
@rewind_agent.tool("calculator")
|
|
74
|
+
def calculate(expr: str) -> float:
|
|
75
|
+
return eval(expr)
|
|
76
|
+
|
|
77
|
+
with rewind_agent.trace("analysis"):
|
|
78
|
+
rewind_agent.annotate("confidence", 0.92)
|
|
79
|
+
result = search("Tokyo population")
|
|
80
|
+
```
|
|
81
|
+
|
|
82
|
+
## Framework Adapters
|
|
83
|
+
|
|
84
|
+
```python
|
|
85
|
+
# LangGraph
|
|
86
|
+
graph = rewind_agent.wrap_langgraph(compiled_graph)
|
|
87
|
+
|
|
88
|
+
# CrewAI
|
|
89
|
+
crew = rewind_agent.wrap_crew(crew)
|
|
90
|
+
```
|
|
91
|
+
|
|
92
|
+
## Learn More
|
|
93
|
+
|
|
94
|
+
- [GitHub](https://github.com/risjai/rewind)
|
|
95
|
+
- [Changelog](https://github.com/risjai/rewind/blob/main/CHANGELOG.md)
|
|
96
|
+
- [Examples](https://github.com/risjai/rewind/tree/main/examples)
|
|
@@ -0,0 +1,64 @@
|
|
|
1
|
+
# rewind-agent
|
|
2
|
+
|
|
3
|
+
**Python SDK for [Rewind](https://github.com/risjai/rewind) — the time-travel debugger for AI agents.**
|
|
4
|
+
|
|
5
|
+
Record every LLM call. See the exact context window. Fork, fix, replay — without re-running.
|
|
6
|
+
|
|
7
|
+
## Install
|
|
8
|
+
|
|
9
|
+
```bash
|
|
10
|
+
pip install rewind-agent
|
|
11
|
+
```
|
|
12
|
+
|
|
13
|
+
Requires the Rewind CLI for recording. Install with:
|
|
14
|
+
|
|
15
|
+
```bash
|
|
16
|
+
curl -fsSL https://raw.githubusercontent.com/risjai/rewind/main/install.sh | sh
|
|
17
|
+
```
|
|
18
|
+
|
|
19
|
+
## Quick Start
|
|
20
|
+
|
|
21
|
+
```python
|
|
22
|
+
import rewind_agent
|
|
23
|
+
|
|
24
|
+
# Auto-patches OpenAI/Anthropic clients to route through the Rewind proxy
|
|
25
|
+
rewind_agent.init()
|
|
26
|
+
|
|
27
|
+
# Your existing agent code runs unchanged — all LLM calls are recorded
|
|
28
|
+
client = openai.OpenAI()
|
|
29
|
+
client.chat.completions.create(model="gpt-4o", messages=[...])
|
|
30
|
+
```
|
|
31
|
+
|
|
32
|
+
## Agent Hooks
|
|
33
|
+
|
|
34
|
+
Enrich recordings with semantic labels:
|
|
35
|
+
|
|
36
|
+
```python
|
|
37
|
+
@rewind_agent.step("search")
|
|
38
|
+
def search(query: str) -> str:
|
|
39
|
+
return client.chat.completions.create(...)
|
|
40
|
+
|
|
41
|
+
@rewind_agent.tool("calculator")
|
|
42
|
+
def calculate(expr: str) -> float:
|
|
43
|
+
return eval(expr)
|
|
44
|
+
|
|
45
|
+
with rewind_agent.trace("analysis"):
|
|
46
|
+
rewind_agent.annotate("confidence", 0.92)
|
|
47
|
+
result = search("Tokyo population")
|
|
48
|
+
```
|
|
49
|
+
|
|
50
|
+
## Framework Adapters
|
|
51
|
+
|
|
52
|
+
```python
|
|
53
|
+
# LangGraph
|
|
54
|
+
graph = rewind_agent.wrap_langgraph(compiled_graph)
|
|
55
|
+
|
|
56
|
+
# CrewAI
|
|
57
|
+
crew = rewind_agent.wrap_crew(crew)
|
|
58
|
+
```
|
|
59
|
+
|
|
60
|
+
## Learn More
|
|
61
|
+
|
|
62
|
+
- [GitHub](https://github.com/risjai/rewind)
|
|
63
|
+
- [Changelog](https://github.com/risjai/rewind/blob/main/CHANGELOG.md)
|
|
64
|
+
- [Examples](https://github.com/risjai/rewind/tree/main/examples)
|
|
@@ -0,0 +1,54 @@
|
|
|
1
|
+
[build-system]
|
|
2
|
+
requires = ["hatchling"]
|
|
3
|
+
build-backend = "hatchling.build"
|
|
4
|
+
|
|
5
|
+
[project]
|
|
6
|
+
name = "rewind-agent"
|
|
7
|
+
version = "0.1.0"
|
|
8
|
+
description = "Chrome DevTools for AI agents — record, inspect, fork, replay, diff."
|
|
9
|
+
readme = "README.md"
|
|
10
|
+
license = "MIT"
|
|
11
|
+
requires-python = ">=3.9"
|
|
12
|
+
authors = [
|
|
13
|
+
{name = "Rewind Contributors"},
|
|
14
|
+
]
|
|
15
|
+
keywords = [
|
|
16
|
+
"ai",
|
|
17
|
+
"agents",
|
|
18
|
+
"debugging",
|
|
19
|
+
"observability",
|
|
20
|
+
"time-travel",
|
|
21
|
+
"llm",
|
|
22
|
+
"openai",
|
|
23
|
+
"anthropic",
|
|
24
|
+
"langgraph",
|
|
25
|
+
"crewai",
|
|
26
|
+
]
|
|
27
|
+
classifiers = [
|
|
28
|
+
"Development Status :: 3 - Alpha",
|
|
29
|
+
"Intended Audience :: Developers",
|
|
30
|
+
"License :: OSI Approved :: MIT License",
|
|
31
|
+
"Programming Language :: Python :: 3",
|
|
32
|
+
"Programming Language :: Python :: 3.9",
|
|
33
|
+
"Programming Language :: Python :: 3.10",
|
|
34
|
+
"Programming Language :: Python :: 3.11",
|
|
35
|
+
"Programming Language :: Python :: 3.12",
|
|
36
|
+
"Programming Language :: Python :: 3.13",
|
|
37
|
+
"Topic :: Software Development :: Debuggers",
|
|
38
|
+
"Topic :: Software Development :: Libraries :: Python Modules",
|
|
39
|
+
]
|
|
40
|
+
dependencies = []
|
|
41
|
+
|
|
42
|
+
[project.optional-dependencies]
|
|
43
|
+
openai = ["openai>=1.0"]
|
|
44
|
+
anthropic = ["anthropic>=0.18"]
|
|
45
|
+
all = ["openai>=1.0", "anthropic>=0.18"]
|
|
46
|
+
|
|
47
|
+
[project.urls]
|
|
48
|
+
Homepage = "https://github.com/risjai/rewind"
|
|
49
|
+
Repository = "https://github.com/risjai/rewind"
|
|
50
|
+
Issues = "https://github.com/risjai/rewind/issues"
|
|
51
|
+
Changelog = "https://github.com/risjai/rewind/blob/main/CHANGELOG.md"
|
|
52
|
+
|
|
53
|
+
[tool.hatch.build.targets.wheel]
|
|
54
|
+
packages = ["rewind_agent"]
|
|
@@ -0,0 +1,45 @@
|
|
|
1
|
+
"""
|
|
2
|
+
Rewind Agent SDK — Chrome DevTools for AI agents.
|
|
3
|
+
|
|
4
|
+
Usage:
|
|
5
|
+
import rewind_agent
|
|
6
|
+
|
|
7
|
+
# Auto-patch OpenAI/Anthropic to route through the proxy
|
|
8
|
+
rewind_agent.init()
|
|
9
|
+
|
|
10
|
+
# Decorate agent steps for richer traces
|
|
11
|
+
@rewind_agent.step("search")
|
|
12
|
+
def search(query):
|
|
13
|
+
return client.chat.completions.create(...)
|
|
14
|
+
|
|
15
|
+
# Wrap LangGraph / CrewAI for automatic instrumentation
|
|
16
|
+
graph = rewind_agent.wrap_langgraph(graph)
|
|
17
|
+
crew = rewind_agent.wrap_crew(crew)
|
|
18
|
+
"""
|
|
19
|
+
|
|
20
|
+
from .patch import init, uninit, session
|
|
21
|
+
from .hooks import (
|
|
22
|
+
step,
|
|
23
|
+
node,
|
|
24
|
+
tool,
|
|
25
|
+
trace,
|
|
26
|
+
annotate,
|
|
27
|
+
get_annotations,
|
|
28
|
+
wrap_langgraph,
|
|
29
|
+
wrap_crew,
|
|
30
|
+
)
|
|
31
|
+
|
|
32
|
+
__all__ = [
|
|
33
|
+
"init",
|
|
34
|
+
"uninit",
|
|
35
|
+
"session",
|
|
36
|
+
"step",
|
|
37
|
+
"node",
|
|
38
|
+
"tool",
|
|
39
|
+
"trace",
|
|
40
|
+
"annotate",
|
|
41
|
+
"get_annotations",
|
|
42
|
+
"wrap_langgraph",
|
|
43
|
+
"wrap_crew",
|
|
44
|
+
]
|
|
45
|
+
__version__ = "0.1.0"
|
|
@@ -0,0 +1,273 @@
|
|
|
1
|
+
"""
|
|
2
|
+
Rewind Hooks — Framework-agnostic decorators for enriching agent recordings.
|
|
3
|
+
|
|
4
|
+
These decorators work with ANY Python agent framework. They annotate
|
|
5
|
+
the LLM calls captured by the Rewind proxy with semantic metadata:
|
|
6
|
+
node names, agent state, step descriptions.
|
|
7
|
+
|
|
8
|
+
Works with LangGraph, CrewAI, OpenAI Agents SDK, or plain functions.
|
|
9
|
+
"""
|
|
10
|
+
|
|
11
|
+
import functools
|
|
12
|
+
import json
|
|
13
|
+
import os
|
|
14
|
+
import time
|
|
15
|
+
import urllib.request
|
|
16
|
+
from contextlib import contextmanager
|
|
17
|
+
from typing import Any, Callable
|
|
18
|
+
|
|
19
|
+
# Base URL of the local Rewind recording proxy; override with the
# REWIND_PROXY environment variable.
REWIND_PROXY = os.environ.get("REWIND_PROXY", "http://127.0.0.1:8443")

# Monotonically increasing id shared by step() and trace() within this
# process.  NOTE(review): plain int increment — not thread-safe; confirm
# whether concurrent agents are expected.
_step_counter = 0
# In-process log of every annotation event recorded this session;
# exposed read-only via get_annotations().
_annotations: list[dict] = []
|
|
23
|
+
|
|
24
|
+
|
|
25
|
+
def step(name: "str | None" = None, metadata: "dict | None" = None):
    """
    Decorator that records a function as a named agent step.

    Emits a ``step_start`` annotation before the wrapped call and a
    ``step_end`` annotation (duration, error string, result preview)
    after it — including when the call raises.

    Args:
        name: Step name; defaults to the wrapped function's ``__name__``.
        metadata: Extra key/value pairs attached to the ``step_start`` event.

    Usage:
        @rewind.step("search")
        def search_web(query: str) -> str:
            return client.chat.completions.create(...)

        @rewind.step("plan", metadata={"agent": "planner"})
        def plan_task(goal: str) -> dict:
            ...
    """
    # Annotations are quoted strings: unquoted `str | None` is evaluated at
    # def-time and raises TypeError on Python 3.9, which the package
    # metadata supports (requires-python >= 3.9).
    def decorator(func: Callable) -> Callable:
        step_name = name or func.__name__

        @functools.wraps(func)
        def wrapper(*args, **kwargs):
            global _step_counter
            _step_counter += 1
            step_id = _step_counter

            start = time.perf_counter()
            error = None
            result = None

            _annotate("step_start", {
                "step_id": step_id,
                "step_name": step_name,
                "metadata": metadata or {},
                "args_preview": _safe_preview(args, kwargs),
            })

            try:
                result = func(*args, **kwargs)
                return result
            except Exception as e:
                error = str(e)
                raise
            finally:
                elapsed_ms = (time.perf_counter() - start) * 1000
                _annotate("step_end", {
                    "step_id": step_id,
                    "step_name": step_name,
                    "duration_ms": round(elapsed_ms, 2),
                    "error": error,
                    # `is not None` so falsy-but-valid results (0, "", [],
                    # False) still get a preview; the previous `if result`
                    # silently dropped them.
                    "result_preview": _safe_str(result)[:200] if result is not None else None,
                })

        return wrapper
    return decorator
|
|
76
|
+
|
|
77
|
+
|
|
78
|
+
def node(name: str):
    """
    Decorator for LangGraph-style graph nodes.

    Thin alias for ``step`` that tags the step with
    ``{"type": "graph_node"}`` metadata.

    Usage:
        @rewind.node("researcher")
        def researcher(state: dict) -> dict:
            ...
    """
    node_metadata = {"type": "graph_node"}
    return step(name=name, metadata=node_metadata)
|
|
88
|
+
|
|
89
|
+
|
|
90
|
+
def tool(name: "str | None" = None):
    """
    Decorator for tool/function calls.

    Thin alias for ``step`` tagged with ``{"type": "tool"}`` metadata;
    the tool name defaults to the decorated function's ``__name__``.

    Usage:
        @rewind.tool("web_search")
        def search(query: str) -> str:
            ...
    """
    # Annotation is quoted: unquoted `str | None` is evaluated at def-time
    # and raises on Python 3.9, which this package claims to support.
    def decorator(func: Callable) -> Callable:
        tool_name = name or func.__name__
        return step(name=tool_name, metadata={"type": "tool"})(func)
    return decorator
|
|
103
|
+
|
|
104
|
+
|
|
105
|
+
@contextmanager
def trace(name: str, metadata: "dict | None" = None):
    """
    Context manager for tracing a block of agent execution.

    Emits a ``trace_start`` annotation on entry and a ``trace_end``
    annotation (duration, error string) on exit — including when the
    body raises.

    Args:
        name: Trace label attached to both events.
        metadata: Extra key/value pairs attached to ``trace_start``.

    Usage:
        with rewind.trace("research_phase"):
            result1 = search(query)
            result2 = analyze(result1)
    """
    # Annotation is quoted: unquoted `dict | None` is evaluated at
    # def-time and raises on Python 3.9 (requires-python >= 3.9).
    global _step_counter
    _step_counter += 1
    step_id = _step_counter
    start = time.perf_counter()

    _annotate("trace_start", {
        "step_id": step_id,
        "trace_name": name,
        "metadata": metadata or {},
    })

    error = None
    try:
        yield
    except Exception as e:
        error = str(e)
        raise
    finally:
        elapsed_ms = (time.perf_counter() - start) * 1000
        _annotate("trace_end", {
            "step_id": step_id,
            "trace_name": name,
            "duration_ms": round(elapsed_ms, 2),
            "error": error,
        })
|
|
140
|
+
|
|
141
|
+
|
|
142
|
+
def annotate(key: str, value: Any):
    """
    Add a custom annotation to the current recording.

    The value is stringified defensively (capped length, never raises)
    before being recorded.

    Usage:
        rewind.annotate("confidence", 0.85)
        rewind.annotate("decision", "retry with different prompt")
    """
    payload = {"key": key, "value": _safe_str(value)}
    _annotate("custom", payload)
|
|
151
|
+
|
|
152
|
+
|
|
153
|
+
def get_annotations() -> list[dict]:
    """Return a snapshot copy of all annotations recorded in this session."""
    return _annotations.copy()
|
|
156
|
+
|
|
157
|
+
|
|
158
|
+
# ── LangGraph adapter ──────────────────────────────────────────
|
|
159
|
+
|
|
160
|
+
def wrap_langgraph(graph, recorder_name: str = "langgraph"):
    """
    Wrap a LangGraph compiled graph to record all node executions.

    Each entry in ``graph.nodes`` (except the synthetic ``__start__`` /
    ``__end__`` markers) is wrapped with the ``step`` decorator under the
    name ``"<recorder_name>/<node_name>"`` and written back into the
    mapping.  Objects without a ``.nodes`` attribute are returned
    unchanged.

    Usage:
        from langgraph.graph import StateGraph
        graph = builder.compile()
        graph = rewind.wrap_langgraph(graph)
        result = graph.invoke({"input": "..."})

    NOTE(review): this assumes ``graph.nodes`` maps names to plain
    callables and that reassigning into that mapping changes what the
    graph executes — in current LangGraph the values are node-spec
    objects, so this may be a no-op at run time; confirm against the
    LangGraph version in use.
    """
    try:
        nodes = graph.nodes
    except AttributeError:
        return graph  # not a LangGraph, return unchanged

    # Snapshot items() so reassignment below can't disturb iteration.
    for node_name, node_fn in list(nodes.items()):
        if node_name in ("__start__", "__end__"):
            continue
        wrapped = step(name=f"{recorder_name}/{node_name}", metadata={"type": "graph_node", "graph": recorder_name})(node_fn)
        nodes[node_name] = wrapped

    return graph
|
|
184
|
+
|
|
185
|
+
|
|
186
|
+
# ── CrewAI adapter ─────────────────────────────────────────────
|
|
187
|
+
|
|
188
|
+
def wrap_crew(crew, recorder_name: str = "crewai"):
    """
    Instrument a CrewAI Crew to record task and step execution.

    Replaces the crew's ``step_callback`` / ``task_callback`` with
    recording wrappers that annotate each event and then delegate to any
    callback that was already installed.

    Usage:
        from crewai import Crew
        crew = Crew(agents=[...], tasks=[...])
        crew = rewind.wrap_crew(crew)
        result = crew.kickoff()

    Hooks into step_callback and task_callback if available.
    """
    # Capture any previously installed callbacks so they keep firing.
    previous_step_cb = getattr(crew, 'step_callback', None)
    previous_task_cb = getattr(crew, 'task_callback', None)

    def _step_callback(output):
        _annotate("crew_step", {
            "recorder": recorder_name,
            "output_preview": _safe_str(output)[:300],
        })
        if previous_step_cb:
            previous_step_cb(output)

    def _task_callback(output):
        _annotate("crew_task_complete", {
            "recorder": recorder_name,
            "output_preview": _safe_str(output)[:300],
        })
        if previous_task_cb:
            previous_task_cb(output)

    try:
        crew.step_callback = _step_callback
        crew.task_callback = _task_callback
    except (AttributeError, TypeError):
        pass  # older CrewAI versions may not support this

    return crew
|
|
229
|
+
|
|
230
|
+
|
|
231
|
+
# ── Internal ───────────────────────────────────────────────────
|
|
232
|
+
|
|
233
|
+
def _annotate(event_type: str, data: dict):
    """Record an annotation locally and best-effort POST it to the proxy.

    The entry is always appended to the in-process ``_annotations`` list.
    The POST to the proxy side-channel is fire-and-forget: any failure
    (proxy down, timeout, bad response) is swallowed.

    NOTE: the POST is synchronous — it can block the caller for up to
    the 0.5 s timeout per annotation when the proxy is unreachable.
    """
    entry = {
        "type": event_type,
        "timestamp": time.time(),
        "data": data,
    }
    _annotations.append(entry)

    # Best-effort POST to the proxy side-channel.
    try:
        payload = json.dumps(entry).encode()
        req = urllib.request.Request(
            f"{REWIND_PROXY}/_rewind/annotate",
            data=payload,
            headers={"Content-Type": "application/json"},
            method="POST",
        )
        # `with` closes the HTTP response; the original leaked the
        # response object returned by urlopen().
        with urllib.request.urlopen(req, timeout=0.5):
            pass
    except Exception:
        pass  # proxy may not be running, that's fine
|
|
254
|
+
|
|
255
|
+
|
|
256
|
+
def _safe_preview(args, kwargs) -> str:
    """Build a short, comma-separated preview of the first few arguments."""
    # At most 3 positional and 3 keyword arguments, each capped at 100 chars.
    previews = [_safe_str(value)[:100] for value in args[:3]]
    previews.extend(
        f"{key}={_safe_str(value)[:100]}"
        for key, value in list(kwargs.items())[:3]
    )
    return ", ".join(previews)
|
|
264
|
+
|
|
265
|
+
|
|
266
|
+
def _safe_str(obj) -> str:
|
|
267
|
+
"""Safely convert any object to string."""
|
|
268
|
+
try:
|
|
269
|
+
if isinstance(obj, (dict, list)):
|
|
270
|
+
return json.dumps(obj, default=str)[:500]
|
|
271
|
+
return str(obj)[:500]
|
|
272
|
+
except Exception:
|
|
273
|
+
return "<unserializable>"
|
|
@@ -0,0 +1,108 @@
|
|
|
1
|
+
"""
|
|
2
|
+
Monkey-patching layer for OpenAI clients.
|
|
3
|
+
|
|
4
|
+
When `init()` is called, it patches the OpenAI client to route
|
|
5
|
+
all API calls through the local Rewind proxy, which records them.
|
|
6
|
+
"""
|
|
7
|
+
|
|
8
|
+
import os
|
|
9
|
+
import contextlib
|
|
10
|
+
from functools import wraps
|
|
11
|
+
|
|
12
|
+
# Value of OPENAI_BASE_URL before init() patched it (None if it was unset);
# restored by uninit().
_original_base_url = None
# Guards against double-initialization; toggled by init()/uninit().
_initialized = False

# Default local Rewind proxy endpoint; init(proxy_url=...) can override.
REWIND_PROXY_URL = "http://127.0.0.1:8443"
|
|
16
|
+
|
|
17
|
+
|
|
18
|
+
def init(proxy_url: "str | None" = None, auto_patch: bool = True):
    """
    Initialize Rewind recording.

    Sets OPENAI_BASE_URL (and ANTHROPIC_BASE_URL) so that clients created
    after this call route through the Rewind proxy.  Idempotent: a second
    call is a no-op until uninit() runs.

    Args:
        proxy_url: Override the default proxy URL (http://127.0.0.1:8443)
        auto_patch: If True, also monkey-patch already-imported OpenAI clients
    """
    # Annotation is quoted: unquoted `str | None` is evaluated at def-time
    # and raises on Python 3.9 (package metadata allows >= 3.9).
    global _original_base_url, _original_anthropic_base_url, _initialized

    if _initialized:
        return

    url = proxy_url or REWIND_PROXY_URL

    # Save originals so uninit() can restore them.  The original code only
    # saved OPENAI_BASE_URL, so a pre-existing ANTHROPIC_BASE_URL was
    # silently lost on uninit().
    _original_base_url = os.environ.get("OPENAI_BASE_URL")
    _original_anthropic_base_url = os.environ.get("ANTHROPIC_BASE_URL")

    # New OpenAI() clients pick this up automatically.
    os.environ["OPENAI_BASE_URL"] = f"{url}/v1"

    # Anthropic clients honor ANTHROPIC_BASE_URL the same way.
    os.environ["ANTHROPIC_BASE_URL"] = f"{url}/anthropic"

    _initialized = True

    if auto_patch:
        _patch_existing_clients(url)

    _print_banner(url)
|
|
51
|
+
|
|
52
|
+
|
|
53
|
+
def uninit():
    """Restore original base URLs and remove patches.

    No-op when init() has not run.
    """
    global _original_base_url, _initialized

    if not _initialized:
        return

    if _original_base_url is not None:
        os.environ["OPENAI_BASE_URL"] = _original_base_url
    else:
        os.environ.pop("OPENAI_BASE_URL", None)

    # Restore any pre-existing ANTHROPIC_BASE_URL instead of unconditionally
    # dropping it.  Looked up via globals() so this still behaves like the
    # original (plain pop) when init() never saved a value.
    saved_anthropic = globals().get("_original_anthropic_base_url")
    if saved_anthropic is not None:
        os.environ["ANTHROPIC_BASE_URL"] = saved_anthropic
    else:
        os.environ.pop("ANTHROPIC_BASE_URL", None)

    _initialized = False
|
|
67
|
+
|
|
68
|
+
|
|
69
|
+
@contextlib.contextmanager
def session(name: str = "default", proxy_url: "str | None" = None):
    """
    Context manager for a Rewind recording session.

    Calls init() on entry and uninit() on exit, even when the body raises.

    Args:
        name: Session label (currently unused — see TODO below).
        proxy_url: Optional proxy URL override forwarded to init().

    Usage:
        with rewind_agent.session("my-agent"):
            client = openai.OpenAI()
            client.chat.completions.create(...)
    """
    # Annotation is quoted: unquoted `str | None` is evaluated at def-time
    # and raises on Python 3.9 (package metadata allows >= 3.9).
    # TODO: Start a named session via the proxy API
    init(proxy_url=proxy_url)
    try:
        yield
    finally:
        uninit()
|
|
85
|
+
|
|
86
|
+
|
|
87
|
+
def _patch_existing_clients(proxy_url: str):
|
|
88
|
+
"""Patch already-instantiated OpenAI clients if the module is loaded."""
|
|
89
|
+
try:
|
|
90
|
+
import openai
|
|
91
|
+
# Patch the module-level default client if it exists
|
|
92
|
+
if hasattr(openai, '_client'):
|
|
93
|
+
openai._client.base_url = f"{proxy_url}/v1"
|
|
94
|
+
except ImportError:
|
|
95
|
+
pass
|
|
96
|
+
|
|
97
|
+
|
|
98
|
+
def _print_banner(proxy_url: str):
|
|
99
|
+
"""Print a nice startup banner."""
|
|
100
|
+
print()
|
|
101
|
+
print(" \033[36m\033[1m⏪ Rewind\033[0m — Recording active")
|
|
102
|
+
print()
|
|
103
|
+
print(f" \033[90mProxy:\033[0m {proxy_url}")
|
|
104
|
+
print(f" \033[90mOpenAI:\033[0m {proxy_url}/v1")
|
|
105
|
+
print()
|
|
106
|
+
print(" \033[33mAll LLM calls are being recorded.\033[0m")
|
|
107
|
+
print(" Run \033[32mrewind show latest\033[0m to see the trace.")
|
|
108
|
+
print()
|