ragaai-catalyst 2.0.7.2__py3-none-any.whl → 2.0.7.2b0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (29)
  1. ragaai_catalyst/evaluation.py +107 -153
  2. ragaai_catalyst/tracers/agentic_tracing/Untitled-1.json +660 -0
  3. ragaai_catalyst/tracers/agentic_tracing/__init__.py +3 -0
  4. ragaai_catalyst/tracers/agentic_tracing/agent_tracer.py +311 -0
  5. ragaai_catalyst/tracers/agentic_tracing/agentic_tracing.py +212 -0
  6. ragaai_catalyst/tracers/agentic_tracing/base.py +270 -0
  7. ragaai_catalyst/tracers/agentic_tracing/data_structure.py +239 -0
  8. ragaai_catalyst/tracers/agentic_tracing/llm_tracer.py +906 -0
  9. ragaai_catalyst/tracers/agentic_tracing/network_tracer.py +286 -0
  10. ragaai_catalyst/tracers/agentic_tracing/sample.py +197 -0
  11. ragaai_catalyst/tracers/agentic_tracing/tool_tracer.py +235 -0
  12. ragaai_catalyst/tracers/agentic_tracing/unique_decorator.py +221 -0
  13. ragaai_catalyst/tracers/agentic_tracing/unique_decorator_test.py +172 -0
  14. ragaai_catalyst/tracers/agentic_tracing/user_interaction_tracer.py +67 -0
  15. ragaai_catalyst/tracers/agentic_tracing/utils/__init__.py +3 -0
  16. ragaai_catalyst/tracers/agentic_tracing/utils/api_utils.py +18 -0
  17. ragaai_catalyst/tracers/agentic_tracing/utils/data_classes.py +61 -0
  18. ragaai_catalyst/tracers/agentic_tracing/utils/generic.py +32 -0
  19. ragaai_catalyst/tracers/agentic_tracing/utils/llm_utils.py +181 -0
  20. ragaai_catalyst/tracers/agentic_tracing/utils/model_costs.json +5946 -0
  21. ragaai_catalyst/tracers/agentic_tracing/utils/trace_utils.py +74 -0
  22. ragaai_catalyst/tracers/tracer.py +26 -4
  23. ragaai_catalyst/tracers/upload_traces.py +127 -0
  24. ragaai_catalyst-2.0.7.2b0.dist-info/METADATA +39 -0
  25. ragaai_catalyst-2.0.7.2b0.dist-info/RECORD +50 -0
  26. ragaai_catalyst-2.0.7.2.dist-info/METADATA +0 -386
  27. ragaai_catalyst-2.0.7.2.dist-info/RECORD +0 -29
  28. {ragaai_catalyst-2.0.7.2.dist-info → ragaai_catalyst-2.0.7.2b0.dist-info}/WHEEL +0 -0
  29. {ragaai_catalyst-2.0.7.2.dist-info → ragaai_catalyst-2.0.7.2b0.dist-info}/top_level.txt +0 -0
ragaai_catalyst/tracers/agentic_tracing/tool_tracer.py
@@ -0,0 +1,235 @@
+ import functools
+ import uuid
+ from datetime import datetime
+ import psutil
+ from typing import Optional, Any, Dict, List
+ from .unique_decorator import mydecorator
+ import contextvars
+ import asyncio
+
+ class ToolTracerMixin:
+     def __init__(self, *args, **kwargs):
+         super().__init__(*args, **kwargs)
+         self.current_tool_name = contextvars.ContextVar("tool_name", default=None)
+         self.current_tool_id = contextvars.ContextVar("tool_id", default=None)
+         self.component_network_calls = {}
+         self._trace_sync_tool_execution = mydecorator(self._trace_sync_tool_execution)
+         self._trace_tool_execution = mydecorator(self._trace_tool_execution)
+
+
+     def trace_tool(self, name: str, tool_type: str = "generic", version: str = "1.0.0"):
+         def decorator(func):
+             # Check if the function is async
+             is_async = asyncio.iscoroutinefunction(func)
+
+             @functools.wraps(func)
+             async def async_wrapper(*args, **kwargs):
+                 return await self._trace_tool_execution(
+                     func, name, tool_type, version, *args, **kwargs
+                 )
+
+             @functools.wraps(func)
+             def sync_wrapper(*args, **kwargs):
+                 return self._trace_sync_tool_execution(
+                     func, name, tool_type, version, *args, **kwargs
+                 )
+
+             return async_wrapper if is_async else sync_wrapper
+
+         return decorator
+
+     def _trace_sync_tool_execution(self, func, name, tool_type, version, *args, **kwargs):
+         """Synchronous version of tool tracing"""
+         if not self.is_active:
+             return func(*args, **kwargs)
+
+         start_time = datetime.now().astimezone()
+         start_memory = psutil.Process().memory_info().rss
+         component_id = str(uuid.uuid4())
+         hash_id = self._trace_sync_tool_execution.hash_id
+
+         # Start tracking network calls for this component
+         self.start_component(component_id)
+
+         try:
+             # Execute the tool
+             result = func(*args, **kwargs)
+
+             # Calculate resource usage
+             end_time = datetime.now().astimezone()
+             end_memory = psutil.Process().memory_info().rss
+             memory_used = max(0, end_memory - start_memory)
+
+             # End tracking network calls for this component
+             self.end_component(component_id)
+
+             # Create tool component
+             tool_component = self.create_tool_component(
+                 component_id=component_id,
+                 hash_id=hash_id,
+                 name=name,
+                 tool_type=tool_type,
+                 version=version,
+                 memory_used=memory_used,
+                 start_time=start_time,
+                 end_time=end_time,
+                 input_data=self._sanitize_input(args, kwargs),
+                 output_data=self._sanitize_output(result)
+             )
+
+             self.add_component(tool_component)
+             return result
+
+         except Exception as e:
+             error_component = {
+                 "code": 500,
+                 "type": type(e).__name__,
+                 "message": str(e),
+                 "details": {}
+             }
+
+             # End tracking network calls for this component
+             self.end_component(component_id)
+
+             end_time = datetime.now().astimezone()
+
+             tool_component = self.create_tool_component(
+                 component_id=component_id,
+                 hash_id=hash_id,
+                 name=name,
+                 tool_type=tool_type,
+                 version=version,
+                 memory_used=0,
+                 start_time=start_time,
+                 end_time=end_time,
+                 input_data=self._sanitize_input(args, kwargs),
+                 output_data=None,
+                 error=error_component
+             )
+
+             self.add_component(tool_component)
+             raise
+
+     async def _trace_tool_execution(self, func, name, tool_type, version, *args, **kwargs):
+         """Asynchronous version of tool tracing"""
+         if not self.is_active:
+             return await func(*args, **kwargs)
+
+         start_time = datetime.now().astimezone()
+         start_memory = psutil.Process().memory_info().rss
+         component_id = str(uuid.uuid4())
+         hash_id = self._trace_tool_execution.hash_id
+
+         try:
+             # Execute the tool
+             result = await func(*args, **kwargs)
+
+             # Calculate resource usage
+             end_time = datetime.now().astimezone()
+             end_memory = psutil.Process().memory_info().rss
+             memory_used = max(0, end_memory - start_memory)
+
+             # Create tool component
+             tool_component = self.create_tool_component(
+                 component_id=component_id,
+                 hash_id=hash_id,
+                 name=name,
+                 tool_type=tool_type,
+                 version=version,
+                 start_time=start_time,
+                 end_time=end_time,
+                 memory_used=memory_used,
+                 input_data=self._sanitize_input(args, kwargs),
+                 output_data=self._sanitize_output(result)
+             )
+
+             self.add_component(tool_component)
+             return result
+
+         except Exception as e:
+             error_component = {
+                 "code": 500,
+                 "type": type(e).__name__,
+                 "message": str(e),
+                 "details": {}
+             }
+
+             end_time = datetime.now().astimezone()
+
+             tool_component = self.create_tool_component(
+                 component_id=component_id,
+                 hash_id=hash_id,
+                 name=name,
+                 tool_type=tool_type,
+                 version=version,
+                 start_time=start_time,
+                 end_time=end_time,
+                 memory_used=0,
+                 input_data=self._sanitize_input(args, kwargs),
+                 output_data=None,
+                 error=error_component
+             )
+
+             self.add_component(tool_component)
+             raise
+
+     def create_tool_component(self, **kwargs):
+         """Create a tool component according to the data structure"""
+         start_time = kwargs["start_time"]
+         component = {
+             "id": kwargs["component_id"],
+             "hash_id": kwargs["hash_id"],
+             "source_hash_id": None,
+             "type": "tool",
+             "name": kwargs["name"],
+             "start_time": start_time.isoformat(),
+             "end_time": kwargs["end_time"].isoformat(),
+             "error": kwargs.get("error"),
+             "parent_id": self.current_agent_id.get(),
+             "info": {
+                 "tool_type": kwargs["tool_type"],
+                 "version": kwargs["version"],
+                 "memory_used": kwargs["memory_used"]
+             },
+             "data": {
+                 "input": kwargs["input_data"],
+                 "output": kwargs["output_data"],
+                 "memory_used": kwargs["memory_used"]
+             },
+             "network_calls": self.component_network_calls.get(kwargs["component_id"], []),
+             "interactions": [{
+                 "id": f"int_{uuid.uuid4()}",
+                 "interaction_type": "input",
+                 "timestamp": start_time.isoformat(),
+                 "content": kwargs["input_data"]
+             }, {
+                 "id": f"int_{uuid.uuid4()}",
+                 "interaction_type": "output",
+                 "timestamp": kwargs["end_time"].isoformat(),
+                 "content": kwargs["output_data"]
+             }]
+         }
+
+         return component
+
+     def start_component(self, component_id):
+         self.component_network_calls[component_id] = []
+
+     def end_component(self, component_id):
+         pass
+
+     def _sanitize_input(self, args: tuple, kwargs: dict) -> Dict:
+         """Sanitize and format input data"""
+         return {
+             "args": [str(arg) if not isinstance(arg, (int, float, bool, str, list, dict)) else arg for arg in args],
+             "kwargs": {
+                 k: str(v) if not isinstance(v, (int, float, bool, str, list, dict)) else v
+                 for k, v in kwargs.items()
+             }
+         }
+
+     def _sanitize_output(self, output: Any) -> Any:
+         """Sanitize and format output data"""
+         if isinstance(output, (int, float, bool, str, list, dict)):
+             return output
+         return str(output)
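
A minimal usage sketch of the tool-tracing API added above. ToolTracerMixin reads is_active, current_agent_id and add_component from the class it is mixed into; the MiniTracer host below is a hypothetical stand-in for the package's real tracer, so its name and constructor are assumptions, not part of the wheel.

import contextvars
from ragaai_catalyst.tracers.agentic_tracing.tool_tracer import ToolTracerMixin

class MiniTracer(ToolTracerMixin):
    """Hypothetical host; supplies the attributes the mixin expects."""
    def __init__(self):
        super().__init__()
        self.is_active = True  # the wrappers fall through to the raw call when False
        self.current_agent_id = contextvars.ContextVar("agent_id", default=None)
        self.components = []   # simple sink instead of the real trace store

    def add_component(self, component):
        self.components.append(component)

tracer = MiniTracer()

@tracer.trace_tool(name="adder", tool_type="math")
def add(a, b):
    return a + b

add(2, 3)
print(tracer.components[0]["type"], tracer.components[0]["name"])  # tool adder
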
ragaai_catalyst/tracers/agentic_tracing/unique_decorator.py
@@ -0,0 +1,221 @@
+ import hashlib
+ import inspect
+ import functools
+ import re
+ import tokenize
+ import io
+ import uuid
+
+ def normalize_source_code(source):
+     """
+     Advanced normalization of source code that:
+     1. Preserves docstrings
+     2. Removes comments
+     3. Removes extra whitespace
+
+     Args:
+         source (str): Original source code
+
+     Returns:
+         str: Normalized source code
+     """
+     # Use tokenize to carefully parse the source code
+     normalized_tokens = []
+
+     try:
+         # Convert source to a file-like object for tokenize
+         token_source = io.StringIO(source).readline
+
+         for token_type, token_string, _, _, _ in tokenize.generate_tokens(token_source):
+             # Preserve strings (including docstrings)
+             if token_type == tokenize.STRING:
+                 normalized_tokens.append(token_string.strip())
+
+             # Preserve code tokens
+             elif token_type in [
+                 tokenize.NAME,
+                 tokenize.NUMBER,
+                 tokenize.OP
+             ]:
+                 normalized_tokens.append(token_string.strip())
+
+     except tokenize.TokenError:
+         # Fallback to a simpler method if tokenization fails
+         normalized_tokens = re.findall(r'\w+|[^\w\s]', source)
+
+     # Remove extra spaces and join
+     normalized_source = ''.join(normalized_tokens)
+
+     return normalized_source
+
+ # def generate_unique_hash(obj, *call_args, **call_kwargs):
+ #     print('#'*100,'hash id: ', '#'*100)
+ #     print(obj)
+ #     print(*call_args)
+ #     # print(**call_kwargs)
+ #     """
+ #     Generate a unique, deterministic hash for a given object.
+
+ #     Args:
+ #         obj: The object (function or class) to generate hash for
+ #         additional_salt: Optional additional salt to ensure uniqueness
+
+ #     Returns:
+ #         str: A unique hash_id meeting the specified requirements
+ #     """
+ #     # Handle different object types
+
+ #     if inspect.isclass(obj):
+ #         # For classes, use the class definition
+ #         try:
+ #             source = inspect.getsource(obj)
+ #         except (IOError, TypeError):
+ #             source = repr(obj)
+
+ #         # Use class name in hash generation
+ #         hash_input = f"{obj.__name__}{normalize_source_code(source)}"
+
+ #     else:
+ #         # For functions and methods
+ #         # Get full signature information
+ #         signature = inspect.signature(obj)
+
+ #         # Capture parameter names and their default values
+ #         params_info = []
+ #         for name, param in signature.parameters.items():
+ #             param_str = f"{name}:{param.kind}"
+ #             if param.default != inspect.Parameter.empty:
+ #                 param_str += f":default={param.default}"
+ #             params_info.append(param_str)
+
+ #         # Get source code
+ #         try:
+ #             source = inspect.getsource(obj)
+ #         except (IOError, TypeError):
+ #             source = repr(obj)
+
+ #         # Combine method name, parameters, and normalized source
+ #         hash_input = (
+ #             f"{obj.__name__}"  # Method name
+ #             f"{''.join(params_info)}"  # Parameter details
+ #             f"{normalize_source_code(source)}"  # Normalized source code
+ #         )
+
+ #     # Add optional salt
+ #     args_repr = str(call_args) + str(sorted(call_kwargs.items()))
+ #     hash_input += args_repr
+ #     # Use SHA-256 for generating the hash
+ #     hash_object = hashlib.sha256(hash_input.encode('utf-8'))
+
+ #     # Generate hash and truncate to 32 characters
+ #     hash_id = hash_object.hexdigest()[:32]
+
+ #     # Ensure the hash starts with a letter
+ #     if not hash_id[0].isalpha():
+ #         hash_id = 'a' + hash_id[1:]
+
+ #     print(hash_id)
+ #     return hash_id
+
+
+
+ def generate_unique_hash(obj, *args, **kwargs):
+     """Generate a unique hash based on the normalized function definition and its arguments"""
+     if inspect.ismethod(obj) or inspect.isfunction(obj):
+         # Get function name and source code
+         func_name = obj.__name__
+         try:
+             # Get the source code and normalize it
+             func_source = inspect.getsource(obj)
+             normalized_source = normalize_source_code(func_source)
+         except (IOError, TypeError):
+             normalized_source = ""
+
+         # Get function arguments
+         if args and hasattr(args[0], '__class__'):
+             # If it's a method, skip the 'self' argument
+             args = args[1:]
+
+         # Normalize argument values
+         def normalize_arg(arg):
+             if isinstance(arg, (str, int, float, bool)):
+                 return str(arg)
+             elif isinstance(arg, (list, tuple, set)):
+                 return '_'.join(normalize_arg(x) for x in arg)
+             elif isinstance(arg, dict):
+                 return '_'.join(f"{normalize_arg(k)}:{normalize_arg(v)}"
+                                 for k, v in sorted(arg.items()))
+             elif callable(arg):
+                 return arg.__name__
+             else:
+                 return str(type(arg).__name__)
+
+         # Create normalized strings of arguments
+         args_str = '_'.join(normalize_arg(arg) for arg in args)
+         kwargs_str = '_'.join(f"{k}:{normalize_arg(v)}"
+                               for k, v in sorted(kwargs.items()))
+
+         # Combine all components
+         hash_input = f"{func_name}_{normalized_source}_{args_str}_{kwargs_str}"
+
+     elif inspect.isclass(obj):
+         # For classes, normalize the class definition
+         try:
+             class_source = inspect.getsource(obj)
+             normalized_source = normalize_source_code(class_source)
+             hash_input = f"{obj.__name__}_{normalized_source}"
+         except (IOError, TypeError):
+             hash_input = f"{obj.__name__}_{str(obj)}"
+
+     else:
+         # For other objects, use their string representation
+         hash_input = str(obj)
+
+     # Create hash
+     hash_obj = hashlib.md5(hash_input.encode('utf-8'))
+     return hash_obj.hexdigest()
+
+
+ class UniqueIdentifier:
+     _instance = None
+     _hash_cache = {}  # Class-level cache for storing hashes
+
+     def __new__(cls, *args, **kwargs):
+         if cls._instance is None:
+             cls._instance = super().__new__(cls)
+         return cls._instance
+
+     def __init__(self, salt=None):
+         # Initialize only once
+         if not hasattr(self, 'salt'):
+             self.salt = salt
+
+     def __call__(self, obj):
+         if inspect.isclass(obj):
+             hash_id = generate_unique_hash(obj)
+             setattr(obj, 'hash_id', hash_id)
+             return obj
+
+         @functools.wraps(obj)
+         def wrapper(*args, **kwargs):
+             # Generate cache key based on function and arguments
+             cache_key = (obj.__name__, str(args), str(kwargs))
+
+             # Use cached hash if available, otherwise generate new one
+             if cache_key not in self._hash_cache:
+                 self._hash_cache[cache_key] = generate_unique_hash(obj, *args, **kwargs)
+
+             # Store hash_id on the wrapper function
+             wrapper.hash_id = self._hash_cache[cache_key]
+
+             return obj(*args, **kwargs)
+
+         # Initialize hash_id
+         initial_hash = generate_unique_hash(obj)
+         wrapper.hash_id = initial_hash
+
+         return wrapper
+
+ # Create a single instance to be used across all mixins
+ mydecorator = UniqueIdentifier()
+
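
A short sketch of what mydecorator attaches (hypothetical usage, not shipped in the wheel): hash_id is first derived from the normalized source at decoration time, then recomputed from the source plus the call's arguments on each call and cached per argument combination at class level.

from ragaai_catalyst.tracers.agentic_tracing.unique_decorator import mydecorator

@mydecorator
def greet(name, punctuation="!"):
    return f"Hello, {name}{punctuation}"

decoration_hash = greet.hash_id       # derived from the normalized source only
greet(name="Ada")
call_hash = greet.hash_id             # now also reflects the keyword arguments
print(decoration_hash != call_hash)   # True: argument values feed into the hash
greet(name="Ada")
print(greet.hash_id == call_hash)     # True: identical calls reuse the cached hash
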
ragaai_catalyst/tracers/agentic_tracing/unique_decorator_test.py
@@ -0,0 +1,172 @@
+ from unique_decorator import mydecorator
+ from unique_decorator import generate_unique_hash
+ import inspect
+
+ def print_test_case(case_num, description, expected_behavior, hash1, hash2=None):
+     print(f"\n{'='*100}")
+     print(f"Test Case #{case_num}: {description}")
+     print(f"Expected Behavior: {expected_behavior}")
+     print(f"{'='*100}")
+     if hash2 is not None:
+         print(f"Hash ID 1: {hash1}")
+         print(f"Hash ID 2: {hash2}")
+         print(f"Hash IDs are {'EQUAL' if hash1 == hash2 else 'DIFFERENT'} (Expected: {expected_behavior})")
+     else:
+         print(f"Hash ID: {hash1}")
+     print(f"{'='*100}\n")
+
+ # Test Case 1: Same function with different formatting
+ # Expected: Same hash_id
+ @mydecorator
+ def example_function():
+     x = 1
+     return x
+
+ hash1 = example_function.hash_id
+
+ @mydecorator
+ def example_function():
+     # This is a comment
+     x = 1 # Another comment
+     return x # More spacing
+
+ hash2 = example_function.hash_id
+
+ print_test_case(1,
+     "Same function with different formatting and comments",
+     "Hash IDs should be EQUAL",
+     hash1, hash2)
+
+ # Test Case 2: Function with parameters - different argument orders
+ # Expected: Same hash_id for same arguments in different order
+ @mydecorator
+ def function_with_params(a: int, b: int = 10):
+     return a + b
+
+ result1 = function_with_params(a=2, b=3)
+ hash1 = function_with_params.hash_id
+
+ result2 = function_with_params(b=3, a=2)
+ hash2 = function_with_params.hash_id
+
+ print_test_case(2,
+     "Same function call with different argument order (a=2, b=3 vs b=3, a=2)",
+     "Hash IDs should be EQUAL",
+     hash1, hash2)
+
+ # Test Case 3: Function with different default value
+ # Expected: Different hash_id
+ @mydecorator
+ def function_with_params(a: int, b: int = 5): # Different default value
+     return a + b
+
+ hash3 = function_with_params.hash_id
+
+ print_test_case(3,
+     "Same function name but different default parameter value",
+     "Hash IDs should be DIFFERENT",
+     hash2, hash3)
+
+ # Test Case 4: Class methods with different formatting
+ # Expected: Same hash_id
+ @mydecorator
+ class ExampleClass:
+     @mydecorator
+     def method1(self):
+         x = 1
+         return x
+
+ hash1 = ExampleClass().method1.hash_id
+
+ @mydecorator
+ class ExampleClass:
+     @mydecorator
+     def method1(self):
+         # Comment here
+         x = 1
+         return x
+
+ hash2 = ExampleClass().method1.hash_id
+
+ print_test_case(4,
+     "Class method with different formatting",
+     "Hash IDs should be EQUAL",
+     hash1, hash2)
+
+ # Test Case 5: Functions with different argument types but same content
+ # Expected: Same hash_id
+ @mydecorator
+ def complex_function(a: dict, b: list = [1, 2]):
+     return a, b
+
+ test_dict1 = {"a": 1, "b": 2}
+ test_dict2 = {"b": 2, "a": 1} # Same content, different order
+ test_list1 = [1, 2, 3]
+ test_list2 = [1, 2, 3] # Identical list
+
+ result1 = complex_function(test_dict1, test_list1)
+ hash1 = complex_function.hash_id
+
+ result2 = complex_function(test_dict2, test_list2)
+ hash2 = complex_function.hash_id
+
+ print_test_case(5,
+     "Complex function with same content in different order",
+     "Hash IDs should be EQUAL",
+     hash1, hash2)
+
+ # Test Case 6: Function with docstring - different formatting
+ # Expected: Same hash_id
+ @mydecorator
+ def documented_function(x: int):
+     """
+     This is a docstring.
+     It should be preserved in the hash.
+     """
+     # This is a comment that should be ignored
+     return x * 2 # This comment should also be ignored
+
+ hash1 = documented_function.hash_id
+
+ @mydecorator
+ def documented_function(x:int):
+     """
+     This is a docstring.
+     It should be preserved in the hash.
+     """
+     return x*2
+
+ hash2 = documented_function.hash_id
+
+ print_test_case(6,
+     "Function with docstring - different formatting",
+     "Hash IDs should be EQUAL",
+     hash1, hash2)
+
+ # Test Case 7: Different functions with same structure
+ # Expected: Different hash_id
+ @mydecorator
+ def function_a(x):
+     return x + 1
+
+ @mydecorator
+ def function_b(x):
+     return x + 1
+
+ print_test_case(7,
+     "Different function names with same implementation",
+     "Hash IDs should be DIFFERENT",
+     function_a.hash_id, function_b.hash_id)
+
+ # Test Case 8: Same function with different argument values
+ # Expected: Different hash_id
+ result1 = function_with_params(a=1, b=2)
+ hash1 = function_with_params.hash_id
+
+ result2 = function_with_params(a=3, b=4)
+ hash2 = function_with_params.hash_id
+
+ print_test_case(8,
+     "Same function with different argument values",
+     "Hash IDs should be DIFFERENT",
+     hash1, hash2)
ragaai_catalyst/tracers/agentic_tracing/user_interaction_tracer.py
@@ -0,0 +1,67 @@
+ import builtins
+ from contextlib import contextmanager, asynccontextmanager
+ from datetime import datetime
+
+ from ..data import UserInteractionModel
+
+
+ class UserInteractionTracer:
+     def __init__(self, tracer):
+         self.tracer = tracer
+         self.original_input = builtins.input
+         self.original_print = builtins.print
+
+     def input(self, prompt=""):
+         content = prompt
+         user_input = self.original_input(prompt)
+         self._log_interaction("input", user_input)
+         return user_input
+
+     def print(self, *args, **kwargs):
+         content = " ".join(str(arg) for arg in args)
+         self._log_interaction("output", content)
+         self.original_print(*args, **kwargs)
+
+     def _log_interaction(self, interaction_type, content):
+         agent_id = self.tracer.current_agent_id.get()
+         user_interaction = UserInteractionModel(
+             project_id=self.tracer.project_id,
+             trace_id=self.tracer.trace_id,
+             agent_id=agent_id,
+             interaction_type=interaction_type,
+             content=content,
+             timestamp=datetime.now(),
+         )
+         with self.tracer.Session() as session:
+             session.add(user_interaction)
+             session.commit()
+
+         # Also add to trace data
+         self.tracer.trace_data.setdefault("user_interactions", []).append(
+             {
+                 "interaction_type": interaction_type,
+                 "content": content,
+                 "timestamp": datetime.now(),
+                 "agent_id": agent_id,
+             }
+         )
+
+     @contextmanager
+     def capture(self):
+         builtins.input = self.input
+         builtins.print = self.print
+         try:
+             yield
+         finally:
+             builtins.input = self.original_input
+             builtins.print = self.original_print
+
+     @asynccontextmanager
+     async def async_capture(self):
+         builtins.input = self.input
+         builtins.print = self.print
+         try:
+             yield
+         finally:
+             builtins.input = self.original_input
+             builtins.print = self.original_print
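
UserInteractionTracer depends on its host tracer's Session, project_id, trace_id and trace_data, so it is hard to run on its own; the standalone sketch below (not the package's class) only illustrates the builtins-swapping pattern that capture() relies on: replace builtins.print for the duration of a with block, record what passes through, and always restore the original.

import builtins
from contextlib import contextmanager
from datetime import datetime

interactions = []

@contextmanager
def capture_output():
    original_print = builtins.print

    def traced_print(*args, **kwargs):
        # Record the rendered output, then delegate to the real print
        interactions.append({
            "interaction_type": "output",
            "content": " ".join(str(arg) for arg in args),
            "timestamp": datetime.now(),
        })
        original_print(*args, **kwargs)

    builtins.print = traced_print
    try:
        yield interactions
    finally:
        builtins.print = original_print  # restored even if the body raises

with capture_output() as log:
    print("hello", 42)

print(len(log), log[0]["content"])  # 1 hello 42
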
ragaai_catalyst/tracers/agentic_tracing/utils/__init__.py
@@ -0,0 +1,3 @@
+ from .generic import get_db_path
+
+ __all__ = ["get_db_path"]