ragaai-catalyst 2.2.4b5__py3-none-any.whl → 2.2.5b2__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (45)
  1. ragaai_catalyst/__init__.py +0 -2
  2. ragaai_catalyst/dataset.py +59 -1
  3. ragaai_catalyst/tracers/agentic_tracing/tracers/main_tracer.py +5 -285
  4. ragaai_catalyst/tracers/agentic_tracing/utils/__init__.py +0 -2
  5. ragaai_catalyst/tracers/agentic_tracing/utils/create_dataset_schema.py +1 -1
  6. ragaai_catalyst/tracers/exporters/__init__.py +1 -2
  7. ragaai_catalyst/tracers/exporters/file_span_exporter.py +0 -1
  8. ragaai_catalyst/tracers/exporters/ragaai_trace_exporter.py +23 -1
  9. ragaai_catalyst/tracers/tracer.py +6 -186
  10. {ragaai_catalyst-2.2.4b5.dist-info → ragaai_catalyst-2.2.5b2.dist-info}/METADATA +1 -1
  11. {ragaai_catalyst-2.2.4b5.dist-info → ragaai_catalyst-2.2.5b2.dist-info}/RECORD +14 -45
  12. ragaai_catalyst/experiment.py +0 -486
  13. ragaai_catalyst/tracers/agentic_tracing/tests/FinancialAnalysisSystem.ipynb +0 -536
  14. ragaai_catalyst/tracers/agentic_tracing/tests/GameActivityEventPlanner.ipynb +0 -134
  15. ragaai_catalyst/tracers/agentic_tracing/tests/TravelPlanner.ipynb +0 -563
  16. ragaai_catalyst/tracers/agentic_tracing/tests/__init__.py +0 -0
  17. ragaai_catalyst/tracers/agentic_tracing/tests/ai_travel_agent.py +0 -197
  18. ragaai_catalyst/tracers/agentic_tracing/tests/unique_decorator_test.py +0 -172
  19. ragaai_catalyst/tracers/agentic_tracing/tracers/agent_tracer.py +0 -687
  20. ragaai_catalyst/tracers/agentic_tracing/tracers/base.py +0 -1319
  21. ragaai_catalyst/tracers/agentic_tracing/tracers/custom_tracer.py +0 -347
  22. ragaai_catalyst/tracers/agentic_tracing/tracers/langgraph_tracer.py +0 -0
  23. ragaai_catalyst/tracers/agentic_tracing/tracers/llm_tracer.py +0 -1182
  24. ragaai_catalyst/tracers/agentic_tracing/tracers/network_tracer.py +0 -288
  25. ragaai_catalyst/tracers/agentic_tracing/tracers/tool_tracer.py +0 -557
  26. ragaai_catalyst/tracers/agentic_tracing/tracers/user_interaction_tracer.py +0 -129
  27. ragaai_catalyst/tracers/agentic_tracing/upload/upload_local_metric.py +0 -74
  28. ragaai_catalyst/tracers/agentic_tracing/utils/api_utils.py +0 -21
  29. ragaai_catalyst/tracers/agentic_tracing/utils/generic.py +0 -32
  30. ragaai_catalyst/tracers/agentic_tracing/utils/get_user_trace_metrics.py +0 -28
  31. ragaai_catalyst/tracers/agentic_tracing/utils/span_attributes.py +0 -133
  32. ragaai_catalyst/tracers/agentic_tracing/utils/supported_llm_provider.toml +0 -34
  33. ragaai_catalyst/tracers/exporters/raga_exporter.py +0 -467
  34. ragaai_catalyst/tracers/langchain_callback.py +0 -821
  35. ragaai_catalyst/tracers/llamaindex_callback.py +0 -361
  36. ragaai_catalyst/tracers/llamaindex_instrumentation.py +0 -424
  37. ragaai_catalyst/tracers/upload_traces.py +0 -170
  38. ragaai_catalyst/tracers/utils/convert_langchain_callbacks_output.py +0 -62
  39. ragaai_catalyst/tracers/utils/convert_llama_instru_callback.py +0 -69
  40. ragaai_catalyst/tracers/utils/extraction_logic_llama_index.py +0 -74
  41. ragaai_catalyst/tracers/utils/langchain_tracer_extraction_logic.py +0 -82
  42. ragaai_catalyst/tracers/utils/rag_trace_json_converter.py +0 -403
  43. {ragaai_catalyst-2.2.4b5.dist-info → ragaai_catalyst-2.2.5b2.dist-info}/WHEEL +0 -0
  44. {ragaai_catalyst-2.2.4b5.dist-info → ragaai_catalyst-2.2.5b2.dist-info}/licenses/LICENSE +0 -0
  45. {ragaai_catalyst-2.2.4b5.dist-info → ragaai_catalyst-2.2.5b2.dist-info}/top_level.txt +0 -0
@@ -1,424 +0,0 @@
1
- from configparser import InterpolationMissingOptionError
2
- import json
3
- from datetime import datetime
4
- from typing import Any, Optional, Dict, List, ClassVar
5
- from pydantic import Field
6
- # from treelib import Tree
7
-
8
- from llama_index.core.instrumentation.span import SimpleSpan
9
- from llama_index.core.instrumentation.span_handlers.base import BaseSpanHandler
10
- from llama_index.core.instrumentation.events import BaseEvent
11
- from llama_index.core.instrumentation.event_handlers import BaseEventHandler
12
- from llama_index.core.instrumentation import get_dispatcher
13
- from llama_index.core.instrumentation.span_handlers import SimpleSpanHandler
14
-
15
- from llama_index.core.instrumentation.events.agent import (
16
- AgentChatWithStepStartEvent,
17
- AgentChatWithStepEndEvent,
18
- AgentRunStepStartEvent,
19
- AgentRunStepEndEvent,
20
- AgentToolCallEvent,
21
- )
22
- from llama_index.core.instrumentation.events.chat_engine import (
23
- StreamChatErrorEvent,
24
- StreamChatDeltaReceivedEvent,
25
- )
26
- from llama_index.core.instrumentation.events.embedding import (
27
- EmbeddingStartEvent,
28
- EmbeddingEndEvent,
29
- )
30
- from llama_index.core.instrumentation.events.llm import (
31
- LLMPredictEndEvent,
32
- LLMPredictStartEvent,
33
- LLMStructuredPredictEndEvent,
34
- LLMStructuredPredictStartEvent,
35
- LLMCompletionEndEvent,
36
- LLMCompletionStartEvent,
37
- LLMChatEndEvent,
38
- LLMChatStartEvent,
39
- LLMChatInProgressEvent,
40
- )
41
- from llama_index.core.instrumentation.events.query import (
42
- QueryStartEvent,
43
- QueryEndEvent,
44
- )
45
- from llama_index.core.instrumentation.events.rerank import (
46
- ReRankStartEvent,
47
- ReRankEndEvent,
48
- )
49
- from llama_index.core.instrumentation.events.retrieval import (
50
- RetrievalStartEvent,
51
- RetrievalEndEvent,
52
- )
53
- from llama_index.core.instrumentation.events.span import (
54
- SpanDropEvent,
55
- )
56
- from llama_index.core.instrumentation.events.synthesis import (
57
- SynthesizeStartEvent,
58
- SynthesizeEndEvent,
59
- GetResponseEndEvent,
60
- GetResponseStartEvent,
61
- )
62
-
63
- import uuid
64
-
65
- from .utils.extraction_logic_llama_index import extract_llama_index_data
66
- from .utils.convert_llama_instru_callback import convert_llamaindex_instrumentation_to_callback
67
-
68
- class EventHandler(BaseEventHandler):
69
- """Example event handler.
70
-
71
- This event handler is an example of how to create a custom event handler.
72
-
73
- In general, logged events are treated as single events in a point in time,
74
- that link to a span. The span is a collection of events that are related to
75
- a single task. The span is identified by a unique span_id.
76
-
77
- While events are independent, there is some hierarchy.
78
- For example, in query_engine.query() call with a reranker attached:
79
- - QueryStartEvent
80
- - RetrievalStartEvent
81
- - EmbeddingStartEvent
82
- - EmbeddingEndEvent
83
- - RetrievalEndEvent
84
- - RerankStartEvent
85
- - RerankEndEvent
86
- - SynthesizeStartEvent
87
- - GetResponseStartEvent
88
- - LLMPredictStartEvent
89
- - LLMChatStartEvent
90
- - LLMChatEndEvent
91
- - LLMPredictEndEvent
92
- - GetResponseEndEvent
93
- - SynthesizeEndEvent
94
- - QueryEndEvent
95
- """
96
-
97
- events: List[BaseEvent] = []
98
- current_trace: List[Dict[str, Any]] = [] # Store events for the current trace
99
-
100
-
101
- @classmethod
102
- def class_name(cls) -> str:
103
- """Class name."""
104
- return "EventHandler"
105
-
106
- def handle(self, event: BaseEvent) -> None:
107
- """Logic for handling event."""
108
- # print("-----------------------")
109
- # # all events have these attributes
110
- # print(event.id_)
111
- # print(event.timestamp)
112
- # print(event.span_id)
113
-
114
- # Prepare event details dictionary
115
- event_details = {
116
- "id": event.id_,
117
- "timestamp": event.timestamp,
118
- "span_id": event.span_id,
119
- "event_type": event.class_name(),
120
- }
121
-
122
- # event specific attributes
123
- # print(f"Event type: {event.class_name()}")
124
- if isinstance(event, AgentRunStepStartEvent):
125
- event_details.update({
126
- "task_id": event.task_id,
127
- "step": event.step,
128
- "input": event.input,
129
- })
130
- if isinstance(event, AgentRunStepEndEvent):
131
- event_details.update({
132
- "step_output": event.step_output,
133
- })
134
- if isinstance(event, AgentChatWithStepStartEvent):
135
- event_details.update({
136
- "user_msg": event.user_msg,
137
- })
138
- if isinstance(event, AgentChatWithStepEndEvent):
139
- event_details.update({
140
- "response": event.response,
141
- })
142
- if isinstance(event, AgentToolCallEvent):
143
- event_details.update({
144
- "arguments": event.arguments,
145
- "tool_name": event.tool.name,
146
- "tool_description": event.tool.description,
147
- "tool_openai": event.tool.to_openai_tool(),
148
- })
149
- if isinstance(event, StreamChatDeltaReceivedEvent):
150
- event_details.update({
151
- "delta": event.delta,
152
- })
153
- if isinstance(event, StreamChatErrorEvent):
154
- event_details.update({
155
- "exception": event.exception,
156
- })
157
- if isinstance(event, EmbeddingStartEvent):
158
- event_details.update({
159
- "model_dict": event.model_dict,
160
- })
161
- if isinstance(event, EmbeddingEndEvent):
162
- event_details.update({
163
- "chunks": event.chunks,
164
- "embeddings": event.embeddings[0][:5],
165
- })
166
- if isinstance(event, LLMPredictStartEvent):
167
- event_details.update({
168
- "template": event.template,
169
- "template_args": event.template_args,
170
- })
171
- if isinstance(event, LLMPredictEndEvent):
172
- event_details.update({
173
- "output": event.output,
174
- })
175
- if isinstance(event, LLMStructuredPredictStartEvent):
176
- event_details.update({
177
- "template": event.template,
178
- "template_args": event.template_args,
179
- "output_cls": event.output_cls,
180
- })
181
- if isinstance(event, LLMStructuredPredictEndEvent):
182
- event_details.update({
183
- "output": event.output,
184
- })
185
- if isinstance(event, LLMCompletionStartEvent):
186
- event_details.update({
187
- "model_dict": event.model_dict,
188
- "prompt": event.prompt,
189
- "additional_kwargs": event.additional_kwargs,
190
- })
191
- if isinstance(event, LLMCompletionEndEvent):
192
- event_details.update({
193
- "response": event.response,
194
- "prompt": event.prompt,
195
- })
196
- if isinstance(event, LLMChatInProgressEvent):
197
- event_details.update({
198
- "messages": event.messages,
199
- "response": event.response,
200
- })
201
- if isinstance(event, LLMChatStartEvent):
202
- event_details.update({
203
- "messages": event.messages,
204
- "additional_kwargs": event.additional_kwargs,
205
- "model_dict": event.model_dict,
206
- })
207
- if isinstance(event, LLMChatEndEvent):
208
- event_details.update({
209
- "messages": event.messages,
210
- "response": event.response,
211
- })
212
- if isinstance(event, RetrievalStartEvent):
213
- event_details.update({
214
- "str_or_query_bundle": event.str_or_query_bundle,
215
- })
216
- if isinstance(event, RetrievalEndEvent):
217
- event_details.update({
218
- "str_or_query_bundle": event.str_or_query_bundle,
219
- "nodes": event.nodes,
220
- "text": event.nodes[0].text
221
- })
222
- if isinstance(event, ReRankStartEvent):
223
- event_details.update({
224
- "query": event.query,
225
- "nodes": event.nodes,
226
- "top_n": event.top_n,
227
- "model_name": event.model_name,
228
- })
229
- if isinstance(event, ReRankEndEvent):
230
- event_details.update({
231
- "nodes": event.nodes,
232
- })
233
- if isinstance(event, QueryStartEvent):
234
- event_details.update({
235
- "query": event.query,
236
- })
237
- if isinstance(event, QueryEndEvent):
238
- event_details.update({
239
- "response": event.response,
240
- "query": event.query,
241
- })
242
- if isinstance(event, SpanDropEvent):
243
- event_details.update({
244
- "err_str": event.err_str,
245
- })
246
- if isinstance(event, SynthesizeStartEvent):
247
- event_details.update({
248
- "query": event.query,
249
- })
250
- if isinstance(event, SynthesizeEndEvent):
251
- event_details.update({
252
- "response": event.response,
253
- "query": event.query,
254
- })
255
- if isinstance(event, GetResponseStartEvent):
256
- event_details.update({
257
- "query_str": event.query_str,
258
- })
259
-
260
- # Append event details to current_trace
261
- self.current_trace.append(event_details)
262
-
263
- self.events.append(event)
264
-
265
- def _get_events_by_span(self) -> Dict[str, List[BaseEvent]]:
266
- events_by_span: Dict[str, List[BaseEvent]] = {}
267
- for event in self.events:
268
- if event.span_id in events_by_span:
269
- events_by_span[event.span_id].append(event)
270
- else:
271
- events_by_span[event.span_id] = [event]
272
- return events_by_span
273
-
274
- # def _get_event_span_trees(self) -> List[Tree]:
275
- # events_by_span = self._get_events_by_span()
276
-
277
- # trees = []
278
- # tree = Tree()
279
-
280
- # for span, sorted_events in events_by_span.items():
281
- # # create root node i.e. span node
282
- # tree.create_node(
283
- # tag=f"{span} (SPAN)",
284
- # identifier=span,
285
- # parent=None,
286
- # data=sorted_events[0].timestamp,
287
- # )
288
-
289
- # for event in sorted_events:
290
- # tree.create_node(
291
- # tag=f"{event.class_name()}: {event.id_}",
292
- # identifier=event.id_,
293
- # parent=event.span_id,
294
- # data=event.timestamp,
295
- # )
296
-
297
- # trees.append(tree)
298
- # tree = Tree()
299
- # return trees
300
-
301
- # def print_event_span_trees(self) -> None:
302
- # """Method for viewing trace trees."""
303
- # trees = self._get_event_span_trees()
304
- # for tree in trees:
305
- # print(
306
- # tree.show(
307
- # stdout=False, sorting=True, key=lambda node: node.data
308
- # )
309
- # )
310
- # print("")
311
-
312
-
313
-
314
- class SpanHandler(BaseSpanHandler[SimpleSpan]):
315
- # span_dict = {}
316
- span_dict: ClassVar[Dict[str, List[SimpleSpan]]] = {}
317
-
318
- @classmethod
319
- def class_name(cls) -> str:
320
- """Class name."""
321
- return "SpanHandler"
322
-
323
- def new_span(
324
- self,
325
- id_: str,
326
- bound_args: Any,
327
- instance: Optional[Any] = None,
328
- parent_span_id: Optional[str] = None,
329
- tags: Optional[Dict[str, Any]] = None,
330
- **kwargs: Any,
331
- ) -> Optional[SimpleSpan]:
332
- """Create a span."""
333
- # logic for creating a new MyCustomSpan
334
- if id_ not in self.span_dict:
335
- self.span_dict[id_] = []
336
- self.span_dict[id_].append(
337
- SimpleSpan(id_=id_, parent_id=parent_span_id)
338
- )
339
-
340
- def prepare_to_exit_span(
341
- self,
342
- id_: str,
343
- bound_args: Any,
344
- instance: Optional[Any] = None,
345
- result: Optional[Any] = None,
346
- **kwargs: Any,
347
- ) -> Any:
348
- """Logic for preparing to exit a span."""
349
- pass
350
- # if id in self.span_dict:
351
- # return self.span_dict[id].pop()
352
-
353
- def prepare_to_drop_span(
354
- self,
355
- id_: str,
356
- bound_args: Any,
357
- instance: Optional[Any] = None,
358
- err: Optional[BaseException] = None,
359
- **kwargs: Any,
360
- ) -> Any:
361
- """Logic for preparing to drop a span."""
362
- pass
363
- # if id in self.span_dict:
364
- # return self.span_dict[id].pop()
365
-
366
-
367
-
368
- class LlamaIndexInstrumentationTracer:
369
- def __init__(self, user_detail):
370
- """Initialize the LlamaIndexTracer with handlers but don't start tracing yet."""
371
- # Initialize the root dispatcher
372
- self.root_dispatcher = get_dispatcher()
373
-
374
- # Initialize handlers
375
- self.json_event_handler = EventHandler()
376
- self.span_handler = SpanHandler()
377
- self.simple_span_handler = SimpleSpanHandler()
378
-
379
- self.is_tracing = False # Flag to check if tracing is active
380
-
381
- self.user_detail = user_detail
382
-
383
- def start(self):
384
- """Start tracing by registering handlers."""
385
- if self.is_tracing:
386
- print("Tracing is already active.")
387
- return
388
-
389
- # Register handlers
390
- self.root_dispatcher.add_span_handler(self.span_handler)
391
- self.root_dispatcher.add_span_handler(self.simple_span_handler)
392
- self.root_dispatcher.add_event_handler(self.json_event_handler)
393
-
394
- self.is_tracing = True
395
- print("Tracing started.")
396
-
397
- def stop(self):
398
- """Stop tracing by unregistering handlers."""
399
- if not self.is_tracing:
400
- print("Tracing is not active.")
401
- return
402
-
403
- # Write current_trace to a JSON file
404
- final_traces = {
405
- "project_id": self.user_detail["project_id"],
406
- "trace_id": str(uuid.uuid4()),
407
- "session_id": None,
408
- "trace_type": "llamaindex",
409
- "metadata": self.user_detail["trace_user_detail"]["metadata"],
410
- "pipeline": self.user_detail["trace_user_detail"]["pipeline"],
411
- "traces": self.json_event_handler.current_trace,
412
-
413
- }
414
-
415
- with open('new_llamaindex_traces.json', 'w') as f:
416
- json.dump([final_traces], f, default=str, indent=4)
417
-
418
- llamaindex_instrumentation_data = extract_llama_index_data([final_traces])
419
- converted_back_to_callback = convert_llamaindex_instrumentation_to_callback(llamaindex_instrumentation_data)
420
-
421
- # Just indicate tracing is stopped
422
- self.is_tracing = False
423
- print("Tracing stopped.")
424
- return converted_back_to_callback
@@ -1,170 +0,0 @@
1
- import requests
2
- import json
3
- import os
4
- from datetime import datetime
5
- import logging
6
- logger = logging.getLogger(__name__)
7
-
8
- class UploadTraces:
9
- def __init__(self,
10
- json_file_path,
11
- project_name,
12
- project_id,
13
- dataset_name,
14
- user_detail,
15
- base_url):
16
- self.json_file_path = json_file_path
17
- self.project_name = project_name
18
- self.project_id = project_id
19
- self.dataset_name = dataset_name
20
- self.user_detail = user_detail
21
- self.base_url = base_url
22
- self.timeout = 10
23
-
24
- def _create_dataset_schema_with_trace(self, additional_metadata_keys=None, additional_pipeline_keys=None):
25
- SCHEMA_MAPPING_NEW = {
26
- "trace_id": {"columnType": "traceId"},
27
- "trace_uri": {"columnType": "traceUri"},
28
- "prompt": {"columnType": "prompt"},
29
- "response":{"columnType": "response"},
30
- "context": {"columnType": "context"},
31
- "llm_model": {"columnType":"pipeline"},
32
- "recorded_on": {"columnType": "timestamp"},
33
- "embed_model": {"columnType":"pipeline"},
34
- "log_source": {"columnType": "metadata"},
35
- "vector_store":{"columnType":"pipeline"},
36
- "feedback": {"columnType":"feedBack"},
37
- "model_name": {"columnType": "metadata"},
38
- "total_cost": {"columnType": "metadata", "dataType": "numerical"},
39
- "total_latency": {"columnType": "metadata", "dataType": "numerical"},
40
- "error": {"columnType": "metadata"},
41
- "externalId": {"columnType": "externalId"}
42
- }
43
-
44
- if additional_metadata_keys:
45
- for key in additional_metadata_keys:
46
- if key == "model_name":
47
- SCHEMA_MAPPING_NEW['response']["modelName"] = additional_metadata_keys[key]
48
- elif key == "error":
49
- pass
50
- else:
51
- SCHEMA_MAPPING_NEW[key] = {"columnType": key, "parentColumn": "response"}
52
-
53
- if self.user_detail and self.user_detail["trace_user_detail"]["metadata"]:
54
- for key in self.user_detail["trace_user_detail"]["metadata"]:
55
- if key not in SCHEMA_MAPPING_NEW:
56
- SCHEMA_MAPPING_NEW[key] = {"columnType": "metadata"}
57
-
58
- if additional_pipeline_keys:
59
- for key in additional_pipeline_keys:
60
- SCHEMA_MAPPING_NEW[key] = {"columnType": "pipeline"}
61
-
62
- def make_request():
63
- headers = {
64
- "Content-Type": "application/json",
65
- "Authorization": f"Bearer {os.getenv('RAGAAI_CATALYST_TOKEN')}",
66
- "X-Project-Name": self.project_name,
67
- }
68
- payload = json.dumps({
69
- "datasetName": self.dataset_name,
70
- "schemaMapping": SCHEMA_MAPPING_NEW,
71
- "traceFolderUrl": None,
72
- })
73
- response = requests.request("POST",
74
- f"{self.base_url}/v1/llm/dataset/logs",
75
- headers=headers,
76
- data=payload,
77
- timeout=self.timeout
78
- )
79
-
80
- return response
81
-
82
- response = make_request()
83
-
84
- if response.status_code == 401:
85
- # get_token() # Fetch a new token and set it in the environment
86
- response = make_request() # Retry the request
87
- if response.status_code != 200:
88
- return response.status_code
89
- return response.status_code
90
-
91
- def _get_presigned_url(self):
92
- payload = json.dumps({
93
- "datasetName": self.dataset_name,
94
- "numFiles": 1,
95
- })
96
- headers = {
97
- "Content-Type": "application/json",
98
- "Authorization": f"Bearer {os.getenv('RAGAAI_CATALYST_TOKEN')}",
99
- "X-Project-Name": self.project_name,
100
- }
101
-
102
- # Changed to POST from GET
103
- response = requests.request("POST",
104
- f"{self.base_url}/v1/llm/presigned-url",
105
- headers=headers,
106
- data=payload,
107
- timeout=self.timeout)
108
- if response.status_code == 200:
109
- presignedUrls = response.json()["data"]["presignedUrls"][0]
110
- return presignedUrls
111
- else:
112
- response = requests.request("GET",
113
- f"{self.base_url}/v1/llm/presigned-url",
114
- headers=headers,
115
- data=payload,
116
- timeout=self.timeout)
117
- if response.status_code == 200:
118
- presignedUrls = response.json()["data"]["presignedUrls"][0]
119
- return presignedUrls
120
-
121
- logger.error(f"Failed to fetch presigned URL: {response.json()['message']}")
122
- return None
123
-
124
- def _put_presigned_url(self, presignedUrl, filename):
125
- headers = {
126
- "Content-Type": "application/json",
127
- }
128
-
129
- if "blob.core.windows.net" in presignedUrl: # Azure
130
- headers["x-ms-blob-type"] = "BlockBlob"
131
- # print(f"Uploading traces...")
132
- with open(filename) as f:
133
- payload = f.read().replace("\n", "").replace("\r", "").encode()
134
-
135
-
136
- response = requests.request("PUT",
137
- presignedUrl,
138
- headers=headers,
139
- data=payload,
140
- timeout=self.timeout)
141
- if response.status_code != 200 or response.status_code != 201:
142
- return response, response.status_code
143
-
144
- def _insert_traces(self, presignedUrl):
145
- headers = {
146
- "Authorization": f"Bearer {os.getenv('RAGAAI_CATALYST_TOKEN')}",
147
- "Content-Type": "application/json",
148
- "X-Project-Name": self.project_name,
149
- }
150
- payload = json.dumps({
151
- "datasetName": self.dataset_name,
152
- "presignedUrl": presignedUrl,
153
- })
154
- response = requests.request("POST",
155
- f"{self.base_url}/v1/llm/insert/trace",
156
- headers=headers,
157
- data=payload,
158
- timeout=self.timeout)
159
-
160
- def upload_traces(self, additional_metadata_keys=None, additional_pipeline_keys=None):
161
- try:
162
- self._create_dataset_schema_with_trace(additional_metadata_keys, additional_pipeline_keys)
163
- presignedUrl = self._get_presigned_url()
164
- if presignedUrl is None:
165
- return
166
- self._put_presigned_url(presignedUrl, self.json_file_path)
167
- self._insert_traces(presignedUrl)
168
- # print("Traces uploaded")
169
- except Exception as e:
170
- print(f"Error while uploading rag traces: {e}")
@@ -1,62 +0,0 @@
1
- import json
2
-
3
- def convert_langchain_callbacks_output(result, project_name="", metadata="", pipeline=""):
4
- initial_struc = [{
5
- "project_name": project_name,
6
- "trace_id": result["trace_id"],
7
- "session_id": "NA",
8
- "metadata" : metadata,
9
- "pipeline" : pipeline,
10
- "traces" : []
11
- }]
12
- traces_data = []
13
-
14
- prompt = result["data"]["prompt"]
15
- response = result["data"]["response"]
16
- context = result["data"]["context"]
17
- final_prompt = ""
18
-
19
- prompt_structured_data = {
20
- "traceloop.entity.input": json.dumps({
21
- "kwargs": {
22
- "input": prompt,
23
- }
24
- })
25
- }
26
- prompt_data = {
27
- "name": "retrieve_documents.langchain.workflow",
28
- "attributes": prompt_structured_data,
29
- }
30
-
31
- traces_data.append(prompt_data)
32
-
33
- context_structured_data = {
34
- "traceloop.entity.input": json.dumps({
35
- "kwargs": {
36
- "context": context
37
- }
38
- }),
39
- "traceloop.entity.output": json.dumps({
40
- "kwargs": {
41
- "text": prompt
42
- }
43
- })
44
- }
45
- context_data = {
46
- "name": "PromptTemplate.langchain.task",
47
- "attributes": context_structured_data,
48
- }
49
- traces_data.append(context_data)
50
-
51
- response_structured_data = {"gen_ai.completion.0.content": response,
52
- "gen_ai.prompt.0.content": prompt}
53
- response_data = {
54
- "name": "ChatOpenAI.langchain.task",
55
- "attributes" : response_structured_data
56
- }
57
- traces_data.append(response_data)
58
-
59
- initial_struc[0]["traces"] = traces_data
60
-
61
- initial_struc[0]["error"] = result["error"]
62
- return initial_struc