ragaai-catalyst 2.1.5b6__py3-none-any.whl → 2.1.5b7__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
ragaai_catalyst/tracers/tracer.py (new file)
@@ -0,0 +1,469 @@
+ from audioop import add
+ import os
+ import uuid
+ import datetime
+ import logging
+ import asyncio
+ import aiohttp
+ import requests
+ from contextlib import contextmanager
+ from concurrent.futures import ThreadPoolExecutor
+ from ragaai_catalyst.tracers.langchain_callback import LangchainTracer
+ from ragaai_catalyst.tracers.utils.convert_langchain_callbacks_output import convert_langchain_callbacks_output
+
+ from ragaai_catalyst.tracers.utils.langchain_tracer_extraction_logic import langchain_tracer_extraction
+ from ragaai_catalyst.tracers.upload_traces import UploadTraces
+ import tempfile
+ import json
+
+ from opentelemetry.sdk import trace as trace_sdk
+ from opentelemetry.sdk.trace.export import SimpleSpanProcessor
+ from ragaai_catalyst.tracers.exporters.file_span_exporter import FileSpanExporter
+ from ragaai_catalyst.tracers.exporters.raga_exporter import RagaExporter
+ from ragaai_catalyst.tracers.instrumentators import (
+     LangchainInstrumentor,
+     OpenAIInstrumentor,
+     LlamaIndexInstrumentor,
+ )
+ from ragaai_catalyst.tracers.utils import get_unique_key
+ # from ragaai_catalyst.tracers.llamaindex_callback import LlamaIndexTracer
+ from ragaai_catalyst import RagaAICatalyst
+ from ragaai_catalyst.tracers.agentic_tracing import AgenticTracing, TrackName
+ from ragaai_catalyst.tracers.agentic_tracing.tracers.llm_tracer import LLMTracerMixin
+ from ragaai_catalyst.tracers.agentic_tracing.utils.trace_utils import load_model_costs, update_model_costs_from_github
+
+ logger = logging.getLogger(__name__)
+
+ class Tracer(AgenticTracing):
+     NUM_PROJECTS = 100
+     TIMEOUT = 10
+     def __init__(
+         self,
+         project_name,
+         dataset_name,
+         trace_name=None,
+         tracer_type=None,
+         pipeline=None,
+         metadata=None,
+         description=None,
+         upload_timeout=30, # Default timeout of 30 seconds
+         update_llm_cost=True, # Parameter to control model cost updates
+         auto_instrumentation={ # to control automatic instrumentation of different components
+             'llm':True,
+             'tool':True,
+             'agent':True,
+             'user_interaction':True,
+             'file_io':True,
+             'network':True,
+             'custom':True
+         },
+         interval_time=2,
+         # auto_instrumentation=True/False # to control automatic instrumentation of everything
+
+     ):
+         """
+         Initializes a Tracer object.
+
+         Args:
+             project_name (str): The name of the project.
+             dataset_name (str): The name of the dataset.
+             tracer_type (str, optional): The type of tracer. Defaults to None.
+             pipeline (dict, optional): The pipeline configuration. Defaults to None.
+             metadata (dict, optional): The metadata. Defaults to None.
+             description (str, optional): The description. Defaults to None.
+             upload_timeout (int, optional): The upload timeout in seconds. Defaults to 30.
+             update_llm_cost (bool, optional): Whether to update model costs from GitHub. Defaults to True.
+         """
+
+         user_detail = {
+             "project_name": project_name,
+             "project_id": None, # Will be set after project validation
+             "dataset_name": dataset_name,
+             "interval_time": interval_time,
+             "trace_name": trace_name if trace_name else f"trace_{datetime.datetime.now().strftime('%Y%m%d%H%M%S')}",
+             "trace_user_detail": {"metadata": metadata} if metadata else {}
+         }
+
+         # take care of auto_instrumentation
+         if isinstance(auto_instrumentation, bool):
+             if auto_instrumentation:
+                 auto_instrumentation = {
+                     "llm": True,
+                     "tool": True,
+                     "agent": True,
+                     "user_interaction": True,
+                     "file_io": True,
+                     "network": True,
+                     "custom": True
+                 }
+             else:
+                 auto_instrumentation = {
+                     "llm": False,
+                     "tool": False,
+                     "agent": False,
+                     "user_interaction": False,
+                     "file_io": False,
+                     "network": False,
+                     "custom": False
+                 }
+         elif isinstance(auto_instrumentation, dict):
+             auto_instrumentation = {k: v for k, v in auto_instrumentation.items()}
+             for key in ["llm", "tool", "agent", "user_interaction", "file_io", "network", "custom"]:
+                 if key not in auto_instrumentation:
+                     auto_instrumentation[key] = True
+
+         super().__init__(user_detail=user_detail, auto_instrumentation=auto_instrumentation)
+
+         self.project_name = project_name
+         self.dataset_name = dataset_name
+         self.tracer_type = tracer_type
+         self.metadata = self._improve_metadata(metadata, tracer_type)
+         # self.metadata["total_cost"] = 0.0
+         # self.metadata["total_tokens"] = 0
+         self.pipeline = pipeline
+         self.description = description
+         self.upload_timeout = upload_timeout
+         self.base_url = f"{RagaAICatalyst.BASE_URL}"
+         self.timeout = 30
+         self.num_projects = 100
+         self.start_time = datetime.datetime.now().astimezone().isoformat()
+         self.model_cost_dict = load_model_costs()
+
+         if update_llm_cost:
+             # First update the model costs file from GitHub
+             update_model_costs_from_github()
+
+         try:
+             response = requests.get(
+                 f"{self.base_url}/v2/llm/projects?size={self.num_projects}",
+                 headers={
+                     "Authorization": f'Bearer {os.getenv("RAGAAI_CATALYST_TOKEN")}',
+                 },
+                 timeout=self.timeout,
+             )
+             response.raise_for_status()
+             logger.debug("Projects list retrieved successfully")
+
+             project_list = [
+                 project["name"] for project in response.json()["data"]["content"]
+             ]
+             if project_name not in project_list:
+                 raise ValueError("Project not found. Please enter a valid project name")
+
+             self.project_id = [
+                 project["id"] for project in response.json()["data"]["content"] if project["name"] == project_name
+             ][0]
+             # super().__init__(user_detail=self._pass_user_data())
+             # self.file_tracker = TrackName()
+             self._pass_user_data()
+
+         except requests.exceptions.RequestException as e:
+             logger.error(f"Failed to retrieve projects list: {e}")
+             raise
+
+         if tracer_type == "langchain":
+             # self.raga_client = RagaExporter(project_name=self.project_name, dataset_name=self.dataset_name)
+
+             # self._tracer_provider = self._setup_provider()
+             # self._instrumentor = self._setup_instrumentor(tracer_type)
+             # self.is_instrumented = False
+             # self._upload_task = None
+             self._upload_task = None
+         elif tracer_type == "llamaindex":
+             self._upload_task = None
+             from ragaai_catalyst.tracers.llamaindex_callback import LlamaIndexTracer
+
+         else:
+             self._upload_task = None
+             # raise ValueError (f"Currently supported tracer types are 'langchain' and 'llamaindex'.")
+
+
+     def set_dataset_name(self, dataset_name):
+         """
+         Reinitialize the Tracer with a new dataset name while keeping all other parameters the same.
+
+         Args:
+             dataset_name (str): The new dataset name to set
+         """
+         # Store current parameters
+         current_params = {
+             'project_name': self.project_name,
+             'tracer_type': self.tracer_type,
+             'pipeline': self.pipeline,
+             'metadata': self.metadata,
+             'description': self.description,
+             'upload_timeout': self.upload_timeout
+         }
+
+         # Reinitialize self with new dataset_name and stored parameters
+         self.__init__(
+             dataset_name=dataset_name,
+             **current_params
+         )
+
+     def _improve_metadata(self, metadata, tracer_type):
+         if metadata is None:
+             metadata = {}
+         metadata.setdefault("log_source", f"{tracer_type}_tracer")
+         metadata.setdefault("recorded_on", str(datetime.datetime.now()))
+         return metadata
+
+     def _add_unique_key(self, data, key_name):
+         data[key_name] = get_unique_key(data)
+         return data
+
+     def _setup_provider(self):
+         self.filespanx = FileSpanExporter(
+             project_name=self.project_name,
+             metadata=self.metadata,
+             pipeline=self.pipeline,
+             raga_client=self.raga_client,
+         )
+         tracer_provider = trace_sdk.TracerProvider()
+         tracer_provider.add_span_processor(SimpleSpanProcessor(self.filespanx))
+         return tracer_provider
+
+     def _setup_instrumentor(self, tracer_type):
+         instrumentors = {
+             "langchain": LangchainInstrumentor,
+             "openai": OpenAIInstrumentor,
+             "llama_index": LlamaIndexInstrumentor,
+         }
+         if tracer_type not in instrumentors:
+             raise ValueError(f"Invalid tracer type: {tracer_type}")
+         return instrumentors[tracer_type]().get()
+
+     @contextmanager
+     def trace(self):
+         """
+         Synchronous context manager for tracing.
+         Usage:
+             with tracer.trace():
+                 # Your code here
+         """
+         self.start()
+         try:
+             yield self
+         finally:
+             self.stop()
+
+     def start(self):
+         """Start the tracer."""
+         if self.tracer_type == "langchain":
+             # if not self.is_instrumented:
+             #     self._instrumentor().instrument(tracer_provider=self._tracer_provider)
+             #     self.is_instrumented = True
+             # print(f"Tracer started for project: {self.project_name}")
+             self.langchain_tracer = LangchainTracer()
+             return self.langchain_tracer.start()
+         elif self.tracer_type == "llamaindex":
+             from ragaai_catalyst.tracers.llamaindex_callback import LlamaIndexTracer
+             return LlamaIndexTracer(self._pass_user_data()).start()
+         else:
+             super().start()
+             return self
+
+     def stop(self):
+         """Stop the tracer and initiate trace upload."""
+         if self.tracer_type == "langchain":
+             # if not self.is_instrumented:
+             #     logger.warning("Tracer was not started. No traces to upload.")
+             #     return "No traces to upload"
+
+             # print("Stopping tracer and initiating trace upload...")
+             # self._cleanup()
+             # self._upload_task = self._run_async(self._upload_traces())
+             # self.is_active = False
+             # self.dataset_name = None
+
+             # filename = f"langchain_callback_traces.json"
+             # filepath = os.path.join(tempfile.gettempdir(), filename)
+
+             user_detail = self._pass_user_data()
+             data, additional_metadata = self.langchain_tracer.stop()
+
+             # Add cost if possible
+             if additional_metadata.get('model_name'):
+                 try:
+                     model_cost_data = self.model_cost_dict[additional_metadata['model_name']]
+                     if 'tokens' in additional_metadata and all(k in additional_metadata['tokens'] for k in ['prompt', 'completion']):
+                         prompt_cost = additional_metadata["tokens"]["prompt"]*model_cost_data["input_cost_per_token"]
+                         completion_cost = additional_metadata["tokens"]["completion"]*model_cost_data["output_cost_per_token"]
+                         additional_metadata.setdefault('cost', {})["total_cost"] = prompt_cost + completion_cost
+                     else:
+                         logger.warning("Token information missing in additional_metadata")
+                 except Exception as e:
+                     logger.warning(f"Error adding cost: {e}")
+             else:
+                 logger.debug("Model name not available in additional_metadata, skipping cost calculation")
+
+             # Safely get total tokens and cost
+             if 'tokens' in additional_metadata and 'total' in additional_metadata['tokens']:
+                 additional_metadata["total_tokens"] = additional_metadata["tokens"]["total"]
+             else:
+                 additional_metadata["total_tokens"] = 0
+                 logger.warning("Total tokens information not available")
+
+             if 'cost' in additional_metadata and 'total_cost' in additional_metadata['cost']:
+                 additional_metadata["total_cost"] = additional_metadata["cost"]["total_cost"]
+             else:
+                 additional_metadata["total_cost"] = 0.0
+                 logger.warning("Total cost information not available")
+
+             # Safely remove tokens and cost dictionaries if they exist
+             additional_metadata.pop("tokens", None)
+             additional_metadata.pop("cost", None)
+
+             # Safely merge metadata
+             combined_metadata = {}
+             if user_detail.get('trace_user_detail', {}).get('metadata'):
+                 combined_metadata.update(user_detail['trace_user_detail']['metadata'])
+             if additional_metadata:
+                 combined_metadata.update(additional_metadata)
+
+             langchain_traces = langchain_tracer_extraction(data)
+             final_result = convert_langchain_callbacks_output(langchain_traces)
+
+             # Safely set required fields in final_result
+             if final_result and isinstance(final_result, list) and len(final_result) > 0:
+                 final_result[0]['project_name'] = user_detail.get('project_name', '')
+                 final_result[0]['trace_id'] = str(uuid.uuid4())
+                 final_result[0]['session_id'] = None
+                 final_result[0]['metadata'] = combined_metadata
+                 final_result[0]['pipeline'] = user_detail.get('trace_user_detail', {}).get('pipeline')
+
+                 filepath_3 = os.path.join(os.getcwd(), "final_result.json")
+                 with open(filepath_3, 'w') as f:
+                     json.dump(final_result, f, indent=2)
+
+                 print(filepath_3)
+             else:
+                 logger.warning("No valid langchain traces found in final_result")
+
+             additional_metadata_keys = list(additional_metadata.keys()) if additional_metadata else None
+
+             UploadTraces(json_file_path=filepath_3,
+                          project_name=self.project_name,
+                          project_id=self.project_id,
+                          dataset_name=self.dataset_name,
+                          user_detail=user_detail,
+                          base_url=self.base_url
+                          ).upload_traces(additional_metadata_keys=additional_metadata_keys)
+
+             return
+
+         elif self.tracer_type == "llamaindex":
+             from ragaai_catalyst.tracers.llamaindex_callback import LlamaIndexTracer
+             return LlamaIndexTracer(self._pass_user_data()).stop()
+         else:
+             super().stop()
+
+     def get_upload_status(self):
+         """Check the status of the trace upload."""
+         if self.tracer_type == "langchain":
+             if self._upload_task is None:
+                 return "No upload task in progress."
+             if self._upload_task.done():
+                 try:
+                     result = self._upload_task.result()
+                     return f"Upload completed: {result}"
+                 except Exception as e:
+                     return f"Upload failed: {str(e)}"
+             return "Upload in progress..."
+
+     def _run_async(self, coroutine):
+         """Run an asynchronous coroutine in a separate thread."""
+         loop = asyncio.new_event_loop()
+         with ThreadPoolExecutor() as executor:
+             future = executor.submit(lambda: loop.run_until_complete(coroutine))
+             return future
+
+     async def _upload_traces(self):
+         """
+         Asynchronously uploads traces to the RagaAICatalyst server.
+
+         This function uploads the traces generated by the RagaAICatalyst client to the RagaAICatalyst server. It uses the `aiohttp` library to make an asynchronous HTTP request to the server. The function first checks if the `RAGAAI_CATALYST_TOKEN` environment variable is set. If not, it raises a `ValueError` with the message "RAGAAI_CATALYST_TOKEN not found. Cannot upload traces.".
+
+         The function then uses the `asyncio.wait_for` function to wait for the `check_and_upload_files` method of the `raga_client` object to complete. The `check_and_upload_files` method is called with the `session` object and a list of file paths to be uploaded. The `timeout` parameter is set to the value of the `upload_timeout` attribute of the `Tracer` object.
+
+         If the upload is successful, the function returns the string "Files uploaded successfully" if the `upload_stat` variable is truthy, otherwise it returns the string "No files to upload".
+
+         If the upload times out, the function returns a string with the message "Upload timed out after {self.upload_timeout} seconds".
+
+         If any other exception occurs during the upload, the function returns a string with the message "Upload failed: {str(e)}", where `{str(e)}` is the string representation of the exception.
+
+         Parameters:
+             None
+
+         Returns:
+             A string indicating the status of the upload.
+         """
+         async with aiohttp.ClientSession() as session:
+             if not os.getenv("RAGAAI_CATALYST_TOKEN"):
+                 raise ValueError(
+                     "RAGAAI_CATALYST_TOKEN not found. Cannot upload traces."
+                 )
+
+             try:
+                 upload_stat = await asyncio.wait_for(
+                     self.raga_client.check_and_upload_files(
+                         session=session,
+                         file_paths=[self.filespanx.sync_file],
+                     ),
+                     timeout=self.upload_timeout,
+                 )
+                 return (
+                     "Files uploaded successfully"
+                     if upload_stat
+                     else "No files to upload"
+                 )
+             except asyncio.TimeoutError:
+                 return f"Upload timed out after {self.upload_timeout} seconds"
+             except Exception as e:
+                 return f"Upload failed: {str(e)}"
+
+     def _cleanup(self):
+         """
+         Cleans up the tracer by uninstrumenting the instrumentor, shutting down the tracer provider,
+         and resetting the instrumentation flag. This function is called when the tracer is no longer
+         needed.
+
+         Parameters:
+             self (Tracer): The Tracer instance.
+
+         Returns:
+             None
+         """
+         if self.is_instrumented:
+             try:
+                 self._instrumentor().uninstrument()
+                 self._tracer_provider.shutdown()
+                 self.is_instrumented = False
+                 print("Tracer provider shut down successfully")
+             except Exception as e:
+                 logger.error(f"Error during tracer shutdown: {str(e)}")
+
+         # Reset instrumentation flag
+         self.is_instrumented = False
+         # Note: We're not resetting all attributes here to allow for upload status checking
+
+     def _pass_user_data(self):
+         user_detail = {
+             "project_name":self.project_name,
+             "project_id": self.project_id,
+             "dataset_name":self.dataset_name,
+             "trace_user_detail" : {
+                 "project_id": self.project_id,
+                 "trace_id": "",
+                 "session_id": None,
+                 "trace_type": self.tracer_type,
+                 "traces": [],
+                 "metadata": self.metadata,
+                 "pipeline": {
+                     "llm_model": (getattr(self, "pipeline", {}) or {}).get("llm_model", ""),
+                     "vector_store": (getattr(self, "pipeline", {}) or {}).get("vector_store", ""),
+                     "embed_model": (getattr(self, "pipeline", {}) or {}).get("embed_model", "")
+                 }
+             }
+         }
+         return user_detail
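
For orientation, the new Tracer class above validates the project name against the Catalyst backend in __init__ and then routes start()/stop() to LangChain, LlamaIndex, or the agentic base class depending on tracer_type. Below is a minimal, illustrative usage sketch; it is not taken from the package documentation. It assumes RAGAAI_CATALYST_TOKEN is set, that "my_project" already exists in Catalyst (otherwise __init__ raises), and the dataset and metadata values are placeholders.

    # Illustrative sketch only; names are placeholders and a reachable Catalyst backend is required.
    from ragaai_catalyst.tracers.tracer import Tracer  # module path as listed in the RECORD below

    tracer = Tracer(
        project_name="my_project",        # must match an existing Catalyst project
        dataset_name="my_dataset",
        tracer_type="langchain",          # selects the LangchainTracer branch of start()/stop()
        metadata={"environment": "dev"},  # merged into the uploaded trace metadata
    )

    with tracer.trace():                  # synchronous context manager wrapping start() ... stop()
        pass                              # the LangChain calls being traced would go here

    print(tracer.get_upload_status())     # inspects self._upload_task, if an async upload was scheduled
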

dist-info/METADATA
@@ -1,6 +1,6 @@
  Metadata-Version: 2.2
  Name: ragaai_catalyst
- Version: 2.1.5b6
+ Version: 2.1.5b7
  Summary: RAGA AI CATALYST
  Author-email: Kiran Scaria <kiran.scaria@raga.ai>, Kedar Gaikwad <kedar.gaikwad@raga.ai>, Dushyant Mahajan <dushyant.mahajan@raga.ai>, Siddhartha Kosti <siddhartha.kosti@raga.ai>, Ritika Goel <ritika.goel@raga.ai>, Vijay Chaurasia <vijay.chaurasia@raga.ai>
  Requires-Python: <3.13,>=3.9

dist-info/RECORD
@@ -15,6 +15,7 @@ ragaai_catalyst/tracers/__init__.py,sha256=LfgTes-nHpazssbGKnn8kyLZNr49kIPrlkrqq
  ragaai_catalyst/tracers/distributed.py,sha256=AIRvS5Ur4jbFDXsUkYuCTmtGoHHx3LOG4n5tWOh610U,10330
  ragaai_catalyst/tracers/langchain_callback.py,sha256=LvMBhgvAX8ftyBQ9Naeui46EoDa2nHQZq48Ra6nL-Qg,21991
  ragaai_catalyst/tracers/llamaindex_callback.py,sha256=ZY0BJrrlz-P9Mg2dX-ZkVKG3gSvzwqBtk7JL_05MiYA,14028
+ ragaai_catalyst/tracers/tracer.py,sha256=bLgO3lQmoumo-JtqZFi4DUqhu9itM5GaLFNY7hmsI1g,20267
  ragaai_catalyst/tracers/upload_traces.py,sha256=2TWdRTN6FMaX-dqDv8BJWQS0xrCGYKkXEYOi2kK3Z3Y,5487
  ragaai_catalyst/tracers/agentic_tracing/README.md,sha256=X4QwLb7-Jg7GQMIXj-SerZIgDETfw-7VgYlczOR8ZeQ,4508
  ragaai_catalyst/tracers/agentic_tracing/__init__.py,sha256=yf6SKvOPSpH-9LiKaoLKXwqj5sez8F_5wkOb91yp0oE,260
@@ -64,8 +65,8 @@ ragaai_catalyst/tracers/utils/__init__.py,sha256=KeMaZtYaTojilpLv65qH08QmpYclfpa
  ragaai_catalyst/tracers/utils/convert_langchain_callbacks_output.py,sha256=ofrNrxf2b1hpjDh_zeaxiYq86azn1MF3kW8-ViYPEg0,1641
  ragaai_catalyst/tracers/utils/langchain_tracer_extraction_logic.py,sha256=cghjCuUe8w-2MZdh9xgtRGe3y219u26GGzpnuY4Wt6Q,3047
  ragaai_catalyst/tracers/utils/utils.py,sha256=ViygfJ7vZ7U0CTSA1lbxVloHp4NSlmfDzBRNCJuMhis,2374
- ragaai_catalyst-2.1.5b6.dist-info/LICENSE,sha256=xx0jnfkXJvxRnG63LTGOxlggYnIysveWIZ6H3PNdCrQ,11357
- ragaai_catalyst-2.1.5b6.dist-info/METADATA,sha256=9wEYmF1UNiH1Py1WJeM_pf5qL71wfyzoXWvUsHtyj6Y,12764
- ragaai_catalyst-2.1.5b6.dist-info/WHEEL,sha256=In9FTNxeP60KnTkGw7wk6mJPYd_dQSjEZmXdBdMCI-8,91
- ragaai_catalyst-2.1.5b6.dist-info/top_level.txt,sha256=HpgsdRgEJMk8nqrU6qdCYk3di7MJkDL0B19lkc7dLfM,16
- ragaai_catalyst-2.1.5b6.dist-info/RECORD,,
+ ragaai_catalyst-2.1.5b7.dist-info/LICENSE,sha256=xx0jnfkXJvxRnG63LTGOxlggYnIysveWIZ6H3PNdCrQ,11357
+ ragaai_catalyst-2.1.5b7.dist-info/METADATA,sha256=cVHEchxtHjkR_9AKHeDlqTpZqYNMSoF5gKR37D9icAY,12764
+ ragaai_catalyst-2.1.5b7.dist-info/WHEEL,sha256=In9FTNxeP60KnTkGw7wk6mJPYd_dQSjEZmXdBdMCI-8,91
+ ragaai_catalyst-2.1.5b7.dist-info/top_level.txt,sha256=HpgsdRgEJMk8nqrU6qdCYk3di7MJkDL0B19lkc7dLfM,16
+ ragaai_catalyst-2.1.5b7.dist-info/RECORD,,