ragaai-catalyst 2.1.5b8__py3-none-any.whl → 2.1.5b10__py3-none-any.whl

This diff shows the contents of publicly released versions of the package as published to a supported registry. It is provided for informational purposes only and reflects the changes between the two versions as they appear in that registry.
ragaai_catalyst/tracers/agentic_tracing/utils/model_costs.json

@@ -14,7 +14,8 @@
  "supports_audio_output": true,
  "supports_prompt_caching": true,
  "supports_response_schema": true,
- "supports_system_messages": true
+ "supports_system_messages": true,
+ "deprecation_date": "date when the model becomes deprecated in the format YYYY-MM-DD"
  },
  "omni-moderation-latest": {
  "max_tokens": 32768,

@@ -441,7 +442,8 @@
  "mode": "chat",
  "supports_function_calling": true,
  "supports_prompt_caching": true,
- "supports_system_messages": true
+ "supports_system_messages": true,
+ "deprecation_date": "2025-06-06"
  },
  "gpt-4-32k": {
  "max_tokens": 4096,

@@ -540,7 +542,8 @@
  "mode": "chat",
  "supports_vision": true,
  "supports_prompt_caching": true,
- "supports_system_messages": true
+ "supports_system_messages": true,
+ "deprecation_date": "2024-12-06"
  },
  "gpt-4-1106-vision-preview": {
  "max_tokens": 4096,

@@ -552,7 +555,8 @@
  "mode": "chat",
  "supports_vision": true,
  "supports_prompt_caching": true,
- "supports_system_messages": true
+ "supports_system_messages": true,
+ "deprecation_date": "2024-12-06"
  },
  "gpt-3.5-turbo": {
  "max_tokens": 4097,

@@ -1223,7 +1227,8 @@
  "litellm_provider": "azure",
  "mode": "chat",
  "supports_function_calling": true,
- "supports_parallel_function_calling": true
+ "supports_parallel_function_calling": true,
+ "deprecation_date": "2025-03-31"
  },
  "azure/gpt-35-turbo-0613": {
  "max_tokens": 4097,

@@ -1234,7 +1239,8 @@
  "litellm_provider": "azure",
  "mode": "chat",
  "supports_function_calling": true,
- "supports_parallel_function_calling": true
+ "supports_parallel_function_calling": true,
+ "deprecation_date": "2025-02-13"
  },
  "azure/gpt-35-turbo-0301": {
  "max_tokens": 4097,

@@ -1245,7 +1251,8 @@
  "litellm_provider": "azure",
  "mode": "chat",
  "supports_function_calling": true,
- "supports_parallel_function_calling": true
+ "supports_parallel_function_calling": true,
+ "deprecation_date": "2025-02-13"
  },
  "azure/gpt-35-turbo-0125": {
  "max_tokens": 4096,

@@ -1256,7 +1263,8 @@
  "litellm_provider": "azure",
  "mode": "chat",
  "supports_function_calling": true,
- "supports_parallel_function_calling": true
+ "supports_parallel_function_calling": true,
+ "deprecation_date": "2025-03-31"
  },
  "azure/gpt-35-turbo-16k": {
  "max_tokens": 4096,
@@ -2042,6 +2050,84 @@
  "supports_function_calling": true,
  "supports_vision": true
  },
+ "xai/grok-2-vision-1212": {
+ "max_tokens": 32768,
+ "max_input_tokens": 32768,
+ "max_output_tokens": 32768,
+ "input_cost_per_token": 2e-06,
+ "input_cost_per_image": 2e-06,
+ "output_cost_per_token": 1e-05,
+ "litellm_provider": "xai",
+ "mode": "chat",
+ "supports_function_calling": true,
+ "supports_vision": true
+ },
+ "xai/grok-2-vision-latest": {
+ "max_tokens": 32768,
+ "max_input_tokens": 32768,
+ "max_output_tokens": 32768,
+ "input_cost_per_token": 2e-06,
+ "input_cost_per_image": 2e-06,
+ "output_cost_per_token": 1e-05,
+ "litellm_provider": "xai",
+ "mode": "chat",
+ "supports_function_calling": true,
+ "supports_vision": true
+ },
+ "xai/grok-2-vision": {
+ "max_tokens": 32768,
+ "max_input_tokens": 32768,
+ "max_output_tokens": 32768,
+ "input_cost_per_token": 2e-06,
+ "input_cost_per_image": 2e-06,
+ "output_cost_per_token": 1e-05,
+ "litellm_provider": "xai",
+ "mode": "chat",
+ "supports_function_calling": true,
+ "supports_vision": true
+ },
+ "xai/grok-vision-beta": {
+ "max_tokens": 8192,
+ "max_input_tokens": 8192,
+ "max_output_tokens": 8192,
+ "input_cost_per_token": 5e-06,
+ "input_cost_per_image": 5e-06,
+ "output_cost_per_token": 1.5e-05,
+ "litellm_provider": "xai",
+ "mode": "chat",
+ "supports_function_calling": true,
+ "supports_vision": true
+ },
+ "xai/grok-2-1212": {
+ "max_tokens": 131072,
+ "max_input_tokens": 131072,
+ "max_output_tokens": 131072,
+ "input_cost_per_token": 2e-06,
+ "output_cost_per_token": 1e-05,
+ "litellm_provider": "xai",
+ "mode": "chat",
+ "supports_function_calling": true
+ },
+ "xai/grok-2": {
+ "max_tokens": 131072,
+ "max_input_tokens": 131072,
+ "max_output_tokens": 131072,
+ "input_cost_per_token": 2e-06,
+ "output_cost_per_token": 1e-05,
+ "litellm_provider": "xai",
+ "mode": "chat",
+ "supports_function_calling": true
+ },
+ "xai/grok-2-latest": {
+ "max_tokens": 131072,
+ "max_input_tokens": 131072,
+ "max_output_tokens": 131072,
+ "input_cost_per_token": 2e-06,
+ "output_cost_per_token": 1e-05,
+ "litellm_provider": "xai",
+ "mode": "chat",
+ "supports_function_calling": true
+ },
  "deepseek/deepseek-coder": {
  "max_tokens": 4096,
  "max_input_tokens": 128000,
@@ -2353,7 +2439,8 @@
  "tool_use_system_prompt_tokens": 264,
  "supports_assistant_prefill": true,
  "supports_prompt_caching": true,
- "supports_response_schema": true
+ "supports_response_schema": true,
+ "deprecation_date": "2025-03-01"
  },
  "claude-3-5-haiku-20241022": {
  "max_tokens": 8192,

@@ -2369,7 +2456,8 @@
  "tool_use_system_prompt_tokens": 264,
  "supports_assistant_prefill": true,
  "supports_prompt_caching": true,
- "supports_response_schema": true
+ "supports_response_schema": true,
+ "deprecation_date": "2025-10-01"
  },
  "claude-3-opus-20240229": {
  "max_tokens": 4096,

@@ -2386,7 +2474,8 @@
  "tool_use_system_prompt_tokens": 395,
  "supports_assistant_prefill": true,
  "supports_prompt_caching": true,
- "supports_response_schema": true
+ "supports_response_schema": true,
+ "deprecation_date": "2025-03-01"
  },
  "claude-3-sonnet-20240229": {
  "max_tokens": 4096,

@@ -2401,7 +2490,8 @@
  "tool_use_system_prompt_tokens": 159,
  "supports_assistant_prefill": true,
  "supports_prompt_caching": true,
- "supports_response_schema": true
+ "supports_response_schema": true,
+ "deprecation_date": "2025-07-21"
  },
  "claude-3-5-sonnet-20240620": {
  "max_tokens": 8192,

@@ -2418,7 +2508,8 @@
  "tool_use_system_prompt_tokens": 159,
  "supports_assistant_prefill": true,
  "supports_prompt_caching": true,
- "supports_response_schema": true
+ "supports_response_schema": true,
+ "deprecation_date": "2025-06-01"
  },
  "claude-3-5-sonnet-20241022": {
  "max_tokens": 8192,

@@ -2436,7 +2527,8 @@
  "supports_assistant_prefill": true,
  "supports_pdf_input": true,
  "supports_prompt_caching": true,
- "supports_response_schema": true
+ "supports_response_schema": true,
+ "deprecation_date": "2025-10-01"
  },
  "text-bison": {
  "max_tokens": 2048,

@@ -2546,7 +2638,8 @@
  "output_cost_per_character": 5e-07,
  "litellm_provider": "vertex_ai-chat-models",
  "mode": "chat",
- "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models"
+ "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models",
+ "deprecation_date": "2025-04-09"
  },
  "chat-bison-32k": {
  "max_tokens": 8192,

@@ -2787,7 +2880,8 @@
  "litellm_provider": "vertex_ai-language-models",
  "mode": "chat",
  "supports_function_calling": true,
- "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models"
+ "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models",
+ "deprecation_date": "2025-04-09"
  },
  "gemini-1.0-ultra": {
  "max_tokens": 8192,

@@ -2832,7 +2926,8 @@
  "litellm_provider": "vertex_ai-language-models",
  "mode": "chat",
  "supports_function_calling": true,
- "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models"
+ "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models",
+ "deprecation_date": "2025-04-09"
  },
  "gemini-1.5-pro": {
  "max_tokens": 8192,

@@ -2914,7 +3009,8 @@
  "supports_function_calling": true,
  "supports_tool_choice": true,
  "supports_response_schema": true,
- "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models"
+ "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models",
+ "deprecation_date": "2025-05-24"
  },
  "gemini-1.5-pro-preview-0514": {
  "max_tokens": 8192,

@@ -3119,7 +3215,8 @@
  "supports_function_calling": true,
  "supports_vision": true,
  "supports_response_schema": true,
- "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models"
+ "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models",
+ "deprecation_date": "2025-05-24"
  },
  "gemini-1.5-flash-preview-0514": {
  "max_tokens": 8192,

@@ -3223,7 +3320,8 @@
  "mode": "chat",
  "supports_function_calling": true,
  "supports_vision": true,
- "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models"
+ "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models",
+ "deprecation_date": "2025-04-09"
  },
  "medlm-medium": {
  "max_tokens": 8192,

@@ -3929,7 +4027,8 @@
  "supports_prompt_caching": true,
  "tpm": 4000000,
  "rpm": 2000,
- "source": "https://ai.google.dev/pricing"
+ "source": "https://ai.google.dev/pricing",
+ "deprecation_date": "2025-05-24"
  },
  "gemini/gemini-1.5-flash": {
  "max_tokens": 8192,

@@ -4205,7 +4304,8 @@
  "supports_prompt_caching": true,
  "tpm": 4000000,
  "rpm": 1000,
- "source": "https://ai.google.dev/pricing"
+ "source": "https://ai.google.dev/pricing",
+ "deprecation_date": "2025-05-24"
  },
  "gemini/gemini-1.5-pro-exp-0801": {
  "max_tokens": 8192,

@@ -7341,7 +7441,8 @@
  "input_cost_per_token": 5e-06,
  "output_cost_per_token": 5e-06,
  "litellm_provider": "perplexity",
- "mode": "chat"
+ "mode": "chat",
+ "deprecation_date": "2025-02-22"
  },
  "perplexity/llama-3.1-sonar-large-128k-online": {
  "max_tokens": 127072,

@@ -7350,7 +7451,8 @@
  "input_cost_per_token": 1e-06,
  "output_cost_per_token": 1e-06,
  "litellm_provider": "perplexity",
- "mode": "chat"
+ "mode": "chat",
+ "deprecation_date": "2025-02-22"
  },
  "perplexity/llama-3.1-sonar-large-128k-chat": {
  "max_tokens": 131072,

@@ -7359,7 +7461,8 @@
  "input_cost_per_token": 1e-06,
  "output_cost_per_token": 1e-06,
  "litellm_provider": "perplexity",
- "mode": "chat"
+ "mode": "chat",
+ "deprecation_date": "2025-02-22"
  },
  "perplexity/llama-3.1-sonar-small-128k-chat": {
  "max_tokens": 131072,

@@ -7368,7 +7471,8 @@
  "input_cost_per_token": 2e-07,
  "output_cost_per_token": 2e-07,
  "litellm_provider": "perplexity",
- "mode": "chat"
+ "mode": "chat",
+ "deprecation_date": "2025-02-22"
  },
  "perplexity/llama-3.1-sonar-small-128k-online": {
  "max_tokens": 127072,

@@ -7377,7 +7481,8 @@
  "input_cost_per_token": 2e-07,
  "output_cost_per_token": 2e-07,
  "litellm_provider": "perplexity",
- "mode": "chat"
+ "mode": "chat",
+ "deprecation_date": "2025-02-22"
  },
  "perplexity/pplx-7b-chat": {
  "max_tokens": 8192,
ragaai_catalyst/tracers/langchain_callback.py

@@ -175,10 +175,12 @@ class LangchainTracer(BaseCallbackHandler):
  kwargs_copy['callbacks'].append(self)

  # Store model name if available
- if component_name in ["OpenAI", "ChatOpenAI_LangchainOpenAI", "ChatOpenAI_ChatModels",
- "ChatVertexAI", "ChatGoogleGenerativeAI", "ChatAnthropic", "ChatLiteLLM"]:
+ if component_name in ["OpenAI", "ChatOpenAI_LangchainOpenAI", "ChatOpenAI_ChatModels",
+ "ChatVertexAI", "VertexAI", "ChatGoogleGenerativeAI", "ChatAnthropic",
+ "ChatLiteLLM", "ChatBedrock"]:
  instance = args[0] if args else None
- model_name = kwargs.get('model_name') or kwargs.get('model')
+ model_name = kwargs.get('model_name') or kwargs.get('model') or kwargs.get('model_id')
+
  if instance and model_name:
  self.model_names[id(instance)] = model_name

@@ -217,12 +219,19 @@ class LangchainTracer(BaseCallbackHandler):
  components_to_patch["OpenAI"] = (OpenAI, "__init__")
  except ImportError:
  logger.debug("OpenAI not available for patching")
+
+ try:
+ from langchain_aws import ChatBedrock
+ components_to_patch["ChatBedrock"] = (ChatBedrock, "__init__")
+ except ImportError:
+ logger.debug("ChatBedrock not available for patching")

  try:
- from langchain_google_vertexai import ChatVertexAI
+ from langchain_google_vertexai import ChatVertexAI, VertexAI
  components_to_patch["ChatVertexAI"] = (ChatVertexAI, "__init__")
+ components_to_patch["VertexAI"] = (VertexAI, "__init__")
  except ImportError:
- logger.debug("ChatVertexAI not available for patching")
+ logger.debug("ChatVertexAI/VertexAI not available for patching")

  try:
  from langchain_google_genai import ChatGoogleGenerativeAI
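This hunk registers two more constructors, ChatBedrock from langchain_aws and the non-chat VertexAI class, in components_to_patch, which the tracer uses to wrap each class's __init__ and inject itself as a callback. The snippet below is a minimal sketch of that constructor-patching idea under assumed names (patch_init is not the package's actual method); the real code also copies kwargs and records model names, as the earlier hunk shows.

import functools
import logging

logger = logging.getLogger(__name__)

def patch_init(cls, tracer):
    # Wrap cls.__init__ so every new instance gets the tracer appended to its callbacks.
    original_init = cls.__init__

    @functools.wraps(original_init)
    def wrapped_init(self, *args, **kwargs):
        callbacks = list(kwargs.get("callbacks") or [])
        if tracer not in callbacks:
            callbacks.append(tracer)
        kwargs["callbacks"] = callbacks
        original_init(self, *args, **kwargs)

    cls.__init__ = wrapped_init

components_to_patch = {}
try:
    from langchain_aws import ChatBedrock
    components_to_patch["ChatBedrock"] = (ChatBedrock, "__init__")
except ImportError:
    logger.debug("ChatBedrock not available for patching")
# In the real tracer, each registered class is then passed to the patching step
# with the callback handler itself as `tracer`.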
@@ -298,6 +307,9 @@ class LangchainTracer(BaseCallbackHandler):
  elif name == "ChatVertexAI":
  from langchain_google_vertexai import ChatVertexAI
  imported_components[name] = ChatVertexAI
+ elif name == "VertexAI":
+ from langchain_google_vertexai import VertexAI
+ imported_components[name] = VertexAI
  elif name == "ChatGoogleGenerativeAI":
  from langchain_google_genai import ChatGoogleGenerativeAI
  imported_components[name] = ChatGoogleGenerativeAI
@@ -487,9 +499,16 @@ class LangchainTracer(BaseCallbackHandler):
  total_tokens = prompt_tokens + completion_tokens
  except Exception as e:
  # logger.debug(f"Error getting usage data: {e}")
- prompt_tokens = 0
- completion_tokens = 0
- total_tokens = 0
+ try:
+ usage_data = response.generations[0][0].generation_info['usage_metadata']
+ prompt_tokens = usage_data.get("prompt_token_count", 0)
+ completion_tokens = usage_data.get("candidates_token_count", 0)
+ total_tokens = prompt_tokens + completion_tokens
+ except Exception as e:
+ # logger.debug(f"Error getting token usage: {e}")
+ prompt_tokens = 0
+ completion_tokens = 0
+ total_tokens = 0

  # If no model name in llm_output, try to get it from stored model names
  try:
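The widened except block adds a second attempt at token accounting: when llm_output carries no usage data, the handler now reads Gemini/Vertex-style counts from generation_info['usage_metadata'] on the first generation before falling back to zeros. A standalone sketch of the same cascade follows; the shape of the primary llm_output path is an assumption, since it sits above this hunk.

def extract_token_usage(response):
    # Sketch of the fallback order: llm_output usage first (assumed keys),
    # then Gemini/Vertex usage_metadata on the first generation, else zeros.
    try:
        usage = response.llm_output["token_usage"]
        prompt = usage.get("prompt_tokens", 0)
        completion = usage.get("completion_tokens", 0)
    except Exception:
        try:
            meta = response.generations[0][0].generation_info["usage_metadata"]
            prompt = meta.get("prompt_token_count", 0)
            completion = meta.get("candidates_token_count", 0)
        except Exception:
            prompt = completion = 0
    return prompt, completion, prompt + completion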
@@ -497,7 +516,7 @@ class LangchainTracer(BaseCallbackHandler):
  model = list(self.model_names.values())[0]
  except Exception as e:
  model=""
-
+
  self.additional_metadata = {
  'latency': latency,
  'model_name': model,
ragaai_catalyst-2.1.5b8.dist-info/METADATA → ragaai_catalyst-2.1.5b10.dist-info/METADATA

@@ -1,6 +1,6 @@
  Metadata-Version: 2.2
  Name: ragaai_catalyst
- Version: 2.1.5b8
+ Version: 2.1.5b10
  Summary: RAGA AI CATALYST
  Author-email: Kiran Scaria <kiran.scaria@raga.ai>, Kedar Gaikwad <kedar.gaikwad@raga.ai>, Dushyant Mahajan <dushyant.mahajan@raga.ai>, Siddhartha Kosti <siddhartha.kosti@raga.ai>, Ritika Goel <ritika.goel@raga.ai>, Vijay Chaurasia <vijay.chaurasia@raga.ai>
  Requires-Python: <3.13,>=3.9
ragaai_catalyst-2.1.5b8.dist-info/RECORD → ragaai_catalyst-2.1.5b10.dist-info/RECORD

@@ -13,7 +13,7 @@ ragaai_catalyst/synthetic_data_generation.py,sha256=uDV9tNwto2xSkWg5XHXUvjErW-4P
  ragaai_catalyst/utils.py,sha256=TlhEFwLyRU690HvANbyoRycR3nQ67lxVUQoUOfTPYQ0,3772
  ragaai_catalyst/tracers/__init__.py,sha256=LfgTes-nHpazssbGKnn8kyLZNr49kIPrlkrqqoTFTfc,301
  ragaai_catalyst/tracers/distributed.py,sha256=AIRvS5Ur4jbFDXsUkYuCTmtGoHHx3LOG4n5tWOh610U,10330
- ragaai_catalyst/tracers/langchain_callback.py,sha256=lLeED0Eg2kT4-_O9IUw3pAyi_Hm4AaX57VfeSiOwaUw,28134
+ ragaai_catalyst/tracers/langchain_callback.py,sha256=t5s9JD6HdzJRnAd9Zp8qLppCY6azz11hqHAykkNq9zw,29204
  ragaai_catalyst/tracers/llamaindex_callback.py,sha256=ZY0BJrrlz-P9Mg2dX-ZkVKG3gSvzwqBtk7JL_05MiYA,14028
  ragaai_catalyst/tracers/tracer.py,sha256=Yq2HhgT4785t9573kksJ7ngM3qCLPgZbZ0IpgOHdTTo,19223
  ragaai_catalyst/tracers/upload_traces.py,sha256=2TWdRTN6FMaX-dqDv8BJWQS0xrCGYKkXEYOi2kK3Z3Y,5487

@@ -48,7 +48,7 @@ ragaai_catalyst/tracers/agentic_tracing/utils/file_name_tracker.py,sha256=515NND
  ragaai_catalyst/tracers/agentic_tracing/utils/generic.py,sha256=WwXT01xmp8MSr7KinuDCSK9a1ifpLcT7ajFkvYviG_A,1190
  ragaai_catalyst/tracers/agentic_tracing/utils/get_user_trace_metrics.py,sha256=vPZ4dn4EHFW0kqd1GyRpsYXbfrRrd0DXCmh-pzsDBNE,1109
  ragaai_catalyst/tracers/agentic_tracing/utils/llm_utils.py,sha256=wlXCuaRe81s-7FWdJ_MquXFGRZZfNrZxLIIxl-Ohbqk,15541
- ragaai_catalyst/tracers/agentic_tracing/utils/model_costs.json,sha256=E_uKa1SSrigaorCiAShZr4inKNMc54jcEy4B_7pT4DA,295002
+ ragaai_catalyst/tracers/agentic_tracing/utils/model_costs.json,sha256=kQwC8AYTfJCqPm1F_heR7FoEhIpEZgBRWvkHRncfhzU,298689
  ragaai_catalyst/tracers/agentic_tracing/utils/span_attributes.py,sha256=MqeRNGxzeuh9qTK0NbYMftl9V9Z0V7gMgBoHkrXP56k,1592
  ragaai_catalyst/tracers/agentic_tracing/utils/system_monitor.py,sha256=H8WNsk4v_5T6OUw4TFOzlDLjQhJwjh1nAMyMAoqMEi4,6946
  ragaai_catalyst/tracers/agentic_tracing/utils/trace_utils.py,sha256=RciiDdo2riibEoM8X0FKHaXi78y3bWwNkV8U0leqigk,3508

@@ -65,8 +65,8 @@ ragaai_catalyst/tracers/utils/__init__.py,sha256=KeMaZtYaTojilpLv65qH08QmpYclfpa
  ragaai_catalyst/tracers/utils/convert_langchain_callbacks_output.py,sha256=ofrNrxf2b1hpjDh_zeaxiYq86azn1MF3kW8-ViYPEg0,1641
  ragaai_catalyst/tracers/utils/langchain_tracer_extraction_logic.py,sha256=cghjCuUe8w-2MZdh9xgtRGe3y219u26GGzpnuY4Wt6Q,3047
  ragaai_catalyst/tracers/utils/utils.py,sha256=ViygfJ7vZ7U0CTSA1lbxVloHp4NSlmfDzBRNCJuMhis,2374
- ragaai_catalyst-2.1.5b8.dist-info/LICENSE,sha256=xx0jnfkXJvxRnG63LTGOxlggYnIysveWIZ6H3PNdCrQ,11357
- ragaai_catalyst-2.1.5b8.dist-info/METADATA,sha256=OaiEW7uA1wnQO562QbKGgtlZuue1PTGTjK9-AW5gkLQ,12764
- ragaai_catalyst-2.1.5b8.dist-info/WHEEL,sha256=In9FTNxeP60KnTkGw7wk6mJPYd_dQSjEZmXdBdMCI-8,91
- ragaai_catalyst-2.1.5b8.dist-info/top_level.txt,sha256=HpgsdRgEJMk8nqrU6qdCYk3di7MJkDL0B19lkc7dLfM,16
- ragaai_catalyst-2.1.5b8.dist-info/RECORD,,
+ ragaai_catalyst-2.1.5b10.dist-info/LICENSE,sha256=xx0jnfkXJvxRnG63LTGOxlggYnIysveWIZ6H3PNdCrQ,11357
+ ragaai_catalyst-2.1.5b10.dist-info/METADATA,sha256=DVGyoXWxgwks82eTOMYm_eNKxwcIvvbnuiRMA_f5EkU,12765
+ ragaai_catalyst-2.1.5b10.dist-info/WHEEL,sha256=In9FTNxeP60KnTkGw7wk6mJPYd_dQSjEZmXdBdMCI-8,91
+ ragaai_catalyst-2.1.5b10.dist-info/top_level.txt,sha256=HpgsdRgEJMk8nqrU6qdCYk3di7MJkDL0B19lkc7dLfM,16
+ ragaai_catalyst-2.1.5b10.dist-info/RECORD,,