ragaai-catalyst 2.2.4.1b3__tar.gz → 2.2.4.1b5__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (221) hide show
  1. {ragaai_catalyst-2.2.4.1b3/ragaai_catalyst.egg-info → ragaai_catalyst-2.2.4.1b5}/PKG-INFO +1 -1
  2. {ragaai_catalyst-2.2.4.1b3 → ragaai_catalyst-2.2.4.1b5}/pyproject.toml +1 -1
  3. ragaai_catalyst-2.2.4.1b5/ragaai_catalyst/tracers/agentic_tracing/upload/session_manager.py +124 -0
  4. {ragaai_catalyst-2.2.4.1b3 → ragaai_catalyst-2.2.4.1b5}/ragaai_catalyst/tracers/agentic_tracing/upload/trace_uploader.py +122 -34
  5. {ragaai_catalyst-2.2.4.1b3 → ragaai_catalyst-2.2.4.1b5}/ragaai_catalyst/tracers/agentic_tracing/upload/upload_agentic_traces.py +45 -48
  6. {ragaai_catalyst-2.2.4.1b3 → ragaai_catalyst-2.2.4.1b5}/ragaai_catalyst/tracers/agentic_tracing/upload/upload_code.py +79 -42
  7. {ragaai_catalyst-2.2.4.1b3 → ragaai_catalyst-2.2.4.1b5}/ragaai_catalyst/tracers/exporters/ragaai_trace_exporter.py +15 -40
  8. {ragaai_catalyst-2.2.4.1b3 → ragaai_catalyst-2.2.4.1b5}/ragaai_catalyst/tracers/tracer.py +1 -23
  9. {ragaai_catalyst-2.2.4.1b3 → ragaai_catalyst-2.2.4.1b5/ragaai_catalyst.egg-info}/PKG-INFO +1 -1
  10. {ragaai_catalyst-2.2.4.1b3 → ragaai_catalyst-2.2.4.1b5}/ragaai_catalyst.egg-info/SOURCES.txt +1 -0
  11. {ragaai_catalyst-2.2.4.1b3 → ragaai_catalyst-2.2.4.1b5}/.github/ISSUE_TEMPLATE/bug_report.md +0 -0
  12. {ragaai_catalyst-2.2.4.1b3 → ragaai_catalyst-2.2.4.1b5}/.github/ISSUE_TEMPLATE/feature_request.md +0 -0
  13. {ragaai_catalyst-2.2.4.1b3 → ragaai_catalyst-2.2.4.1b5}/.github/PULL_REQUEST_TEMPLATE.md +0 -0
  14. {ragaai_catalyst-2.2.4.1b3 → ragaai_catalyst-2.2.4.1b5}/.github/workflows/ci.yml +0 -0
  15. {ragaai_catalyst-2.2.4.1b3 → ragaai_catalyst-2.2.4.1b5}/.gitignore +0 -0
  16. {ragaai_catalyst-2.2.4.1b3 → ragaai_catalyst-2.2.4.1b5}/.gitmodules +0 -0
  17. {ragaai_catalyst-2.2.4.1b3 → ragaai_catalyst-2.2.4.1b5}/LICENSE +0 -0
  18. {ragaai_catalyst-2.2.4.1b3 → ragaai_catalyst-2.2.4.1b5}/Quickstart.md +0 -0
  19. {ragaai_catalyst-2.2.4.1b3 → ragaai_catalyst-2.2.4.1b5}/README.md +0 -0
  20. {ragaai_catalyst-2.2.4.1b3 → ragaai_catalyst-2.2.4.1b5}/docs/agentic_tracing.md +0 -0
  21. {ragaai_catalyst-2.2.4.1b3 → ragaai_catalyst-2.2.4.1b5}/docs/dataset_management.md +0 -0
  22. {ragaai_catalyst-2.2.4.1b3 → ragaai_catalyst-2.2.4.1b5}/docs/img/autheticate.gif +0 -0
  23. {ragaai_catalyst-2.2.4.1b3 → ragaai_catalyst-2.2.4.1b5}/docs/img/create_project.gif +0 -0
  24. {ragaai_catalyst-2.2.4.1b3 → ragaai_catalyst-2.2.4.1b5}/docs/img/custom_metrics.png +0 -0
  25. {ragaai_catalyst-2.2.4.1b3 → ragaai_catalyst-2.2.4.1b5}/docs/img/dataset.gif +0 -0
  26. {ragaai_catalyst-2.2.4.1b3 → ragaai_catalyst-2.2.4.1b5}/docs/img/dataset.png +0 -0
  27. {ragaai_catalyst-2.2.4.1b3 → ragaai_catalyst-2.2.4.1b5}/docs/img/evaluation.gif +0 -0
  28. {ragaai_catalyst-2.2.4.1b3 → ragaai_catalyst-2.2.4.1b5}/docs/img/evaluation.png +0 -0
  29. {ragaai_catalyst-2.2.4.1b3 → ragaai_catalyst-2.2.4.1b5}/docs/img/guardrails.png +0 -0
  30. {ragaai_catalyst-2.2.4.1b3 → ragaai_catalyst-2.2.4.1b5}/docs/img/last_main.png +0 -0
  31. {ragaai_catalyst-2.2.4.1b3 → ragaai_catalyst-2.2.4.1b5}/docs/img/main.png +0 -0
  32. {ragaai_catalyst-2.2.4.1b3 → ragaai_catalyst-2.2.4.1b5}/docs/img/projects_new.png +0 -0
  33. {ragaai_catalyst-2.2.4.1b3 → ragaai_catalyst-2.2.4.1b5}/docs/img/trace_comp.png +0 -0
  34. {ragaai_catalyst-2.2.4.1b3 → ragaai_catalyst-2.2.4.1b5}/docs/prompt_management.md +0 -0
  35. {ragaai_catalyst-2.2.4.1b3 → ragaai_catalyst-2.2.4.1b5}/docs/trace_management.md +0 -0
  36. {ragaai_catalyst-2.2.4.1b3 → ragaai_catalyst-2.2.4.1b5}/examples/all_llm_provider/all_llm_provider.py +0 -0
  37. {ragaai_catalyst-2.2.4.1b3 → ragaai_catalyst-2.2.4.1b5}/examples/all_llm_provider/config.py +0 -0
  38. {ragaai_catalyst-2.2.4.1b3 → ragaai_catalyst-2.2.4.1b5}/examples/all_llm_provider/run_all_llm_provider.py +0 -0
  39. {ragaai_catalyst-2.2.4.1b3 → ragaai_catalyst-2.2.4.1b5}/examples/crewai/scifi_writer/README.md +0 -0
  40. {ragaai_catalyst-2.2.4.1b3 → ragaai_catalyst-2.2.4.1b5}/examples/crewai/scifi_writer/requirements.txt +0 -0
  41. {ragaai_catalyst-2.2.4.1b3 → ragaai_catalyst-2.2.4.1b5}/examples/crewai/scifi_writer/sample.env +0 -0
  42. {ragaai_catalyst-2.2.4.1b3 → ragaai_catalyst-2.2.4.1b5}/examples/crewai/scifi_writer/scifi_writer.py +0 -0
  43. {ragaai_catalyst-2.2.4.1b3 → ragaai_catalyst-2.2.4.1b5}/examples/custom_agents/travel_agent/agents.py +0 -0
  44. {ragaai_catalyst-2.2.4.1b3 → ragaai_catalyst-2.2.4.1b5}/examples/custom_agents/travel_agent/config.py +0 -0
  45. {ragaai_catalyst-2.2.4.1b3 → ragaai_catalyst-2.2.4.1b5}/examples/custom_agents/travel_agent/main.py +0 -0
  46. {ragaai_catalyst-2.2.4.1b3 → ragaai_catalyst-2.2.4.1b5}/examples/custom_agents/travel_agent/tools.py +0 -0
  47. {ragaai_catalyst-2.2.4.1b3 → ragaai_catalyst-2.2.4.1b5}/examples/haystack/news_fetching/README.md +0 -0
  48. {ragaai_catalyst-2.2.4.1b3 → ragaai_catalyst-2.2.4.1b5}/examples/haystack/news_fetching/news_fetching.py +0 -0
  49. {ragaai_catalyst-2.2.4.1b3 → ragaai_catalyst-2.2.4.1b5}/examples/haystack/news_fetching/requirements.txt +0 -0
  50. {ragaai_catalyst-2.2.4.1b3 → ragaai_catalyst-2.2.4.1b5}/examples/langchain/medical_rag/data/medical_texts/handbook1.pdf +0 -0
  51. {ragaai_catalyst-2.2.4.1b3 → ragaai_catalyst-2.2.4.1b5}/examples/langchain/medical_rag/data/medical_texts/handbook2.pdf +0 -0
  52. {ragaai_catalyst-2.2.4.1b3 → ragaai_catalyst-2.2.4.1b5}/examples/langchain/medical_rag/data/symptom_disease_map.csv +0 -0
  53. {ragaai_catalyst-2.2.4.1b3 → ragaai_catalyst-2.2.4.1b5}/examples/langchain/medical_rag/diagnosis_agent.py +0 -0
  54. {ragaai_catalyst-2.2.4.1b3 → ragaai_catalyst-2.2.4.1b5}/examples/langchain/medical_rag/requirements.txt +0 -0
  55. {ragaai_catalyst-2.2.4.1b3 → ragaai_catalyst-2.2.4.1b5}/examples/langchain/medical_rag/sample.env +0 -0
  56. {ragaai_catalyst-2.2.4.1b3 → ragaai_catalyst-2.2.4.1b5}/examples/langgraph/personal_research_assistant/README.md +0 -0
  57. {ragaai_catalyst-2.2.4.1b3 → ragaai_catalyst-2.2.4.1b5}/examples/langgraph/personal_research_assistant/requirements.txt +0 -0
  58. {ragaai_catalyst-2.2.4.1b3 → ragaai_catalyst-2.2.4.1b5}/examples/langgraph/personal_research_assistant/research_assistant.py +0 -0
  59. {ragaai_catalyst-2.2.4.1b3 → ragaai_catalyst-2.2.4.1b5}/examples/langgraph/personal_research_assistant/sample.env +0 -0
  60. {ragaai_catalyst-2.2.4.1b3 → ragaai_catalyst-2.2.4.1b5}/examples/llamaindex_examples/legal_research_rag/legal_data/cases/ca_overtime_2021.pdf +0 -0
  61. {ragaai_catalyst-2.2.4.1b3 → ragaai_catalyst-2.2.4.1b5}/examples/llamaindex_examples/legal_research_rag/legal_data/cases/fl_ada_2022.pdf +0 -0
  62. {ragaai_catalyst-2.2.4.1b3 → ragaai_catalyst-2.2.4.1b5}/examples/llamaindex_examples/legal_research_rag/legal_data/statutes.csv +0 -0
  63. {ragaai_catalyst-2.2.4.1b3 → ragaai_catalyst-2.2.4.1b5}/examples/llamaindex_examples/legal_research_rag/legal_rag.py +0 -0
  64. {ragaai_catalyst-2.2.4.1b3 → ragaai_catalyst-2.2.4.1b5}/examples/llamaindex_examples/legal_research_rag/requirements.txt +0 -0
  65. {ragaai_catalyst-2.2.4.1b3 → ragaai_catalyst-2.2.4.1b5}/examples/llamaindex_examples/legal_research_rag/sample.env +0 -0
  66. {ragaai_catalyst-2.2.4.1b3 → ragaai_catalyst-2.2.4.1b5}/examples/openai_agents_sdk/email_data_extraction_agent/README.md +0 -0
  67. {ragaai_catalyst-2.2.4.1b3 → ragaai_catalyst-2.2.4.1b5}/examples/openai_agents_sdk/email_data_extraction_agent/data_extraction_email.py +0 -0
  68. {ragaai_catalyst-2.2.4.1b3 → ragaai_catalyst-2.2.4.1b5}/examples/openai_agents_sdk/email_data_extraction_agent/requirements.txt +0 -0
  69. {ragaai_catalyst-2.2.4.1b3 → ragaai_catalyst-2.2.4.1b5}/examples/openai_agents_sdk/email_data_extraction_agent/sample.env +0 -0
  70. {ragaai_catalyst-2.2.4.1b3 → ragaai_catalyst-2.2.4.1b5}/examples/openai_agents_sdk/youtube_summary_agent/README.md +0 -0
  71. {ragaai_catalyst-2.2.4.1b3 → ragaai_catalyst-2.2.4.1b5}/examples/openai_agents_sdk/youtube_summary_agent/requirements.txt +0 -0
  72. {ragaai_catalyst-2.2.4.1b3 → ragaai_catalyst-2.2.4.1b5}/examples/openai_agents_sdk/youtube_summary_agent/sample.env +0 -0
  73. {ragaai_catalyst-2.2.4.1b3 → ragaai_catalyst-2.2.4.1b5}/examples/openai_agents_sdk/youtube_summary_agent/youtube_summary_agent.py +0 -0
  74. {ragaai_catalyst-2.2.4.1b3 → ragaai_catalyst-2.2.4.1b5}/examples/pii_masking_example/llamaindex_agentic_fastapi/app.py +0 -0
  75. {ragaai_catalyst-2.2.4.1b3 → ragaai_catalyst-2.2.4.1b5}/examples/pii_masking_example/llamaindex_agentic_fastapi/app_presidio.py +0 -0
  76. {ragaai_catalyst-2.2.4.1b3 → ragaai_catalyst-2.2.4.1b5}/examples/pii_masking_example/llamaindex_agentic_fastapi/request.py +0 -0
  77. {ragaai_catalyst-2.2.4.1b3 → ragaai_catalyst-2.2.4.1b5}/examples/pii_masking_example/llamaindex_agentic_fastapi/requirements.txt +0 -0
  78. {ragaai_catalyst-2.2.4.1b3 → ragaai_catalyst-2.2.4.1b5}/examples/smolagents/most_upvoted_paper/README.md +0 -0
  79. {ragaai_catalyst-2.2.4.1b3 → ragaai_catalyst-2.2.4.1b5}/examples/smolagents/most_upvoted_paper/most_upvoted_paper.py +0 -0
  80. {ragaai_catalyst-2.2.4.1b3 → ragaai_catalyst-2.2.4.1b5}/examples/smolagents/most_upvoted_paper/requirements.txt +0 -0
  81. {ragaai_catalyst-2.2.4.1b3 → ragaai_catalyst-2.2.4.1b5}/examples/smolagents/most_upvoted_paper/sample.env +0 -0
  82. {ragaai_catalyst-2.2.4.1b3 → ragaai_catalyst-2.2.4.1b5}/ragaai_catalyst/__init__.py +0 -0
  83. {ragaai_catalyst-2.2.4.1b3 → ragaai_catalyst-2.2.4.1b5}/ragaai_catalyst/_version.py +0 -0
  84. {ragaai_catalyst-2.2.4.1b3 → ragaai_catalyst-2.2.4.1b5}/ragaai_catalyst/dataset.py +0 -0
  85. {ragaai_catalyst-2.2.4.1b3 → ragaai_catalyst-2.2.4.1b5}/ragaai_catalyst/evaluation.py +0 -0
  86. {ragaai_catalyst-2.2.4.1b3 → ragaai_catalyst-2.2.4.1b5}/ragaai_catalyst/experiment.py +0 -0
  87. {ragaai_catalyst-2.2.4.1b3 → ragaai_catalyst-2.2.4.1b5}/ragaai_catalyst/guard_executor.py +0 -0
  88. {ragaai_catalyst-2.2.4.1b3 → ragaai_catalyst-2.2.4.1b5}/ragaai_catalyst/guardrails_manager.py +0 -0
  89. {ragaai_catalyst-2.2.4.1b3 → ragaai_catalyst-2.2.4.1b5}/ragaai_catalyst/internal_api_completion.py +0 -0
  90. {ragaai_catalyst-2.2.4.1b3 → ragaai_catalyst-2.2.4.1b5}/ragaai_catalyst/prompt_manager.py +0 -0
  91. {ragaai_catalyst-2.2.4.1b3 → ragaai_catalyst-2.2.4.1b5}/ragaai_catalyst/proxy_call.py +0 -0
  92. {ragaai_catalyst-2.2.4.1b3 → ragaai_catalyst-2.2.4.1b5}/ragaai_catalyst/ragaai_catalyst.py +0 -0
  93. {ragaai_catalyst-2.2.4.1b3 → ragaai_catalyst-2.2.4.1b5}/ragaai_catalyst/redteaming/__init__.py +0 -0
  94. {ragaai_catalyst-2.2.4.1b3 → ragaai_catalyst-2.2.4.1b5}/ragaai_catalyst/redteaming/config/detectors.toml +0 -0
  95. {ragaai_catalyst-2.2.4.1b3 → ragaai_catalyst-2.2.4.1b5}/ragaai_catalyst/redteaming/data_generator/scenario_generator.py +0 -0
  96. {ragaai_catalyst-2.2.4.1b3 → ragaai_catalyst-2.2.4.1b5}/ragaai_catalyst/redteaming/data_generator/test_case_generator.py +0 -0
  97. {ragaai_catalyst-2.2.4.1b3 → ragaai_catalyst-2.2.4.1b5}/ragaai_catalyst/redteaming/evaluator.py +0 -0
  98. {ragaai_catalyst-2.2.4.1b3 → ragaai_catalyst-2.2.4.1b5}/ragaai_catalyst/redteaming/llm_generator.py +0 -0
  99. {ragaai_catalyst-2.2.4.1b3 → ragaai_catalyst-2.2.4.1b5}/ragaai_catalyst/redteaming/llm_generator_old.py +0 -0
  100. {ragaai_catalyst-2.2.4.1b3 → ragaai_catalyst-2.2.4.1b5}/ragaai_catalyst/redteaming/red_teaming.py +0 -0
  101. {ragaai_catalyst-2.2.4.1b3 → ragaai_catalyst-2.2.4.1b5}/ragaai_catalyst/redteaming/requirements.txt +0 -0
  102. {ragaai_catalyst-2.2.4.1b3 → ragaai_catalyst-2.2.4.1b5}/ragaai_catalyst/redteaming/tests/grok.ipynb +0 -0
  103. {ragaai_catalyst-2.2.4.1b3 → ragaai_catalyst-2.2.4.1b5}/ragaai_catalyst/redteaming/tests/stereotype.ipynb +0 -0
  104. {ragaai_catalyst-2.2.4.1b3 → ragaai_catalyst-2.2.4.1b5}/ragaai_catalyst/redteaming/upload_result.py +0 -0
  105. {ragaai_catalyst-2.2.4.1b3 → ragaai_catalyst-2.2.4.1b5}/ragaai_catalyst/redteaming/utils/issue_description.py +0 -0
  106. {ragaai_catalyst-2.2.4.1b3 → ragaai_catalyst-2.2.4.1b5}/ragaai_catalyst/redteaming/utils/rt.png +0 -0
  107. {ragaai_catalyst-2.2.4.1b3 → ragaai_catalyst-2.2.4.1b5}/ragaai_catalyst/redteaming_old.py +0 -0
  108. {ragaai_catalyst-2.2.4.1b3 → ragaai_catalyst-2.2.4.1b5}/ragaai_catalyst/synthetic_data_generation.py +0 -0
  109. {ragaai_catalyst-2.2.4.1b3 → ragaai_catalyst-2.2.4.1b5}/ragaai_catalyst/tracers/__init__.py +0 -0
  110. {ragaai_catalyst-2.2.4.1b3 → ragaai_catalyst-2.2.4.1b5}/ragaai_catalyst/tracers/agentic_tracing/README.md +0 -0
  111. {ragaai_catalyst-2.2.4.1b3 → ragaai_catalyst-2.2.4.1b5}/ragaai_catalyst/tracers/agentic_tracing/__init__.py +0 -0
  112. {ragaai_catalyst-2.2.4.1b3 → ragaai_catalyst-2.2.4.1b5}/ragaai_catalyst/tracers/agentic_tracing/data/__init__.py +0 -0
  113. {ragaai_catalyst-2.2.4.1b3 → ragaai_catalyst-2.2.4.1b5}/ragaai_catalyst/tracers/agentic_tracing/data/data_structure.py +0 -0
  114. {ragaai_catalyst-2.2.4.1b3 → ragaai_catalyst-2.2.4.1b5}/ragaai_catalyst/tracers/agentic_tracing/tests/FinancialAnalysisSystem.ipynb +0 -0
  115. {ragaai_catalyst-2.2.4.1b3 → ragaai_catalyst-2.2.4.1b5}/ragaai_catalyst/tracers/agentic_tracing/tests/GameActivityEventPlanner.ipynb +0 -0
  116. {ragaai_catalyst-2.2.4.1b3 → ragaai_catalyst-2.2.4.1b5}/ragaai_catalyst/tracers/agentic_tracing/tests/TravelPlanner.ipynb +0 -0
  117. {ragaai_catalyst-2.2.4.1b3 → ragaai_catalyst-2.2.4.1b5}/ragaai_catalyst/tracers/agentic_tracing/tests/__init__.py +0 -0
  118. {ragaai_catalyst-2.2.4.1b3 → ragaai_catalyst-2.2.4.1b5}/ragaai_catalyst/tracers/agentic_tracing/tests/ai_travel_agent.py +0 -0
  119. {ragaai_catalyst-2.2.4.1b3 → ragaai_catalyst-2.2.4.1b5}/ragaai_catalyst/tracers/agentic_tracing/tests/unique_decorator_test.py +0 -0
  120. {ragaai_catalyst-2.2.4.1b3 → ragaai_catalyst-2.2.4.1b5}/ragaai_catalyst/tracers/agentic_tracing/tracers/__init__.py +0 -0
  121. {ragaai_catalyst-2.2.4.1b3 → ragaai_catalyst-2.2.4.1b5}/ragaai_catalyst/tracers/agentic_tracing/tracers/agent_tracer.py +0 -0
  122. {ragaai_catalyst-2.2.4.1b3 → ragaai_catalyst-2.2.4.1b5}/ragaai_catalyst/tracers/agentic_tracing/tracers/base.py +0 -0
  123. {ragaai_catalyst-2.2.4.1b3 → ragaai_catalyst-2.2.4.1b5}/ragaai_catalyst/tracers/agentic_tracing/tracers/custom_tracer.py +0 -0
  124. {ragaai_catalyst-2.2.4.1b3 → ragaai_catalyst-2.2.4.1b5}/ragaai_catalyst/tracers/agentic_tracing/tracers/langgraph_tracer.py +0 -0
  125. {ragaai_catalyst-2.2.4.1b3 → ragaai_catalyst-2.2.4.1b5}/ragaai_catalyst/tracers/agentic_tracing/tracers/llm_tracer.py +0 -0
  126. {ragaai_catalyst-2.2.4.1b3 → ragaai_catalyst-2.2.4.1b5}/ragaai_catalyst/tracers/agentic_tracing/tracers/main_tracer.py +0 -0
  127. {ragaai_catalyst-2.2.4.1b3 → ragaai_catalyst-2.2.4.1b5}/ragaai_catalyst/tracers/agentic_tracing/tracers/network_tracer.py +0 -0
  128. {ragaai_catalyst-2.2.4.1b3 → ragaai_catalyst-2.2.4.1b5}/ragaai_catalyst/tracers/agentic_tracing/tracers/tool_tracer.py +0 -0
  129. {ragaai_catalyst-2.2.4.1b3 → ragaai_catalyst-2.2.4.1b5}/ragaai_catalyst/tracers/agentic_tracing/tracers/user_interaction_tracer.py +0 -0
  130. {ragaai_catalyst-2.2.4.1b3 → ragaai_catalyst-2.2.4.1b5}/ragaai_catalyst/tracers/agentic_tracing/upload/__init__.py +0 -0
  131. {ragaai_catalyst-2.2.4.1b3 → ragaai_catalyst-2.2.4.1b5}/ragaai_catalyst/tracers/agentic_tracing/upload/upload_local_metric.py +0 -0
  132. {ragaai_catalyst-2.2.4.1b3 → ragaai_catalyst-2.2.4.1b5}/ragaai_catalyst/tracers/agentic_tracing/utils/__init__.py +0 -0
  133. {ragaai_catalyst-2.2.4.1b3 → ragaai_catalyst-2.2.4.1b5}/ragaai_catalyst/tracers/agentic_tracing/utils/api_utils.py +0 -0
  134. {ragaai_catalyst-2.2.4.1b3 → ragaai_catalyst-2.2.4.1b5}/ragaai_catalyst/tracers/agentic_tracing/utils/create_dataset_schema.py +0 -0
  135. {ragaai_catalyst-2.2.4.1b3 → ragaai_catalyst-2.2.4.1b5}/ragaai_catalyst/tracers/agentic_tracing/utils/file_name_tracker.py +0 -0
  136. {ragaai_catalyst-2.2.4.1b3 → ragaai_catalyst-2.2.4.1b5}/ragaai_catalyst/tracers/agentic_tracing/utils/generic.py +0 -0
  137. {ragaai_catalyst-2.2.4.1b3 → ragaai_catalyst-2.2.4.1b5}/ragaai_catalyst/tracers/agentic_tracing/utils/get_user_trace_metrics.py +0 -0
  138. {ragaai_catalyst-2.2.4.1b3 → ragaai_catalyst-2.2.4.1b5}/ragaai_catalyst/tracers/agentic_tracing/utils/llm_utils.py +0 -0
  139. {ragaai_catalyst-2.2.4.1b3 → ragaai_catalyst-2.2.4.1b5}/ragaai_catalyst/tracers/agentic_tracing/utils/model_costs.json +0 -0
  140. {ragaai_catalyst-2.2.4.1b3 → ragaai_catalyst-2.2.4.1b5}/ragaai_catalyst/tracers/agentic_tracing/utils/span_attributes.py +0 -0
  141. {ragaai_catalyst-2.2.4.1b3 → ragaai_catalyst-2.2.4.1b5}/ragaai_catalyst/tracers/agentic_tracing/utils/supported_llm_provider.toml +0 -0
  142. {ragaai_catalyst-2.2.4.1b3 → ragaai_catalyst-2.2.4.1b5}/ragaai_catalyst/tracers/agentic_tracing/utils/system_monitor.py +0 -0
  143. {ragaai_catalyst-2.2.4.1b3 → ragaai_catalyst-2.2.4.1b5}/ragaai_catalyst/tracers/agentic_tracing/utils/trace_utils.py +0 -0
  144. {ragaai_catalyst-2.2.4.1b3 → ragaai_catalyst-2.2.4.1b5}/ragaai_catalyst/tracers/agentic_tracing/utils/unique_decorator.py +0 -0
  145. {ragaai_catalyst-2.2.4.1b3 → ragaai_catalyst-2.2.4.1b5}/ragaai_catalyst/tracers/agentic_tracing/utils/zip_list_of_unique_files.py +0 -0
  146. {ragaai_catalyst-2.2.4.1b3 → ragaai_catalyst-2.2.4.1b5}/ragaai_catalyst/tracers/distributed.py +0 -0
  147. {ragaai_catalyst-2.2.4.1b3 → ragaai_catalyst-2.2.4.1b5}/ragaai_catalyst/tracers/exporters/__init__.py +0 -0
  148. {ragaai_catalyst-2.2.4.1b3 → ragaai_catalyst-2.2.4.1b5}/ragaai_catalyst/tracers/exporters/dynamic_trace_exporter.py +0 -0
  149. {ragaai_catalyst-2.2.4.1b3 → ragaai_catalyst-2.2.4.1b5}/ragaai_catalyst/tracers/exporters/file_span_exporter.py +0 -0
  150. {ragaai_catalyst-2.2.4.1b3 → ragaai_catalyst-2.2.4.1b5}/ragaai_catalyst/tracers/exporters/raga_exporter.py +0 -0
  151. {ragaai_catalyst-2.2.4.1b3 → ragaai_catalyst-2.2.4.1b5}/ragaai_catalyst/tracers/instrumentators/__init__.py +0 -0
  152. {ragaai_catalyst-2.2.4.1b3 → ragaai_catalyst-2.2.4.1b5}/ragaai_catalyst/tracers/langchain_callback.py +0 -0
  153. {ragaai_catalyst-2.2.4.1b3 → ragaai_catalyst-2.2.4.1b5}/ragaai_catalyst/tracers/llamaindex_callback.py +0 -0
  154. {ragaai_catalyst-2.2.4.1b3 → ragaai_catalyst-2.2.4.1b5}/ragaai_catalyst/tracers/llamaindex_instrumentation.py +0 -0
  155. {ragaai_catalyst-2.2.4.1b3 → ragaai_catalyst-2.2.4.1b5}/ragaai_catalyst/tracers/upload_traces.py +0 -0
  156. {ragaai_catalyst-2.2.4.1b3 → ragaai_catalyst-2.2.4.1b5}/ragaai_catalyst/tracers/utils/__init__.py +0 -0
  157. {ragaai_catalyst-2.2.4.1b3 → ragaai_catalyst-2.2.4.1b5}/ragaai_catalyst/tracers/utils/convert_langchain_callbacks_output.py +0 -0
  158. {ragaai_catalyst-2.2.4.1b3 → ragaai_catalyst-2.2.4.1b5}/ragaai_catalyst/tracers/utils/convert_llama_instru_callback.py +0 -0
  159. {ragaai_catalyst-2.2.4.1b3 → ragaai_catalyst-2.2.4.1b5}/ragaai_catalyst/tracers/utils/extraction_logic_llama_index.py +0 -0
  160. {ragaai_catalyst-2.2.4.1b3 → ragaai_catalyst-2.2.4.1b5}/ragaai_catalyst/tracers/utils/langchain_tracer_extraction_logic.py +0 -0
  161. {ragaai_catalyst-2.2.4.1b3 → ragaai_catalyst-2.2.4.1b5}/ragaai_catalyst/tracers/utils/model_prices_and_context_window_backup.json +0 -0
  162. {ragaai_catalyst-2.2.4.1b3 → ragaai_catalyst-2.2.4.1b5}/ragaai_catalyst/tracers/utils/rag_extraction_logic_final.py +0 -0
  163. {ragaai_catalyst-2.2.4.1b3 → ragaai_catalyst-2.2.4.1b5}/ragaai_catalyst/tracers/utils/rag_trace_json_converter.py +0 -0
  164. {ragaai_catalyst-2.2.4.1b3 → ragaai_catalyst-2.2.4.1b5}/ragaai_catalyst/tracers/utils/trace_json_converter.py +0 -0
  165. {ragaai_catalyst-2.2.4.1b3 → ragaai_catalyst-2.2.4.1b5}/ragaai_catalyst/tracers/utils/utils.py +0 -0
  166. {ragaai_catalyst-2.2.4.1b3 → ragaai_catalyst-2.2.4.1b5}/ragaai_catalyst/utils.py +0 -0
  167. {ragaai_catalyst-2.2.4.1b3 → ragaai_catalyst-2.2.4.1b5}/ragaai_catalyst.egg-info/dependency_links.txt +0 -0
  168. {ragaai_catalyst-2.2.4.1b3 → ragaai_catalyst-2.2.4.1b5}/ragaai_catalyst.egg-info/requires.txt +0 -0
  169. {ragaai_catalyst-2.2.4.1b3 → ragaai_catalyst-2.2.4.1b5}/ragaai_catalyst.egg-info/top_level.txt +0 -0
  170. {ragaai_catalyst-2.2.4.1b3 → ragaai_catalyst-2.2.4.1b5}/requirements.txt +0 -0
  171. {ragaai_catalyst-2.2.4.1b3 → ragaai_catalyst-2.2.4.1b5}/setup.cfg +0 -0
  172. {ragaai_catalyst-2.2.4.1b3 → ragaai_catalyst-2.2.4.1b5}/test_report_20250407_183101.txt +0 -0
  173. {ragaai_catalyst-2.2.4.1b3 → ragaai_catalyst-2.2.4.1b5}/tests/README.md +0 -0
  174. {ragaai_catalyst-2.2.4.1b3 → ragaai_catalyst-2.2.4.1b5}/tests/environment.yml +0 -0
  175. {ragaai_catalyst-2.2.4.1b3 → ragaai_catalyst-2.2.4.1b5}/tests/examples/__init__.py +0 -0
  176. {ragaai_catalyst-2.2.4.1b3 → ragaai_catalyst-2.2.4.1b5}/tests/examples/all_llm_provider/all_llm_provider.py +0 -0
  177. {ragaai_catalyst-2.2.4.1b3 → ragaai_catalyst-2.2.4.1b5}/tests/examples/all_llm_provider/config.py +0 -0
  178. {ragaai_catalyst-2.2.4.1b3 → ragaai_catalyst-2.2.4.1b5}/tests/examples/all_llm_provider/test_all_llm_provider.py +0 -0
  179. {ragaai_catalyst-2.2.4.1b3 → ragaai_catalyst-2.2.4.1b5}/tests/examples/crewai/scifi_writer/sci_fi_story.md +0 -0
  180. {ragaai_catalyst-2.2.4.1b3 → ragaai_catalyst-2.2.4.1b5}/tests/examples/crewai/scifi_writer/scifi_writer.py +0 -0
  181. {ragaai_catalyst-2.2.4.1b3 → ragaai_catalyst-2.2.4.1b5}/tests/examples/crewai/scifi_writer/test_scifi_writer.py +0 -0
  182. {ragaai_catalyst-2.2.4.1b3 → ragaai_catalyst-2.2.4.1b5}/tests/examples/custom_agents/travel_agent/agents.py +0 -0
  183. {ragaai_catalyst-2.2.4.1b3 → ragaai_catalyst-2.2.4.1b5}/tests/examples/custom_agents/travel_agent/config.py +0 -0
  184. {ragaai_catalyst-2.2.4.1b3 → ragaai_catalyst-2.2.4.1b5}/tests/examples/custom_agents/travel_agent/main.py +0 -0
  185. {ragaai_catalyst-2.2.4.1b3 → ragaai_catalyst-2.2.4.1b5}/tests/examples/custom_agents/travel_agent/test_travel_agent.py +0 -0
  186. {ragaai_catalyst-2.2.4.1b3 → ragaai_catalyst-2.2.4.1b5}/tests/examples/custom_agents/travel_agent/tools.py +0 -0
  187. {ragaai_catalyst-2.2.4.1b3 → ragaai_catalyst-2.2.4.1b5}/tests/examples/haystack/news_fetching/news_fetching.py +0 -0
  188. {ragaai_catalyst-2.2.4.1b3 → ragaai_catalyst-2.2.4.1b5}/tests/examples/haystack/news_fetching/test_news_fetching.py +0 -0
  189. {ragaai_catalyst-2.2.4.1b3 → ragaai_catalyst-2.2.4.1b5}/tests/examples/langchain/medical_rag/data/medical_texts/handbook1.pdf +0 -0
  190. {ragaai_catalyst-2.2.4.1b3 → ragaai_catalyst-2.2.4.1b5}/tests/examples/langchain/medical_rag/data/medical_texts/handbook2.pdf +0 -0
  191. {ragaai_catalyst-2.2.4.1b3 → ragaai_catalyst-2.2.4.1b5}/tests/examples/langchain/medical_rag/data/symptom_disease_map.csv +0 -0
  192. {ragaai_catalyst-2.2.4.1b3 → ragaai_catalyst-2.2.4.1b5}/tests/examples/langchain/medical_rag/diagnosis_agent.py +0 -0
  193. {ragaai_catalyst-2.2.4.1b3 → ragaai_catalyst-2.2.4.1b5}/tests/examples/langchain/medical_rag/test_diagnosis_agent.py +0 -0
  194. {ragaai_catalyst-2.2.4.1b3 → ragaai_catalyst-2.2.4.1b5}/tests/examples/langgraph/personal_research_assistant/research_assistant.py +0 -0
  195. {ragaai_catalyst-2.2.4.1b3 → ragaai_catalyst-2.2.4.1b5}/tests/examples/langgraph/personal_research_assistant/test_research_assistant.py +0 -0
  196. {ragaai_catalyst-2.2.4.1b3 → ragaai_catalyst-2.2.4.1b5}/tests/examples/llamaindex_examples/legal_research_rag/legal_data/cases/ca_overtime_2021.pdf +0 -0
  197. {ragaai_catalyst-2.2.4.1b3 → ragaai_catalyst-2.2.4.1b5}/tests/examples/llamaindex_examples/legal_research_rag/legal_data/cases/fl_ada_2022.pdf +0 -0
  198. {ragaai_catalyst-2.2.4.1b3 → ragaai_catalyst-2.2.4.1b5}/tests/examples/llamaindex_examples/legal_research_rag/legal_data/statutes.csv +0 -0
  199. {ragaai_catalyst-2.2.4.1b3 → ragaai_catalyst-2.2.4.1b5}/tests/examples/llamaindex_examples/legal_research_rag/legal_rag.py +0 -0
  200. {ragaai_catalyst-2.2.4.1b3 → ragaai_catalyst-2.2.4.1b5}/tests/examples/llamaindex_examples/legal_research_rag/test_legal_rag.py +0 -0
  201. {ragaai_catalyst-2.2.4.1b3 → ragaai_catalyst-2.2.4.1b5}/tests/examples/smolagents/most_upvoted_paper/most_upvoted_paper.py +0 -0
  202. {ragaai_catalyst-2.2.4.1b3 → ragaai_catalyst-2.2.4.1b5}/tests/examples/smolagents/most_upvoted_paper/paper.pdf +0 -0
  203. {ragaai_catalyst-2.2.4.1b3 → ragaai_catalyst-2.2.4.1b5}/tests/examples/smolagents/most_upvoted_paper/test_most_upvoted_paper.py +0 -0
  204. {ragaai_catalyst-2.2.4.1b3 → ragaai_catalyst-2.2.4.1b5}/tests/examples/test_utils/get_components.py +0 -0
  205. {ragaai_catalyst-2.2.4.1b3 → ragaai_catalyst-2.2.4.1b5}/tests/examples/test_utils/get_trace_data.py +0 -0
  206. {ragaai_catalyst-2.2.4.1b3 → ragaai_catalyst-2.2.4.1b5}/tests/run_pytest_and_print_and_save_results.py +0 -0
  207. {ragaai_catalyst-2.2.4.1b3 → ragaai_catalyst-2.2.4.1b5}/tests/table_result.png +0 -0
  208. {ragaai_catalyst-2.2.4.1b3 → ragaai_catalyst-2.2.4.1b5}/tests/test_catalyst/test_base_tracer_add_metrics.py +0 -0
  209. {ragaai_catalyst-2.2.4.1b3 → ragaai_catalyst-2.2.4.1b5}/tests/test_catalyst/test_base_tracer_metrics.py +0 -0
  210. {ragaai_catalyst-2.2.4.1b3 → ragaai_catalyst-2.2.4.1b5}/tests/test_catalyst/test_data/util_synthetic_data_doc.pdf +0 -0
  211. {ragaai_catalyst-2.2.4.1b3 → ragaai_catalyst-2.2.4.1b5}/tests/test_catalyst/test_data/util_synthetic_data_invalid.csv +0 -0
  212. {ragaai_catalyst-2.2.4.1b3 → ragaai_catalyst-2.2.4.1b5}/tests/test_catalyst/test_data/util_synthetic_data_valid.csv +0 -0
  213. {ragaai_catalyst-2.2.4.1b3 → ragaai_catalyst-2.2.4.1b5}/tests/test_catalyst/test_data/util_test_dataset.csv +0 -0
  214. {ragaai_catalyst-2.2.4.1b3 → ragaai_catalyst-2.2.4.1b5}/tests/test_catalyst/test_data/util_test_langchain_tracing.pdf +0 -0
  215. {ragaai_catalyst-2.2.4.1b3 → ragaai_catalyst-2.2.4.1b5}/tests/test_catalyst/test_dataset.py +0 -0
  216. {ragaai_catalyst-2.2.4.1b3 → ragaai_catalyst-2.2.4.1b5}/tests/test_catalyst/test_evaluation.py +0 -0
  217. {ragaai_catalyst-2.2.4.1b3 → ragaai_catalyst-2.2.4.1b5}/tests/test_catalyst/test_evaluation_metrics.py +0 -0
  218. {ragaai_catalyst-2.2.4.1b3 → ragaai_catalyst-2.2.4.1b5}/tests/test_catalyst/test_prompt_manager.py +0 -0
  219. {ragaai_catalyst-2.2.4.1b3 → ragaai_catalyst-2.2.4.1b5}/tests/test_catalyst/test_synthetic_data_generation.py +0 -0
  220. {ragaai_catalyst-2.2.4.1b3 → ragaai_catalyst-2.2.4.1b5}/tests/test_catalyst/test_the_configuration.py +0 -0
  221. {ragaai_catalyst-2.2.4.1b3 → ragaai_catalyst-2.2.4.1b5}/tests_requirements.txt +0 -0
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: ragaai_catalyst
3
- Version: 2.2.4.1b3
3
+ Version: 2.2.4.1b5
4
4
  Summary: RAGA AI CATALYST
5
5
  Author-email: Kiran Scaria <kiran.scaria@raga.ai>, Kedar Gaikwad <kedar.gaikwad@raga.ai>, Dushyant Mahajan <dushyant.mahajan@raga.ai>, Siddhartha Kosti <siddhartha.kosti@raga.ai>, Ritika Goel <ritika.goel@raga.ai>, Vijay Chaurasia <vijay.chaurasia@raga.ai>, Tushar Kumar <tushar.kumar@raga.ai>, Rishabh Pandey <rishabh.pandey@raga.ai>, Jyotsana C G <jyotsana@raga.ai>
6
6
  Requires-Python: <=3.13.2,>=3.10
@@ -8,7 +8,7 @@ description = "RAGA AI CATALYST"
8
8
  readme = "README.md"
9
9
  requires-python = ">=3.10,<=3.13.2"
10
10
  # license = {file = "LICENSE"}
11
- version = "2.2.4.1.beta.3"
11
+ version = "2.2.4.1.beta.5"
12
12
  authors = [
13
13
  {name = "Kiran Scaria", email = "kiran.scaria@raga.ai"},
14
14
  {name = "Kedar Gaikwad", email = "kedar.gaikwad@raga.ai"},
import logging
import os
import threading

from requests.adapters import HTTPAdapter
from urllib3.util.retry import Retry
from urllib3.exceptions import PoolError, MaxRetryError, NewConnectionError
from requests.exceptions import ConnectionError, Timeout
from ragaai_catalyst import RagaAICatalyst
import requests

logger = logging.getLogger(__name__)


class SessionManager:
    """Shared session manager with connection pooling for HTTP requests.

    Thread-safe singleton: all importers share one `requests.Session` whose
    HTTPAdapter provides connection pooling and automatic retries for
    transient 5xx responses.
    """
    _instance = None
    _session = None
    _lock = threading.Lock()

    def __new__(cls):
        if cls._instance is None:
            with cls._lock:  # Thread-safe singleton
                if cls._instance is None:  # Double-check locking
                    logger.info("Creating new SessionManager singleton instance")
                    cls._instance = super(SessionManager, cls).__new__(cls)
                    cls._instance._initialize_session()
                else:
                    logger.debug("SessionManager instance already exists, returning existing instance")
        else:
            logger.debug("SessionManager instance exists, returning existing instance")
        return cls._instance

    def _initialize_session(self):
        """Initialize session with connection pooling and retry strategy."""
        logger.info("Initializing HTTP session with connection pooling and retry strategy")
        self._session = requests.Session()

        retry_strategy = Retry(
            total=3,  # number of retries
            backoff_factor=0.5,  # wait 0.5, 1, 2... seconds between retries
            status_forcelist=[500, 502, 503, 504]  # HTTP status codes to retry on
        )

        adapter = HTTPAdapter(
            max_retries=retry_strategy,
            pool_connections=5,  # number of connection pools to cache (per host)
            pool_maxsize=50,  # maximum number of connections in each pool
            pool_block=True  # Block/wait when pool is full rather than raising error
        )

        self._session.mount("http://", adapter)
        self._session.mount("https://", adapter)

        logger.info("HTTP session initialized successfully with adapters mounted for http:// and https://")

        # Warm up connection pool using the configured base URL.
        # BUG FIX: the original read the env var twice and, in the "unset"
        # branch, interpolated `base_url` before it was ever assigned,
        # raising NameError at module import whenever the variable was
        # missing. Read it once, then branch.
        base_url = os.getenv("RAGAAI_CATALYST_BASE_URL")
        if base_url is not None:
            logger.info(f"Warming up connection pool using RagaAICatalyst.BASE_URL: {base_url}")
            self.warm_up_connections(base_url)
        else:
            logger.warning("RAGAAI_CATALYST_BASE_URL not set, skipping connection warmup")

    @property
    def session(self):
        # Lazily re-create the session if it was closed (or never built).
        if self._session is None:
            logger.warning("Session accessed but not initialized, reinitializing...")
            self._initialize_session()
        return self._session

    def warm_up_connections(self, base_url, num_connections=3):
        """
        Warm up the connection pool by making lightweight requests to healthcheck endpoint.
        This can help prevent RemoteDisconnected errors on initial requests.
        """
        if not self._session:
            return

        # Construct healthcheck URL
        healthcheck_url = f"{base_url.rstrip('/')}/healthcheck"
        logger.info(f"Warming up connection pool with {num_connections} connections to {healthcheck_url}")

        for i in range(num_connections):
            try:
                # Make a lightweight HEAD request to the healthcheck endpoint to warm up the connection
                response = self._session.head(healthcheck_url, timeout=5)
                logger.info(f"Warmup connection {i+1}: Status {response.status_code}")
            except Exception as e:
                # Warmup failures are expected (endpoint may not exist); log
                # at debug so they don't masquerade as real errors.
                logger.debug(f"Warmup connection {i+1} failed (this is normal): {e}")
                continue

        logger.info("Connection pool warmup completed")

    def close(self):
        """Close the session and drop the reference so it can be rebuilt lazily."""
        if self._session:
            logger.info("Closing HTTP session")
            self._session.close()
            self._session = None
            logger.info("HTTP session closed successfully")
        else:
            logger.debug("Close called but session was already None")

    def handle_request_exceptions(self, e, operation_name):
        """Handle common request exceptions with appropriate logging.

        Classifies `e` from most to least specific (pool exhaustion, new
        connection failure, generic connection error, timeout, other) and
        logs an operation-tagged message; never re-raises.
        """
        logger.error(f"Exception occurred during {operation_name}")
        if isinstance(e, (PoolError, MaxRetryError)):
            logger.error(f"Connection pool exhausted during {operation_name}: {e}")
        elif isinstance(e, NewConnectionError):
            logger.error(f"Failed to establish new connection during {operation_name}: {e}")
        elif isinstance(e, ConnectionError):
            logger.error(f"Connection error during {operation_name}: {e}")
        elif isinstance(e, Timeout):
            logger.error(f"Request timeout during {operation_name}: {e}")
        else:
            logger.error(f"Unexpected error during {operation_name}: {e}")


# Global session manager instance
logger.info("Creating global SessionManager instance")
session_manager = SessionManager()
logger.info(f"Global SessionManager instance created with ID: {id(session_manager)}")
@@ -22,6 +22,7 @@ from typing import Dict, Any, Optional
22
22
  import threading
23
23
  import uuid
24
24
 
25
+
25
26
  # Set up logging
26
27
  log_dir = os.path.join(tempfile.gettempdir(), "ragaai_logs")
27
28
  os.makedirs(log_dir, exist_ok=True)
@@ -49,11 +50,13 @@ try:
49
50
  from ragaai_catalyst.tracers.agentic_tracing.upload.upload_code import upload_code
50
51
  # from ragaai_catalyst.tracers.agentic_tracing.upload.upload_trace_metric import upload_trace_metric
51
52
  from ragaai_catalyst.tracers.agentic_tracing.utils.create_dataset_schema import create_dataset_schema_with_trace
53
+ from ragaai_catalyst.tracers.agentic_tracing.upload.session_manager import session_manager
52
54
  from ragaai_catalyst import RagaAICatalyst
53
55
  IMPORTS_AVAILABLE = True
54
56
  except ImportError:
55
57
  logger.warning("RagaAI Catalyst imports not available - running in test mode")
56
58
  IMPORTS_AVAILABLE = False
59
+ session_manager = None
57
60
 
58
61
  # Define task queue directory
59
62
  QUEUE_DIR = os.path.join(tempfile.gettempdir(), "ragaai_tasks")
@@ -72,6 +75,10 @@ _executor_lock = threading.Lock()
72
75
  _futures: Dict[str, Any] = {}
73
76
  _futures_lock = threading.Lock()
74
77
 
78
+ # Dataset creation cache to avoid redundant API calls
79
+ _dataset_cache: Dict[str, Dict[str, Any]] = {}
80
+ _dataset_cache_lock = threading.Lock()
81
+ DATASET_CACHE_DURATION = 600 # 10 minutes in seconds
75
82
 
76
83
  _cleanup_lock = threading.Lock()
77
84
  _last_cleanup = 0
@@ -88,7 +95,7 @@ def get_executor(max_workers=None):
88
95
  if _executor is None:
89
96
  # Calculate optimal worker count
90
97
  if max_workers is None:
91
- max_workers = min(32, (os.cpu_count() or 1) * 4)
98
+ max_workers = min(8, (os.cpu_count() or 1) * 4)
92
99
 
93
100
  logger.info(f"Creating ThreadPoolExecutor with {max_workers} workers")
94
101
  _executor = concurrent.futures.ThreadPoolExecutor(
@@ -110,9 +117,57 @@ def generate_unique_task_id():
110
117
  unique_id = str(uuid.uuid4())[:8] # Short UUID
111
118
  return f"task_{int(time.time())}_{os.getpid()}_{counter}_{unique_id}"
112
119
 
120
+ def _generate_dataset_cache_key(dataset_name: str, project_name: str, base_url: str) -> str:
121
+ """Generate a unique cache key for dataset creation"""
122
+ return f"{dataset_name}#{project_name}#{base_url}"
123
+
124
+ def _is_dataset_cached(cache_key: str) -> bool:
125
+ """Check if dataset creation is cached and still valid"""
126
+ with _dataset_cache_lock:
127
+ if cache_key not in _dataset_cache:
128
+ return False
129
+
130
+ cache_entry = _dataset_cache[cache_key]
131
+ cache_time = cache_entry.get('timestamp', 0)
132
+ current_time = time.time()
133
+
134
+ # Check if cache is still valid (within 10 minutes)
135
+ if current_time - cache_time <= DATASET_CACHE_DURATION:
136
+ logger.info(f"Dataset creation cache hit for key: {cache_key}")
137
+ return True
138
+ else:
139
+ # Cache expired, remove it
140
+ logger.info(f"Dataset creation cache expired for key: {cache_key}")
141
+ del _dataset_cache[cache_key]
142
+ return False
143
+
144
+ def _cache_dataset_creation(cache_key: str, response: Any) -> None:
145
+ """Cache successful dataset creation"""
146
+ with _dataset_cache_lock:
147
+ _dataset_cache[cache_key] = {
148
+ 'timestamp': time.time(),
149
+ 'response': response
150
+ }
151
+
152
+ def _cleanup_expired_cache_entries() -> None:
153
+ """Remove expired cache entries"""
154
+ current_time = time.time()
155
+ with _dataset_cache_lock:
156
+ expired_keys = []
157
+ for cache_key, cache_entry in _dataset_cache.items():
158
+ cache_time = cache_entry.get('timestamp', 0)
159
+ if current_time - cache_time > DATASET_CACHE_DURATION:
160
+ expired_keys.append(cache_key)
161
+
162
+ for key in expired_keys:
163
+ del _dataset_cache[key]
164
+
165
+ if expired_keys:
166
+ logger.info(f"Cleaned up {len(expired_keys)} expired dataset cache entries")
167
+
113
168
  def process_upload(task_id: str, filepath: str, hash_id: str, zip_path: str,
114
169
  project_name: str, project_id: str, dataset_name: str,
115
- user_details: Dict[str, Any], base_url: str, timeout=120, fail_on_trace_error=True) -> Dict[str, Any]:
170
+ user_details: Dict[str, Any], base_url: str, tracer_type, timeout=120, fail_on_trace_error=True) -> Dict[str, Any]:
116
171
  """
117
172
  Process a single upload task
118
173
 
@@ -165,20 +220,36 @@ def process_upload(task_id: str, filepath: str, hash_id: str, zip_path: str,
165
220
  save_task_status(result)
166
221
  return result
167
222
 
168
- # Step 1: Create dataset schema
223
+ # Step 1: Create dataset schema (with caching)
169
224
  logger.info(f"Creating dataset schema for {dataset_name} with base_url: {base_url} and timeout: {timeout}")
170
- try:
171
- response = create_dataset_schema_with_trace(
172
- dataset_name=dataset_name,
173
- project_name=project_name,
174
- base_url=base_url,
175
- user_details=user_details,
176
- timeout=timeout
177
- )
178
- logger.info(f"Dataset schema created: {response}")
179
- except Exception as e:
180
- logger.error(f"Error creating dataset schema: {e}")
181
- # Continue with other steps
225
+
226
+ # Generate cache key and check if dataset creation is already cached
227
+ cache_key = _generate_dataset_cache_key(dataset_name, project_name, base_url)
228
+
229
+ if _is_dataset_cached(cache_key):
230
+ logger.info(f"Dataset schema creation skipped (cached) for {dataset_name}")
231
+ else:
232
+ try:
233
+ # Clean up expired cache entries periodically
234
+ # _cleanup_expired_cache_entries()
235
+
236
+ response = create_dataset_schema_with_trace(
237
+ dataset_name=dataset_name,
238
+ project_name=project_name,
239
+ base_url=base_url,
240
+ user_details=user_details,
241
+ timeout=timeout
242
+ )
243
+ logger.info(f"Dataset schema created: {response}")
244
+
245
+ # Cache the response only if status code is 200
246
+ if response and hasattr(response, 'status_code') and response.status_code in [200, 201]:
247
+ _cache_dataset_creation(cache_key, response)
248
+ logger.info(f"Response cached successfully for dataset: {dataset_name} and key: {cache_key}")
249
+
250
+ except Exception as e:
251
+ logger.error(f"Error creating dataset schema: {e}")
252
+ # Continue with other steps
182
253
 
183
254
  # Step 2: Upload trace metrics
184
255
  # if filepath and os.path.exists(filepath):
@@ -238,28 +309,34 @@ def process_upload(task_id: str, filepath: str, hash_id: str, zip_path: str,
238
309
  logger.error(error_msg)
239
310
 
240
311
  # Step 4: Upload code hash
241
- if hash_id and zip_path and os.path.exists(zip_path):
242
- logger.info(f"Uploading code hash {hash_id} with base_url: {base_url} and timeout: {timeout}")
243
- try:
244
- response = upload_code(
245
- hash_id=hash_id,
246
- zip_path=zip_path,
247
- project_name=project_name,
248
- dataset_name=dataset_name,
249
- base_url=base_url,
250
- timeout=timeout
251
- )
252
- logger.info(f"Code hash uploaded: {response}")
253
- except Exception as e:
254
- logger.error(f"Error uploading code hash: {e}")
255
- else:
256
- logger.warning(f"Code zip {zip_path} not found, skipping code upload")
257
-
312
+ if tracer_type.startswith("agentic/"):
313
+ logger.info(f"Tracer type '{tracer_type}' matches agentic pattern, proceeding with code upload")
314
+ if hash_id and zip_path and os.path.exists(zip_path):
315
+ logger.info(f"Uploading code hash {hash_id} with base_url: {base_url} and timeout: {timeout}")
316
+ try:
317
+ response = upload_code(
318
+ hash_id=hash_id,
319
+ zip_path=zip_path,
320
+ project_name=project_name,
321
+ dataset_name=dataset_name,
322
+ base_url=base_url,
323
+ timeout=timeout
324
+ )
325
+ if response is None:
326
+ error_msg = "Code hash not uploaded"
327
+ logger.error(error_msg)
328
+ else:
329
+ logger.info(f"Code hash uploaded successfully: {response}")
330
+ except Exception as e:
331
+ logger.error(f"Error uploading code hash: {e}")
332
+ else:
333
+ logger.warning(f"Code zip {zip_path} not found, skipping code upload")
334
+
258
335
  # Mark task as completed
259
336
  result["status"] = STATUS_COMPLETED
260
337
  result["end_time"] = datetime.now().isoformat()
261
338
  logger.info(f"Task {task_id} completed successfully")
262
-
339
+
263
340
  except Exception as e:
264
341
  logger.error(f"Error processing task {task_id}: {e}")
265
342
  result["status"] = STATUS_FAILED
@@ -302,7 +379,8 @@ def save_task_status(task_status: Dict[str, Any]):
302
379
  with open(status_path, "w") as f:
303
380
  json.dump(task_status, f, indent=2)
304
381
 
305
- def submit_upload_task(filepath, hash_id, zip_path, project_name, project_id, dataset_name, user_details, base_url, timeout=120):
382
+ def submit_upload_task(filepath, hash_id, zip_path, project_name, project_id, dataset_name, user_details, base_url,
383
+ tracer_type, timeout=120):
306
384
  """
307
385
  Submit a new upload task using futures.
308
386
 
@@ -349,6 +427,7 @@ def submit_upload_task(filepath, hash_id, zip_path, project_name, project_id, da
349
427
  dataset_name=dataset_name,
350
428
  user_details=user_details,
351
429
  base_url=base_url,
430
+ tracer_type = tracer_type,
352
431
  timeout=timeout,
353
432
  fail_on_trace_error=True
354
433
  )
@@ -379,6 +458,7 @@ def submit_upload_task(filepath, hash_id, zip_path, project_name, project_id, da
379
458
  dataset_name=dataset_name,
380
459
  user_details=user_details,
381
460
  base_url=base_url,
461
+ tracer_type=tracer_type,
382
462
  timeout=timeout,
383
463
  fail_on_trace_error=True
384
464
  )
@@ -550,6 +630,14 @@ def shutdown(timeout=120):
550
630
 
551
631
  _executor = None
552
632
 
633
+ # Close the session manager to clean up HTTP connections
634
+ if session_manager is not None:
635
+ try:
636
+ session_manager.close()
637
+ logger.info("Session manager closed successfully")
638
+ except Exception as e:
639
+ logger.error(f"Error closing session manager: {e}")
640
+
553
641
  # Register shutdown handler
554
642
  atexit.register(shutdown)
555
643
 
@@ -4,13 +4,13 @@ import os
4
4
  import re
5
5
  import time
6
6
  from urllib.parse import urlparse, urlunparse
7
+ from urllib3.exceptions import PoolError, MaxRetryError, NewConnectionError
8
+ from requests.exceptions import ConnectionError, Timeout, RequestException
9
+ from .session_manager import session_manager
7
10
 
8
11
  import requests
9
12
 
10
13
  logger = logging.getLogger(__name__)
11
- logging_level = (
12
- logger.setLevel(logging.DEBUG) if os.getenv("DEBUG") == "1" else logging.INFO
13
- )
14
14
 
15
15
  from ragaai_catalyst.ragaai_catalyst import RagaAICatalyst
16
16
 
@@ -47,42 +47,36 @@ class UploadAgenticTraces:
47
47
  "X-Project-Name": self.project_name,
48
48
  }
49
49
 
50
- logger.debug("Started getting presigned url: ")
51
- logger.debug(f"Payload: {payload}")
52
- logger.debug(f"Headers: {headers}")
53
50
  try:
54
51
  start_time = time.time()
55
52
  endpoint = f"{self.base_url}/v1/llm/presigned-url"
56
53
  # Changed to POST from GET
57
- response = requests.request(
54
+ response = session_manager.session.request(
58
55
  "POST", endpoint, headers=headers, data=payload, timeout=self.timeout
59
56
  )
60
57
  elapsed_ms = (time.time() - start_time) * 1000
61
58
  logger.debug(
62
59
  f"API Call: [POST] {endpoint} | Status: {response.status_code} | Time: {elapsed_ms:.2f}ms"
63
60
  )
64
- if response.status_code in [200, 201]:
61
+
62
+ if response.status_code == 200:
65
63
  presignedURLs = response.json()["data"]["presignedUrls"][0]
66
- logger.debug(f"Got presigned url: {presignedURLs}")
67
64
  presignedurl = self.update_presigned_url(presignedURLs, self.base_url)
68
- logger.debug(f"Updated presigned url: {presignedurl}")
69
65
  return presignedurl
70
66
  else:
71
67
  # If POST fails, try GET
72
- response = requests.request(
68
+ response = session_manager.session.request(
73
69
  "GET", endpoint, headers=headers, data=payload, timeout=self.timeout
74
70
  )
75
71
  elapsed_ms = (time.time() - start_time) * 1000
76
72
  logger.debug(
77
73
  f"API Call: [GET] {endpoint} | Status: {response.status_code} | Time: {elapsed_ms:.2f}ms"
78
74
  )
79
- if response.status_code in [200, 201]:
75
+ if response.status_code == 200:
80
76
  presignedURLs = response.json()["data"]["presignedUrls"][0]
81
- logger.debug(f"Got presigned url: {presignedURLs}")
82
77
  presignedurl = self.update_presigned_url(
83
78
  presignedURLs, self.base_url
84
79
  )
85
- logger.debug(f"Updated presigned url: {presignedurl}")
86
80
  return presignedurl
87
81
  elif response.status_code == 401:
88
82
  logger.warning("Received 401 error. Attempting to refresh token.")
@@ -92,7 +86,7 @@ class UploadAgenticTraces:
92
86
  "Authorization": f"Bearer {token}",
93
87
  "X-Project-Name": self.project_name,
94
88
  }
95
- response = requests.request(
89
+ response = session_manager.session.request(
96
90
  "POST",
97
91
  endpoint,
98
92
  headers=headers,
@@ -103,13 +97,11 @@ class UploadAgenticTraces:
103
97
  logger.debug(
104
98
  f"API Call: [POST] {endpoint} | Status: {response.status_code} | Time: {elapsed_ms:.2f}ms"
105
99
  )
106
- if response.status_code in [200, 201]:
100
+ if response.status_code == 200:
107
101
  presignedURLs = response.json()["data"]["presignedUrls"][0]
108
- logger.debug(f"Got presigned url: {presignedURLs}")
109
102
  presignedurl = self.update_presigned_url(
110
103
  presignedURLs, self.base_url
111
104
  )
112
- logger.debug(f"Updated presigned url: {presignedurl}")
113
105
  return presignedurl
114
106
  else:
115
107
  logger.error(
@@ -121,8 +113,10 @@ class UploadAgenticTraces:
121
113
  f"Error while getting presigned url: {response.json()['message']}"
122
114
  )
123
115
  return None
124
-
125
- except requests.exceptions.RequestException as e:
116
+ except (PoolError, MaxRetryError, NewConnectionError, ConnectionError, Timeout) as e:
117
+ session_manager.handle_request_exceptions(e, "getting presigned URL")
118
+ return None
119
+ except RequestException as e:
126
120
  logger.error(f"Error while getting presigned url: {e}")
127
121
  return None
128
122
 
@@ -149,16 +143,16 @@ class UploadAgenticTraces:
149
143
 
150
144
  if "blob.core.windows.net" in presignedUrl: # Azure
151
145
  headers["x-ms-blob-type"] = "BlockBlob"
152
- print("Uploading agentic traces...")
146
+ logger.info("Uploading agentic traces to presigned URL...")
153
147
  try:
154
148
  with open(filename) as f:
155
149
  payload = f.read().replace("\n", "").replace("\r", "").encode()
156
150
  except Exception as e:
157
- print(f"Error while reading file: {e}")
151
+ logger.error(f"Error while reading file: {e}")
158
152
  return False
159
153
  try:
160
154
  start_time = time.time()
161
- response = requests.request(
155
+ response = session_manager.session.request(
162
156
  "PUT", presignedUrl, headers=headers, data=payload, timeout=self.timeout
163
157
  )
164
158
  elapsed_ms = (time.time() - start_time) * 1000
@@ -168,8 +162,11 @@ class UploadAgenticTraces:
168
162
  if response.status_code != 200 or response.status_code != 201:
169
163
  return response, response.status_code
170
164
  return True
171
- except requests.exceptions.RequestException as e:
172
- print(f"Error while uploading to presigned url: {e}")
165
+ except (PoolError, MaxRetryError, NewConnectionError, ConnectionError, Timeout) as e:
166
+ session_manager.handle_request_exceptions(e, "uploading trace to presigned URL")
167
+ return False
168
+ except RequestException as e:
169
+ logger.error(f"Error while uploading trace to presigned url: {e}")
173
170
  return False
174
171
 
175
172
  def insert_traces(self, presignedUrl):
@@ -185,21 +182,18 @@ class UploadAgenticTraces:
185
182
  "datasetSpans": self._get_dataset_spans(), # Extra key for agentic traces
186
183
  }
187
184
  )
188
- logger.debug(f"Inserting agentic traces to presigned url: {presignedUrl}")
189
185
  try:
190
186
  start_time = time.time()
191
187
  endpoint = f"{self.base_url}/v1/llm/insert/trace"
192
- response = requests.request(
188
+ response = session_manager.session.request(
193
189
  "POST", endpoint, headers=headers, data=payload, timeout=self.timeout
194
190
  )
195
- logger.debug(f"Payload: {payload}")
196
- logger.debug(f"Headers: {headers}")
197
191
  elapsed_ms = (time.time() - start_time) * 1000
198
192
  logger.debug(
199
193
  f"API Call: [POST] {endpoint} | Status: {response.status_code} | Time: {elapsed_ms:.2f}ms"
200
194
  )
201
195
  if response.status_code in [200, 201]:
202
- logger.debug("Successfully inserted traces")
196
+ logger.info(f"Traces inserted successfully: {response.json()['message']}")
203
197
  return True
204
198
  elif response.status_code == 401:
205
199
  logger.warning("Received 401 error. Attempting to refresh token.")
@@ -209,7 +203,7 @@ class UploadAgenticTraces:
209
203
  "Content-Type": "application/json",
210
204
  "X-Project-Name": self.project_name,
211
205
  }
212
- response = requests.request(
206
+ response = session_manager.session.request(
213
207
  "POST",
214
208
  endpoint,
215
209
  headers=headers,
@@ -221,24 +215,27 @@ class UploadAgenticTraces:
221
215
  f"API Call: [POST] {endpoint} | Status: {response.status_code} | Time: {elapsed_ms:.2f}ms"
222
216
  )
223
217
  if response.status_code in [200, 201]:
224
- logger.debug("Successfully inserted traces")
218
+ logger.info(f"Traces inserted successfully: {response.json()['message']}")
225
219
  return True
226
220
  else:
227
- logger.debug("Error while inserting traces")
221
+ logger.error(f"Error while inserting traces after 401: {response.json()['message']}")
228
222
  return False
229
223
  else:
230
- logger.debug("Error while inserting traces")
224
+ logger.error(f"Error while inserting traces: {response.json()['message']}")
231
225
  return False
232
- except requests.exceptions.RequestException as e:
233
- logger.debug(f"Error while inserting traces: {e}")
234
- return None
226
+ except (PoolError, MaxRetryError, NewConnectionError, ConnectionError, Timeout) as e:
227
+ session_manager.handle_request_exceptions(e, "inserting traces")
228
+ return False
229
+ except RequestException as e:
230
+ logger.error(f"Error while inserting traces: {e}")
231
+ return False
235
232
 
236
233
  def _get_dataset_spans(self):
237
234
  try:
238
235
  with open(self.json_file_path) as f:
239
236
  data = json.load(f)
240
237
  except Exception as e:
241
- logger.debug(f"Error while reading file: {e}")
238
+ print(f"Error while reading file: {e}")
242
239
  return None
243
240
  try:
244
241
  spans = data["data"][0]["spans"]
@@ -260,41 +257,41 @@ class UploadAgenticTraces:
260
257
  continue
261
258
  return dataset_spans
262
259
  except Exception as e:
263
- logger.debug(f"Error while reading dataset spans: {e}")
260
+ logger.error(f"Error while reading dataset spans: {e}")
264
261
  return None
265
262
 
266
263
  def upload_agentic_traces(self):
267
264
  try:
268
265
  presigned_url = self._get_presigned_url()
269
266
  if presigned_url is None:
270
- logger.debug("Warning: Failed to obtain presigned URL")
267
+ logger.warning("Warning: Failed to obtain presigned URL")
271
268
  return False
272
269
 
273
270
  # Upload the file using the presigned URL
274
271
  upload_result = self._put_presigned_url(presigned_url, self.json_file_path)
275
272
  if not upload_result:
276
- logger.debug("Error: Failed to upload file to presigned URL")
273
+ logger.error("Error: Failed to upload file to presigned URL")
277
274
  return False
278
275
  elif isinstance(upload_result, tuple):
279
276
  response, status_code = upload_result
280
277
  if status_code not in [200, 201]:
281
- logger.debug(
282
- f"Error: Upload failed with status code {status_code}: {response.text if hasattr(response, 'text') else 'Unknown error'}")
278
+ logger.error(
279
+ f"Error: Uploading agentic traces failed with status code {status_code}: {response.text if hasattr(response, 'text') else 'Unknown error'}")
283
280
  return False
284
281
  # Insert trace records
285
282
  insert_success = self.insert_traces(presigned_url)
286
283
  if not insert_success:
287
- logger.debug("Error: Failed to insert trace records")
284
+ print("Error: Failed to insert trace records")
288
285
  return False
289
286
 
290
- logger.debug("Successfully uploaded agentic traces")
287
+ logger.info("Successfully uploaded agentic traces")
291
288
  return True
292
289
  except FileNotFoundError:
293
- logger.debug(f"Error: Trace file not found at {self.json_file_path}")
290
+ logger.error(f"Error: Trace file not found at {self.json_file_path}")
294
291
  return False
295
292
  except ConnectionError as e:
296
- logger.debug(f"Error: Network connection failed while uploading traces: {e}")
293
+ logger.error(f"Error: Network connection failed while uploading traces: {e}")
297
294
  return False
298
295
  except Exception as e:
299
- logger.debug(f"Error while uploading agentic traces: {e}")
296
+ logger.error(f"Error while uploading agentic traces: {e}")
300
297
  return False