ragaai-catalyst 2.2.5b2__tar.gz → 2.2.5b4__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {ragaai_catalyst-2.2.5b2 → ragaai_catalyst-2.2.5b4}/.gitignore +1 -64
- {ragaai_catalyst-2.2.5b2/ragaai_catalyst.egg-info → ragaai_catalyst-2.2.5b4}/PKG-INFO +2 -1
- {ragaai_catalyst-2.2.5b2 → ragaai_catalyst-2.2.5b4}/Quickstart.md +13 -8
- {ragaai_catalyst-2.2.5b2 → ragaai_catalyst-2.2.5b4}/pyproject.toml +2 -1
- ragaai_catalyst-2.2.5b4/ragaai_catalyst/tracers/agentic_tracing/upload/session_manager.py +156 -0
- {ragaai_catalyst-2.2.5b2 → ragaai_catalyst-2.2.5b4}/ragaai_catalyst/tracers/agentic_tracing/upload/trace_uploader.py +122 -34
- {ragaai_catalyst-2.2.5b2 → ragaai_catalyst-2.2.5b4}/ragaai_catalyst/tracers/agentic_tracing/upload/upload_agentic_traces.py +46 -32
- {ragaai_catalyst-2.2.5b2 → ragaai_catalyst-2.2.5b4}/ragaai_catalyst/tracers/agentic_tracing/upload/upload_code.py +80 -42
- {ragaai_catalyst-2.2.5b2 → ragaai_catalyst-2.2.5b4}/ragaai_catalyst/tracers/exporters/ragaai_trace_exporter.py +4 -0
- {ragaai_catalyst-2.2.5b2 → ragaai_catalyst-2.2.5b4}/ragaai_catalyst/tracers/tracer.py +5 -2
- {ragaai_catalyst-2.2.5b2 → ragaai_catalyst-2.2.5b4/ragaai_catalyst.egg-info}/PKG-INFO +2 -1
- {ragaai_catalyst-2.2.5b2 → ragaai_catalyst-2.2.5b4}/ragaai_catalyst.egg-info/SOURCES.txt +1 -0
- {ragaai_catalyst-2.2.5b2 → ragaai_catalyst-2.2.5b4}/ragaai_catalyst.egg-info/requires.txt +1 -0
- {ragaai_catalyst-2.2.5b2 → ragaai_catalyst-2.2.5b4}/.github/ISSUE_TEMPLATE/bug_report.md +0 -0
- {ragaai_catalyst-2.2.5b2 → ragaai_catalyst-2.2.5b4}/.github/ISSUE_TEMPLATE/feature_request.md +0 -0
- {ragaai_catalyst-2.2.5b2 → ragaai_catalyst-2.2.5b4}/.github/PULL_REQUEST_TEMPLATE.md +0 -0
- {ragaai_catalyst-2.2.5b2 → ragaai_catalyst-2.2.5b4}/.github/workflows/ci.yml +0 -0
- {ragaai_catalyst-2.2.5b2 → ragaai_catalyst-2.2.5b4}/.gitmodules +0 -0
- {ragaai_catalyst-2.2.5b2 → ragaai_catalyst-2.2.5b4}/LICENSE +0 -0
- {ragaai_catalyst-2.2.5b2 → ragaai_catalyst-2.2.5b4}/README.md +0 -0
- {ragaai_catalyst-2.2.5b2 → ragaai_catalyst-2.2.5b4}/docs/agentic_tracing.md +0 -0
- {ragaai_catalyst-2.2.5b2 → ragaai_catalyst-2.2.5b4}/docs/dataset_management.md +0 -0
- {ragaai_catalyst-2.2.5b2 → ragaai_catalyst-2.2.5b4}/docs/img/autheticate.gif +0 -0
- {ragaai_catalyst-2.2.5b2 → ragaai_catalyst-2.2.5b4}/docs/img/create_project.gif +0 -0
- {ragaai_catalyst-2.2.5b2 → ragaai_catalyst-2.2.5b4}/docs/img/custom_metrics.png +0 -0
- {ragaai_catalyst-2.2.5b2 → ragaai_catalyst-2.2.5b4}/docs/img/dataset.gif +0 -0
- {ragaai_catalyst-2.2.5b2 → ragaai_catalyst-2.2.5b4}/docs/img/dataset.png +0 -0
- {ragaai_catalyst-2.2.5b2 → ragaai_catalyst-2.2.5b4}/docs/img/evaluation.gif +0 -0
- {ragaai_catalyst-2.2.5b2 → ragaai_catalyst-2.2.5b4}/docs/img/evaluation.png +0 -0
- {ragaai_catalyst-2.2.5b2 → ragaai_catalyst-2.2.5b4}/docs/img/guardrails.png +0 -0
- {ragaai_catalyst-2.2.5b2 → ragaai_catalyst-2.2.5b4}/docs/img/last_main.png +0 -0
- {ragaai_catalyst-2.2.5b2 → ragaai_catalyst-2.2.5b4}/docs/img/main.png +0 -0
- {ragaai_catalyst-2.2.5b2 → ragaai_catalyst-2.2.5b4}/docs/img/projects_new.png +0 -0
- {ragaai_catalyst-2.2.5b2 → ragaai_catalyst-2.2.5b4}/docs/img/trace_comp.png +0 -0
- {ragaai_catalyst-2.2.5b2 → ragaai_catalyst-2.2.5b4}/docs/prompt_management.md +0 -0
- {ragaai_catalyst-2.2.5b2 → ragaai_catalyst-2.2.5b4}/docs/trace_management.md +0 -0
- {ragaai_catalyst-2.2.5b2 → ragaai_catalyst-2.2.5b4}/examples/all_llm_provider/all_llm_provider.py +0 -0
- {ragaai_catalyst-2.2.5b2 → ragaai_catalyst-2.2.5b4}/examples/all_llm_provider/config.py +0 -0
- {ragaai_catalyst-2.2.5b2 → ragaai_catalyst-2.2.5b4}/examples/all_llm_provider/run_all_llm_provider.py +0 -0
- {ragaai_catalyst-2.2.5b2 → ragaai_catalyst-2.2.5b4}/examples/crewai/scifi_writer/README.md +0 -0
- {ragaai_catalyst-2.2.5b2 → ragaai_catalyst-2.2.5b4}/examples/crewai/scifi_writer/requirements.txt +0 -0
- {ragaai_catalyst-2.2.5b2 → ragaai_catalyst-2.2.5b4}/examples/crewai/scifi_writer/sample.env +0 -0
- {ragaai_catalyst-2.2.5b2 → ragaai_catalyst-2.2.5b4}/examples/crewai/scifi_writer/scifi_writer.py +0 -0
- {ragaai_catalyst-2.2.5b2 → ragaai_catalyst-2.2.5b4}/examples/custom_agents/travel_agent/agents.py +0 -0
- {ragaai_catalyst-2.2.5b2 → ragaai_catalyst-2.2.5b4}/examples/custom_agents/travel_agent/config.py +0 -0
- {ragaai_catalyst-2.2.5b2 → ragaai_catalyst-2.2.5b4}/examples/custom_agents/travel_agent/main.py +0 -0
- {ragaai_catalyst-2.2.5b2 → ragaai_catalyst-2.2.5b4}/examples/custom_agents/travel_agent/tools.py +0 -0
- {ragaai_catalyst-2.2.5b2 → ragaai_catalyst-2.2.5b4}/examples/haystack/news_fetching/README.md +0 -0
- {ragaai_catalyst-2.2.5b2 → ragaai_catalyst-2.2.5b4}/examples/haystack/news_fetching/news_fetching.py +0 -0
- {ragaai_catalyst-2.2.5b2 → ragaai_catalyst-2.2.5b4}/examples/haystack/news_fetching/requirements.txt +0 -0
- {ragaai_catalyst-2.2.5b2 → ragaai_catalyst-2.2.5b4}/examples/langchain/medical_rag/data/medical_texts/handbook1.pdf +0 -0
- {ragaai_catalyst-2.2.5b2 → ragaai_catalyst-2.2.5b4}/examples/langchain/medical_rag/data/medical_texts/handbook2.pdf +0 -0
- {ragaai_catalyst-2.2.5b2 → ragaai_catalyst-2.2.5b4}/examples/langchain/medical_rag/data/symptom_disease_map.csv +0 -0
- {ragaai_catalyst-2.2.5b2 → ragaai_catalyst-2.2.5b4}/examples/langchain/medical_rag/diagnosis_agent.py +0 -0
- {ragaai_catalyst-2.2.5b2 → ragaai_catalyst-2.2.5b4}/examples/langchain/medical_rag/requirements.txt +0 -0
- {ragaai_catalyst-2.2.5b2 → ragaai_catalyst-2.2.5b4}/examples/langchain/medical_rag/sample.env +0 -0
- {ragaai_catalyst-2.2.5b2 → ragaai_catalyst-2.2.5b4}/examples/langgraph/personal_research_assistant/README.md +0 -0
- {ragaai_catalyst-2.2.5b2 → ragaai_catalyst-2.2.5b4}/examples/langgraph/personal_research_assistant/requirements.txt +0 -0
- {ragaai_catalyst-2.2.5b2 → ragaai_catalyst-2.2.5b4}/examples/langgraph/personal_research_assistant/research_assistant.py +0 -0
- {ragaai_catalyst-2.2.5b2 → ragaai_catalyst-2.2.5b4}/examples/langgraph/personal_research_assistant/sample.env +0 -0
- {ragaai_catalyst-2.2.5b2 → ragaai_catalyst-2.2.5b4}/examples/llamaindex_examples/legal_research_rag/legal_data/cases/ca_overtime_2021.pdf +0 -0
- {ragaai_catalyst-2.2.5b2 → ragaai_catalyst-2.2.5b4}/examples/llamaindex_examples/legal_research_rag/legal_data/cases/fl_ada_2022.pdf +0 -0
- {ragaai_catalyst-2.2.5b2 → ragaai_catalyst-2.2.5b4}/examples/llamaindex_examples/legal_research_rag/legal_data/statutes.csv +0 -0
- {ragaai_catalyst-2.2.5b2 → ragaai_catalyst-2.2.5b4}/examples/llamaindex_examples/legal_research_rag/legal_rag.py +0 -0
- {ragaai_catalyst-2.2.5b2 → ragaai_catalyst-2.2.5b4}/examples/llamaindex_examples/legal_research_rag/requirements.txt +0 -0
- {ragaai_catalyst-2.2.5b2 → ragaai_catalyst-2.2.5b4}/examples/llamaindex_examples/legal_research_rag/sample.env +0 -0
- {ragaai_catalyst-2.2.5b2 → ragaai_catalyst-2.2.5b4}/examples/openai_agents_sdk/email_data_extraction_agent/README.md +0 -0
- {ragaai_catalyst-2.2.5b2 → ragaai_catalyst-2.2.5b4}/examples/openai_agents_sdk/email_data_extraction_agent/data_extraction_email.py +0 -0
- {ragaai_catalyst-2.2.5b2 → ragaai_catalyst-2.2.5b4}/examples/openai_agents_sdk/email_data_extraction_agent/requirements.txt +0 -0
- {ragaai_catalyst-2.2.5b2 → ragaai_catalyst-2.2.5b4}/examples/openai_agents_sdk/email_data_extraction_agent/sample.env +0 -0
- {ragaai_catalyst-2.2.5b2 → ragaai_catalyst-2.2.5b4}/examples/openai_agents_sdk/youtube_summary_agent/README.md +0 -0
- {ragaai_catalyst-2.2.5b2 → ragaai_catalyst-2.2.5b4}/examples/openai_agents_sdk/youtube_summary_agent/requirements.txt +0 -0
- {ragaai_catalyst-2.2.5b2 → ragaai_catalyst-2.2.5b4}/examples/openai_agents_sdk/youtube_summary_agent/sample.env +0 -0
- {ragaai_catalyst-2.2.5b2 → ragaai_catalyst-2.2.5b4}/examples/openai_agents_sdk/youtube_summary_agent/youtube_summary_agent.py +0 -0
- {ragaai_catalyst-2.2.5b2 → ragaai_catalyst-2.2.5b4}/examples/pii_masking_example/llamaindex_agentic_fastapi/app.py +0 -0
- {ragaai_catalyst-2.2.5b2 → ragaai_catalyst-2.2.5b4}/examples/pii_masking_example/llamaindex_agentic_fastapi/app_presidio.py +0 -0
- {ragaai_catalyst-2.2.5b2 → ragaai_catalyst-2.2.5b4}/examples/pii_masking_example/llamaindex_agentic_fastapi/request.py +0 -0
- {ragaai_catalyst-2.2.5b2 → ragaai_catalyst-2.2.5b4}/examples/pii_masking_example/llamaindex_agentic_fastapi/requirements.txt +0 -0
- {ragaai_catalyst-2.2.5b2 → ragaai_catalyst-2.2.5b4}/examples/smolagents/most_upvoted_paper/README.md +0 -0
- {ragaai_catalyst-2.2.5b2 → ragaai_catalyst-2.2.5b4}/examples/smolagents/most_upvoted_paper/most_upvoted_paper.py +0 -0
- {ragaai_catalyst-2.2.5b2 → ragaai_catalyst-2.2.5b4}/examples/smolagents/most_upvoted_paper/requirements.txt +0 -0
- {ragaai_catalyst-2.2.5b2 → ragaai_catalyst-2.2.5b4}/examples/smolagents/most_upvoted_paper/sample.env +0 -0
- {ragaai_catalyst-2.2.5b2 → ragaai_catalyst-2.2.5b4}/ragaai_catalyst/__init__.py +0 -0
- {ragaai_catalyst-2.2.5b2 → ragaai_catalyst-2.2.5b4}/ragaai_catalyst/_version.py +0 -0
- {ragaai_catalyst-2.2.5b2 → ragaai_catalyst-2.2.5b4}/ragaai_catalyst/dataset.py +0 -0
- {ragaai_catalyst-2.2.5b2 → ragaai_catalyst-2.2.5b4}/ragaai_catalyst/evaluation.py +0 -0
- {ragaai_catalyst-2.2.5b2 → ragaai_catalyst-2.2.5b4}/ragaai_catalyst/guard_executor.py +0 -0
- {ragaai_catalyst-2.2.5b2 → ragaai_catalyst-2.2.5b4}/ragaai_catalyst/guardrails_manager.py +0 -0
- {ragaai_catalyst-2.2.5b2 → ragaai_catalyst-2.2.5b4}/ragaai_catalyst/internal_api_completion.py +0 -0
- {ragaai_catalyst-2.2.5b2 → ragaai_catalyst-2.2.5b4}/ragaai_catalyst/prompt_manager.py +0 -0
- {ragaai_catalyst-2.2.5b2 → ragaai_catalyst-2.2.5b4}/ragaai_catalyst/proxy_call.py +0 -0
- {ragaai_catalyst-2.2.5b2 → ragaai_catalyst-2.2.5b4}/ragaai_catalyst/ragaai_catalyst.py +0 -0
- {ragaai_catalyst-2.2.5b2 → ragaai_catalyst-2.2.5b4}/ragaai_catalyst/redteaming/__init__.py +0 -0
- {ragaai_catalyst-2.2.5b2 → ragaai_catalyst-2.2.5b4}/ragaai_catalyst/redteaming/config/detectors.toml +0 -0
- {ragaai_catalyst-2.2.5b2 → ragaai_catalyst-2.2.5b4}/ragaai_catalyst/redteaming/data_generator/scenario_generator.py +0 -0
- {ragaai_catalyst-2.2.5b2 → ragaai_catalyst-2.2.5b4}/ragaai_catalyst/redteaming/data_generator/test_case_generator.py +0 -0
- {ragaai_catalyst-2.2.5b2 → ragaai_catalyst-2.2.5b4}/ragaai_catalyst/redteaming/evaluator.py +0 -0
- {ragaai_catalyst-2.2.5b2 → ragaai_catalyst-2.2.5b4}/ragaai_catalyst/redteaming/llm_generator.py +0 -0
- {ragaai_catalyst-2.2.5b2 → ragaai_catalyst-2.2.5b4}/ragaai_catalyst/redteaming/llm_generator_old.py +0 -0
- {ragaai_catalyst-2.2.5b2 → ragaai_catalyst-2.2.5b4}/ragaai_catalyst/redteaming/red_teaming.py +0 -0
- {ragaai_catalyst-2.2.5b2 → ragaai_catalyst-2.2.5b4}/ragaai_catalyst/redteaming/requirements.txt +0 -0
- {ragaai_catalyst-2.2.5b2 → ragaai_catalyst-2.2.5b4}/ragaai_catalyst/redteaming/tests/grok.ipynb +0 -0
- {ragaai_catalyst-2.2.5b2 → ragaai_catalyst-2.2.5b4}/ragaai_catalyst/redteaming/tests/stereotype.ipynb +0 -0
- {ragaai_catalyst-2.2.5b2 → ragaai_catalyst-2.2.5b4}/ragaai_catalyst/redteaming/upload_result.py +0 -0
- {ragaai_catalyst-2.2.5b2 → ragaai_catalyst-2.2.5b4}/ragaai_catalyst/redteaming/utils/issue_description.py +0 -0
- {ragaai_catalyst-2.2.5b2 → ragaai_catalyst-2.2.5b4}/ragaai_catalyst/redteaming/utils/rt.png +0 -0
- {ragaai_catalyst-2.2.5b2 → ragaai_catalyst-2.2.5b4}/ragaai_catalyst/redteaming_old.py +0 -0
- {ragaai_catalyst-2.2.5b2 → ragaai_catalyst-2.2.5b4}/ragaai_catalyst/synthetic_data_generation.py +0 -0
- {ragaai_catalyst-2.2.5b2 → ragaai_catalyst-2.2.5b4}/ragaai_catalyst/tracers/__init__.py +0 -0
- {ragaai_catalyst-2.2.5b2 → ragaai_catalyst-2.2.5b4}/ragaai_catalyst/tracers/agentic_tracing/README.md +0 -0
- {ragaai_catalyst-2.2.5b2 → ragaai_catalyst-2.2.5b4}/ragaai_catalyst/tracers/agentic_tracing/__init__.py +0 -0
- {ragaai_catalyst-2.2.5b2 → ragaai_catalyst-2.2.5b4}/ragaai_catalyst/tracers/agentic_tracing/data/__init__.py +0 -0
- {ragaai_catalyst-2.2.5b2 → ragaai_catalyst-2.2.5b4}/ragaai_catalyst/tracers/agentic_tracing/data/data_structure.py +0 -0
- {ragaai_catalyst-2.2.5b2 → ragaai_catalyst-2.2.5b4}/ragaai_catalyst/tracers/agentic_tracing/tracers/__init__.py +0 -0
- {ragaai_catalyst-2.2.5b2 → ragaai_catalyst-2.2.5b4}/ragaai_catalyst/tracers/agentic_tracing/tracers/main_tracer.py +0 -0
- {ragaai_catalyst-2.2.5b2 → ragaai_catalyst-2.2.5b4}/ragaai_catalyst/tracers/agentic_tracing/upload/__init__.py +0 -0
- {ragaai_catalyst-2.2.5b2 → ragaai_catalyst-2.2.5b4}/ragaai_catalyst/tracers/agentic_tracing/utils/__init__.py +0 -0
- {ragaai_catalyst-2.2.5b2 → ragaai_catalyst-2.2.5b4}/ragaai_catalyst/tracers/agentic_tracing/utils/create_dataset_schema.py +0 -0
- {ragaai_catalyst-2.2.5b2 → ragaai_catalyst-2.2.5b4}/ragaai_catalyst/tracers/agentic_tracing/utils/file_name_tracker.py +0 -0
- {ragaai_catalyst-2.2.5b2 → ragaai_catalyst-2.2.5b4}/ragaai_catalyst/tracers/agentic_tracing/utils/llm_utils.py +0 -0
- {ragaai_catalyst-2.2.5b2 → ragaai_catalyst-2.2.5b4}/ragaai_catalyst/tracers/agentic_tracing/utils/model_costs.json +0 -0
- {ragaai_catalyst-2.2.5b2 → ragaai_catalyst-2.2.5b4}/ragaai_catalyst/tracers/agentic_tracing/utils/system_monitor.py +0 -0
- {ragaai_catalyst-2.2.5b2 → ragaai_catalyst-2.2.5b4}/ragaai_catalyst/tracers/agentic_tracing/utils/trace_utils.py +0 -0
- {ragaai_catalyst-2.2.5b2 → ragaai_catalyst-2.2.5b4}/ragaai_catalyst/tracers/agentic_tracing/utils/unique_decorator.py +0 -0
- {ragaai_catalyst-2.2.5b2 → ragaai_catalyst-2.2.5b4}/ragaai_catalyst/tracers/agentic_tracing/utils/zip_list_of_unique_files.py +0 -0
- {ragaai_catalyst-2.2.5b2 → ragaai_catalyst-2.2.5b4}/ragaai_catalyst/tracers/distributed.py +0 -0
- {ragaai_catalyst-2.2.5b2 → ragaai_catalyst-2.2.5b4}/ragaai_catalyst/tracers/exporters/__init__.py +0 -0
- {ragaai_catalyst-2.2.5b2 → ragaai_catalyst-2.2.5b4}/ragaai_catalyst/tracers/exporters/dynamic_trace_exporter.py +0 -0
- {ragaai_catalyst-2.2.5b2 → ragaai_catalyst-2.2.5b4}/ragaai_catalyst/tracers/exporters/file_span_exporter.py +0 -0
- {ragaai_catalyst-2.2.5b2 → ragaai_catalyst-2.2.5b4}/ragaai_catalyst/tracers/instrumentators/__init__.py +0 -0
- {ragaai_catalyst-2.2.5b2 → ragaai_catalyst-2.2.5b4}/ragaai_catalyst/tracers/utils/__init__.py +0 -0
- {ragaai_catalyst-2.2.5b2 → ragaai_catalyst-2.2.5b4}/ragaai_catalyst/tracers/utils/model_prices_and_context_window_backup.json +0 -0
- {ragaai_catalyst-2.2.5b2 → ragaai_catalyst-2.2.5b4}/ragaai_catalyst/tracers/utils/rag_extraction_logic_final.py +0 -0
- {ragaai_catalyst-2.2.5b2 → ragaai_catalyst-2.2.5b4}/ragaai_catalyst/tracers/utils/trace_json_converter.py +0 -0
- {ragaai_catalyst-2.2.5b2 → ragaai_catalyst-2.2.5b4}/ragaai_catalyst/tracers/utils/utils.py +0 -0
- {ragaai_catalyst-2.2.5b2 → ragaai_catalyst-2.2.5b4}/ragaai_catalyst/utils.py +0 -0
- {ragaai_catalyst-2.2.5b2 → ragaai_catalyst-2.2.5b4}/ragaai_catalyst.egg-info/dependency_links.txt +0 -0
- {ragaai_catalyst-2.2.5b2 → ragaai_catalyst-2.2.5b4}/ragaai_catalyst.egg-info/top_level.txt +0 -0
- {ragaai_catalyst-2.2.5b2 → ragaai_catalyst-2.2.5b4}/requirements.txt +0 -0
- {ragaai_catalyst-2.2.5b2 → ragaai_catalyst-2.2.5b4}/setup.cfg +0 -0
- {ragaai_catalyst-2.2.5b2 → ragaai_catalyst-2.2.5b4}/test_report_20250407_183101.txt +0 -0
- {ragaai_catalyst-2.2.5b2 → ragaai_catalyst-2.2.5b4}/tests/README.md +0 -0
- {ragaai_catalyst-2.2.5b2 → ragaai_catalyst-2.2.5b4}/tests/environment.yml +0 -0
- {ragaai_catalyst-2.2.5b2 → ragaai_catalyst-2.2.5b4}/tests/examples/__init__.py +0 -0
- {ragaai_catalyst-2.2.5b2 → ragaai_catalyst-2.2.5b4}/tests/examples/all_llm_provider/all_llm_provider.py +0 -0
- {ragaai_catalyst-2.2.5b2 → ragaai_catalyst-2.2.5b4}/tests/examples/all_llm_provider/config.py +0 -0
- {ragaai_catalyst-2.2.5b2 → ragaai_catalyst-2.2.5b4}/tests/examples/all_llm_provider/test_all_llm_provider.py +0 -0
- {ragaai_catalyst-2.2.5b2 → ragaai_catalyst-2.2.5b4}/tests/examples/crewai/scifi_writer/sci_fi_story.md +0 -0
- {ragaai_catalyst-2.2.5b2 → ragaai_catalyst-2.2.5b4}/tests/examples/crewai/scifi_writer/scifi_writer.py +0 -0
- {ragaai_catalyst-2.2.5b2 → ragaai_catalyst-2.2.5b4}/tests/examples/crewai/scifi_writer/test_scifi_writer.py +0 -0
- {ragaai_catalyst-2.2.5b2 → ragaai_catalyst-2.2.5b4}/tests/examples/custom_agents/travel_agent/agents.py +0 -0
- {ragaai_catalyst-2.2.5b2 → ragaai_catalyst-2.2.5b4}/tests/examples/custom_agents/travel_agent/config.py +0 -0
- {ragaai_catalyst-2.2.5b2 → ragaai_catalyst-2.2.5b4}/tests/examples/custom_agents/travel_agent/main.py +0 -0
- {ragaai_catalyst-2.2.5b2 → ragaai_catalyst-2.2.5b4}/tests/examples/custom_agents/travel_agent/test_travel_agent.py +0 -0
- {ragaai_catalyst-2.2.5b2 → ragaai_catalyst-2.2.5b4}/tests/examples/custom_agents/travel_agent/tools.py +0 -0
- {ragaai_catalyst-2.2.5b2 → ragaai_catalyst-2.2.5b4}/tests/examples/haystack/news_fetching/news_fetching.py +0 -0
- {ragaai_catalyst-2.2.5b2 → ragaai_catalyst-2.2.5b4}/tests/examples/haystack/news_fetching/test_news_fetching.py +0 -0
- {ragaai_catalyst-2.2.5b2 → ragaai_catalyst-2.2.5b4}/tests/examples/langchain/medical_rag/data/medical_texts/handbook1.pdf +0 -0
- {ragaai_catalyst-2.2.5b2 → ragaai_catalyst-2.2.5b4}/tests/examples/langchain/medical_rag/data/medical_texts/handbook2.pdf +0 -0
- {ragaai_catalyst-2.2.5b2 → ragaai_catalyst-2.2.5b4}/tests/examples/langchain/medical_rag/data/symptom_disease_map.csv +0 -0
- {ragaai_catalyst-2.2.5b2 → ragaai_catalyst-2.2.5b4}/tests/examples/langchain/medical_rag/diagnosis_agent.py +0 -0
- {ragaai_catalyst-2.2.5b2 → ragaai_catalyst-2.2.5b4}/tests/examples/langchain/medical_rag/test_diagnosis_agent.py +0 -0
- {ragaai_catalyst-2.2.5b2 → ragaai_catalyst-2.2.5b4}/tests/examples/langgraph/personal_research_assistant/research_assistant.py +0 -0
- {ragaai_catalyst-2.2.5b2 → ragaai_catalyst-2.2.5b4}/tests/examples/langgraph/personal_research_assistant/test_research_assistant.py +0 -0
- {ragaai_catalyst-2.2.5b2 → ragaai_catalyst-2.2.5b4}/tests/examples/llamaindex_examples/legal_research_rag/legal_data/cases/ca_overtime_2021.pdf +0 -0
- {ragaai_catalyst-2.2.5b2 → ragaai_catalyst-2.2.5b4}/tests/examples/llamaindex_examples/legal_research_rag/legal_data/cases/fl_ada_2022.pdf +0 -0
- {ragaai_catalyst-2.2.5b2 → ragaai_catalyst-2.2.5b4}/tests/examples/llamaindex_examples/legal_research_rag/legal_data/statutes.csv +0 -0
- {ragaai_catalyst-2.2.5b2 → ragaai_catalyst-2.2.5b4}/tests/examples/llamaindex_examples/legal_research_rag/legal_rag.py +0 -0
- {ragaai_catalyst-2.2.5b2 → ragaai_catalyst-2.2.5b4}/tests/examples/llamaindex_examples/legal_research_rag/test_legal_rag.py +0 -0
- {ragaai_catalyst-2.2.5b2 → ragaai_catalyst-2.2.5b4}/tests/examples/smolagents/most_upvoted_paper/most_upvoted_paper.py +0 -0
- {ragaai_catalyst-2.2.5b2 → ragaai_catalyst-2.2.5b4}/tests/examples/smolagents/most_upvoted_paper/paper.pdf +0 -0
- {ragaai_catalyst-2.2.5b2 → ragaai_catalyst-2.2.5b4}/tests/examples/smolagents/most_upvoted_paper/test_most_upvoted_paper.py +0 -0
- {ragaai_catalyst-2.2.5b2 → ragaai_catalyst-2.2.5b4}/tests/examples/test_utils/get_components.py +0 -0
- {ragaai_catalyst-2.2.5b2 → ragaai_catalyst-2.2.5b4}/tests/examples/test_utils/get_trace_data.py +0 -0
- {ragaai_catalyst-2.2.5b2 → ragaai_catalyst-2.2.5b4}/tests/run_pytest_and_print_and_save_results.py +0 -0
- {ragaai_catalyst-2.2.5b2 → ragaai_catalyst-2.2.5b4}/tests/table_result.png +0 -0
- {ragaai_catalyst-2.2.5b2 → ragaai_catalyst-2.2.5b4}/tests/test_catalyst/test_base_tracer_add_metrics.py +0 -0
- {ragaai_catalyst-2.2.5b2 → ragaai_catalyst-2.2.5b4}/tests/test_catalyst/test_base_tracer_metrics.py +0 -0
- {ragaai_catalyst-2.2.5b2 → ragaai_catalyst-2.2.5b4}/tests/test_catalyst/test_data/util_synthetic_data_doc.pdf +0 -0
- {ragaai_catalyst-2.2.5b2 → ragaai_catalyst-2.2.5b4}/tests/test_catalyst/test_data/util_synthetic_data_invalid.csv +0 -0
- {ragaai_catalyst-2.2.5b2 → ragaai_catalyst-2.2.5b4}/tests/test_catalyst/test_data/util_synthetic_data_valid.csv +0 -0
- {ragaai_catalyst-2.2.5b2 → ragaai_catalyst-2.2.5b4}/tests/test_catalyst/test_data/util_test_dataset.csv +0 -0
- {ragaai_catalyst-2.2.5b2 → ragaai_catalyst-2.2.5b4}/tests/test_catalyst/test_data/util_test_langchain_tracing.pdf +0 -0
- {ragaai_catalyst-2.2.5b2 → ragaai_catalyst-2.2.5b4}/tests/test_catalyst/test_dataset.py +0 -0
- {ragaai_catalyst-2.2.5b2 → ragaai_catalyst-2.2.5b4}/tests/test_catalyst/test_evaluation.py +0 -0
- {ragaai_catalyst-2.2.5b2 → ragaai_catalyst-2.2.5b4}/tests/test_catalyst/test_evaluation_metrics.py +0 -0
- {ragaai_catalyst-2.2.5b2 → ragaai_catalyst-2.2.5b4}/tests/test_catalyst/test_prompt_manager.py +0 -0
- {ragaai_catalyst-2.2.5b2 → ragaai_catalyst-2.2.5b4}/tests/test_catalyst/test_synthetic_data_generation.py +0 -0
- {ragaai_catalyst-2.2.5b2 → ragaai_catalyst-2.2.5b4}/tests/test_catalyst/test_the_configuration.py +0 -0
- {ragaai_catalyst-2.2.5b2 → ragaai_catalyst-2.2.5b4}/tests_requirements.txt +0 -0
@@ -166,67 +166,4 @@ cython_debug/
|
|
166
166
|
# be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore
|
167
167
|
# and can be added to the global gitignore or merged into this file. For a more nuclear
|
168
168
|
# option (not recommended) you can uncomment the following to ignore the entire idea folder.
|
169
|
-
#.idea/
|
170
|
-
|
171
|
-
|
172
|
-
agent_load_test_exceptions.csv
|
173
|
-
agent_load_test_failures.csv
|
174
|
-
agent_load_test_stats.csv
|
175
|
-
agent_load_test_stats_history.csv
|
176
|
-
ai_document_061023_2.pdf
|
177
|
-
catalyst_v2.ipynb
|
178
|
-
chroma_db/
|
179
|
-
custom_cost_bug.ipynb
|
180
|
-
custom_cost_bug.py
|
181
|
-
deephealth_sample_50_data.csv
|
182
|
-
dh_full_test.ipynb
|
183
|
-
diagnosis_agent.py
|
184
|
-
langchain_sample_trace.json
|
185
|
-
legal_rag.py
|
186
|
-
load_test_streaming.py
|
187
|
-
load_test_v1.py
|
188
|
-
load_test_v2.py
|
189
|
-
# locustfile.py
|
190
|
-
masking_example.py
|
191
|
-
model_prices_and_context_window_backup.json
|
192
|
-
# multi_llm_test.py
|
193
|
-
rag_agent_traces.json
|
194
|
-
rag_agent_traces_with_streaming.json
|
195
|
-
rag_agent_traces_wo_streaming.json
|
196
|
-
rag_extraction_logic_final.py
|
197
|
-
research_assistant.py
|
198
|
-
sci_fi_story.md
|
199
|
-
scifi_writer.py
|
200
|
-
stress_test_langchain.py
|
201
|
-
test.ipynb
|
202
|
-
test_token_refresh.py
|
203
|
-
uplaod_numerical_col_csv.py
|
204
|
-
dh_full_test.py
|
205
|
-
model_gemini_15_flash_002_w_stream.json
|
206
|
-
model_gemini_15_flash_002_wo_stream.json
|
207
|
-
model_gpt_4o_mini_w_stream.json
|
208
|
-
model_gpt_4o_mini_wo_stream.json
|
209
|
-
model_gpt_41_w_stream.json
|
210
|
-
model_gpt_41_wo_stream.json
|
211
|
-
RAG_LOAD_TEST_README.md
|
212
|
-
locustfile.py
|
213
|
-
locustfile_fixed.py
|
214
|
-
locustfile_rag.py
|
215
|
-
multi_llm_test.py
|
216
|
-
run_rag_load_test.py
|
217
|
-
set_dataset_name_test.py
|
218
|
-
simple_rag_app.py
|
219
|
-
trace_upload_analysis.md
|
220
|
-
updated_pydantic_test.py
|
221
|
-
langchain_rag.py
|
222
|
-
pydantic_test.ipynb
|
223
|
-
test_2.py
|
224
|
-
docker_gt_dataset1.csv
|
225
|
-
direct_anthropic.json
|
226
|
-
direct_llm_call_test.py
|
227
|
-
direct_llm_examples.py
|
228
|
-
direct_opanai.json
|
229
|
-
new_feature_test.ipynb
|
230
|
-
ex_google_adk.py
|
231
|
-
multi_tool_agent/
|
232
|
-
storyflow_agent.py
|
169
|
+
#.idea/
|
@@ -1,6 +1,6 @@
|
|
1
1
|
Metadata-Version: 2.4
|
2
2
|
Name: ragaai_catalyst
|
3
|
-
Version: 2.2.
|
3
|
+
Version: 2.2.5b4
|
4
4
|
Summary: RAGA AI CATALYST
|
5
5
|
Author-email: Kiran Scaria <kiran.scaria@raga.ai>, Kedar Gaikwad <kedar.gaikwad@raga.ai>, Dushyant Mahajan <dushyant.mahajan@raga.ai>, Siddhartha Kosti <siddhartha.kosti@raga.ai>, Ritika Goel <ritika.goel@raga.ai>, Vijay Chaurasia <vijay.chaurasia@raga.ai>, Tushar Kumar <tushar.kumar@raga.ai>, Rishabh Pandey <rishabh.pandey@raga.ai>, Jyotsana C G <jyotsana@raga.ai>
|
6
6
|
Requires-Python: <=3.13.2,>=3.10
|
@@ -41,6 +41,7 @@ Requires-Dist: openinference-instrumentation-crewai
|
|
41
41
|
Requires-Dist: openinference-instrumentation-haystack
|
42
42
|
Requires-Dist: openinference-instrumentation-openai-agents
|
43
43
|
Requires-Dist: openinference-instrumentation-smolagents
|
44
|
+
Requires-Dist: openinference-instrumentation-google-adk
|
44
45
|
Requires-Dist: opentelemetry-sdk
|
45
46
|
Requires-Dist: opentelemetry-exporter-otlp
|
46
47
|
Requires-Dist: opentelemetry-proto>=1.12.0
|
@@ -1,4 +1,4 @@
|
|
1
|
-
# Quickstart
|
1
|
+
# Quickstart | RagaAI Catalyst
|
2
2
|
|
3
3
|
## **1. Install RagaAI Catalyst**
|
4
4
|
|
@@ -17,8 +17,6 @@ pip install ragaai-catalyst
|
|
17
17
|
2. Navigate to **Profile Settings** → **Authentication**.
|
18
18
|
3. Click **Generate New Key** to obtain your **Access Key** and **Secret Key**.
|
19
19
|
|
20
|
-

|
21
|
-
|
22
20
|
### **Initialize the SDK**
|
23
21
|
|
24
22
|
To begin using Catalyst, initialize it as follows:
|
@@ -33,6 +31,7 @@ catalyst = RagaAICatalyst(
|
|
33
31
|
)
|
34
32
|
```
|
35
33
|
|
34
|
+
|
36
35
|
## **3. Create Your First Project**
|
37
36
|
|
38
37
|
Create a new project and choose a use case from the available options:
|
@@ -47,8 +46,6 @@ project = catalyst.create_project(
|
|
47
46
|
# List available use cases
|
48
47
|
print(catalyst.project_use_cases())
|
49
48
|
```
|
50
|
-

|
51
|
-
|
52
49
|
|
53
50
|
### **Add a Dataset**
|
54
51
|
Initialize the dataset manager and create a dataset from a CSV file, DataFrame, or JSONl file.
|
@@ -74,7 +71,7 @@ dataset_manager.create_from_csv(
|
|
74
71
|
# View dataset schema
|
75
72
|
print(dataset_manager.get_schema_mapping())
|
76
73
|
```
|
77
|
-
|
74
|
+
|
78
75
|
|
79
76
|
## **4. Trace Your Application**
|
80
77
|
|
@@ -147,7 +144,7 @@ tracer.stop()
|
|
147
144
|
# Verify data capture
|
148
145
|
print(tracer.get_upload_status())
|
149
146
|
```
|
150
|
-
|
147
|
+
|
151
148
|
|
152
149
|
|
153
150
|
## **5. Evaluation Framework**
|
@@ -192,4 +189,12 @@ evaluation.add_metrics(
|
|
192
189
|
print(f"Status: {evaluation.get_status()}")
|
193
190
|
print(f"Results: {evaluation.get_results()}")
|
194
191
|
```
|
195
|
-
|
192
|
+
|
193
|
+
|
194
|
+
|
195
|
+
## **Next Steps**
|
196
|
+
- **Explore the Dashboard:** Visualize metrics and insights in the RagaAI Web UI.
|
197
|
+
|
198
|
+
|
199
|
+
|
200
|
+
**Version:** 1.0.0 | **Last Updated:** Mar 2025
|
@@ -8,7 +8,7 @@ description = "RAGA AI CATALYST"
|
|
8
8
|
readme = "README.md"
|
9
9
|
requires-python = ">=3.10,<=3.13.2"
|
10
10
|
# license = {file = "LICENSE"}
|
11
|
-
version = "2.2.5.beta.
|
11
|
+
version = "2.2.5.beta.4"
|
12
12
|
authors = [
|
13
13
|
{name = "Kiran Scaria", email = "kiran.scaria@raga.ai"},
|
14
14
|
{name = "Kedar Gaikwad", email = "kedar.gaikwad@raga.ai"},
|
@@ -57,6 +57,7 @@ dependencies = [
|
|
57
57
|
"openinference-instrumentation-haystack",
|
58
58
|
"openinference-instrumentation-openai-agents",
|
59
59
|
"openinference-instrumentation-smolagents",
|
60
|
+
"openinference-instrumentation-google-adk",
|
60
61
|
"opentelemetry-sdk",
|
61
62
|
"opentelemetry-exporter-otlp",
|
62
63
|
"opentelemetry-proto>=1.12.0",
|
@@ -0,0 +1,156 @@
|
|
1
|
+
import logging
import os
import threading
import time

from http.client import RemoteDisconnected

import requests
from requests.adapters import HTTPAdapter
from requests.exceptions import ConnectionError, Timeout
from urllib3.exceptions import PoolError, MaxRetryError, NewConnectionError
from urllib3.util.retry import Retry

from ragaai_catalyst import RagaAICatalyst
|
12
|
+
|
13
|
+
logger = logging.getLogger(__name__)
|
14
|
+
|
15
|
+
|
16
|
+
class SessionManager:
|
17
|
+
"""Shared session manager with connection pooling for HTTP requests"""
|
18
|
+
_instance = None
|
19
|
+
_session = None
|
20
|
+
_lock = threading.Lock()
|
21
|
+
|
22
|
+
def __new__(cls):
|
23
|
+
if cls._instance is None:
|
24
|
+
with cls._lock: # Thread-safe singleton
|
25
|
+
if cls._instance is None: # Double-check locking
|
26
|
+
logger.info("Creating new SessionManager singleton instance")
|
27
|
+
cls._instance = super(SessionManager, cls).__new__(cls)
|
28
|
+
cls._instance._initialize_session()
|
29
|
+
else:
|
30
|
+
logger.debug("SessionManager instance already exists, returning existing instance")
|
31
|
+
else:
|
32
|
+
logger.debug("SessionManager instance exists, returning existing instance")
|
33
|
+
return cls._instance
|
34
|
+
|
35
|
+
def _initialize_session(self):
|
36
|
+
"""Initialize session with connection pooling and retry strategy"""
|
37
|
+
logger.info("Initializing HTTP session with connection pooling and retry strategy")
|
38
|
+
self._session = requests.Session()
|
39
|
+
|
40
|
+
retry_strategy = Retry(
|
41
|
+
total=3, # number of retries
|
42
|
+
connect=3, # number of retries for connection-related errors
|
43
|
+
read=3, # number of retries for read-related errors
|
44
|
+
backoff_factor=0.5, # wait 0.5, 1, 2... seconds between retries
|
45
|
+
status_forcelist=[500, 502, 503, 504] # HTTP status codes to retry on
|
46
|
+
)
|
47
|
+
|
48
|
+
adapter = HTTPAdapter(
|
49
|
+
max_retries=retry_strategy,
|
50
|
+
pool_connections=5, # number of connection pools to cache (per host)
|
51
|
+
pool_maxsize=50, # maximum number of connections in each pool
|
52
|
+
pool_block=True # Block/wait when pool is full rather than raising error
|
53
|
+
)
|
54
|
+
|
55
|
+
self._session.mount("http://", adapter)
|
56
|
+
self._session.mount("https://", adapter)
|
57
|
+
|
58
|
+
# Set session-level configuration to handle connection issues
|
59
|
+
self._session.headers.update({
|
60
|
+
'Connection': 'keep-alive',
|
61
|
+
'User-Agent': 'RagaAI-Catalyst/1.0'
|
62
|
+
})
|
63
|
+
|
64
|
+
logger.info("HTTP session initialized successfully with adapters mounted for http:// and https://")
|
65
|
+
|
66
|
+
# Warm up connection pool using RagaAICatalyst.BASE_URL
|
67
|
+
if os.getenv("RAGAAI_CATALYST_BASE_URL") is not None:
|
68
|
+
base_url = os.getenv("RAGAAI_CATALYST_BASE_URL")
|
69
|
+
logger.info(f"Warming up connection pool using RagaAICatalyst.BASE_URL: {base_url}")
|
70
|
+
self.warm_up_connections(base_url)
|
71
|
+
else:
|
72
|
+
logger.warning(f"RAGAAI_CATALYST_BASE_URL not available, skipping connection warmup")
|
73
|
+
|
74
|
+
@property
|
75
|
+
def session(self):
|
76
|
+
if self._session is None:
|
77
|
+
logger.warning("Session accessed but not initialized, reinitializing...")
|
78
|
+
self._initialize_session()
|
79
|
+
return self._session
|
80
|
+
|
81
|
+
def warm_up_connections(self, base_url, num_connections=3):
|
82
|
+
"""
|
83
|
+
Warm up the connection pool by making lightweight requests to healthcheck endpoint.
|
84
|
+
This can help prevent RemoteDisconnected errors on initial requests.
|
85
|
+
"""
|
86
|
+
if not self._session:
|
87
|
+
return
|
88
|
+
|
89
|
+
# Construct healthcheck URL
|
90
|
+
healthcheck_url = f"{base_url.rstrip('/')}/healthcheck"
|
91
|
+
logger.info(f"Warming up connection pool with {num_connections} connections to {healthcheck_url}")
|
92
|
+
|
93
|
+
for i in range(num_connections):
|
94
|
+
try:
|
95
|
+
# Make a lightweight HEAD request to the healthcheck endpoint to warm up the connection
|
96
|
+
response = self._session.head(healthcheck_url, timeout=10)
|
97
|
+
logger.info(f"Warmup connection {i+1}: Status {response.status_code}")
|
98
|
+
except Exception as e:
|
99
|
+
logger.warning(f"Warmup connection {i+1} failed (this may be normal): {e}")
|
100
|
+
# Ignore other failures during warmup as they're expected
|
101
|
+
continue
|
102
|
+
|
103
|
+
logger.info("Connection pool warmup completed")
|
104
|
+
|
105
|
+
def close(self):
|
106
|
+
"""Close the session"""
|
107
|
+
if self._session:
|
108
|
+
logger.info("Closing HTTP session")
|
109
|
+
self._session.close()
|
110
|
+
self._session = None
|
111
|
+
logger.info("HTTP session closed successfully")
|
112
|
+
else:
|
113
|
+
logger.debug("Close called but session was already None")
|
114
|
+
|
115
|
+
def handle_request_exceptions(self, e, operation_name):
    """Log a request exception with a message specific to its category.

    The isinstance checks run from most to least specific, mirroring the
    exception hierarchy (e.g. NewConnectionError before ConnectionError),
    so each error class produces exactly one tailored log line.
    """
    logger.error(f"Exception occurred during {operation_name}")
    if isinstance(e, (PoolError, MaxRetryError)):
        detail = f"Connection pool exhausted during {operation_name}: {e}"
    elif isinstance(e, NewConnectionError):
        detail = f"Failed to establish new connection during {operation_name}: {e}"
    elif isinstance(e, RemoteDisconnected):
        detail = f"Remote connection closed unexpectedly during {operation_name}: {e}"
    elif isinstance(e, ConnectionError):
        detail = f"Connection error during {operation_name}: {e}"
    elif isinstance(e, Timeout):
        detail = f"Request timeout during {operation_name}: {e}"
    else:
        detail = f"Unexpected error during {operation_name}: {e}"
    logger.error(detail)
|
131
|
+
def make_request_with_retry(self, method, url, **kwargs):
    """
    Make an HTTP request with additional retry logic for RemoteDisconnected
    errors that may not be caught by urllib3's retry mechanism.

    Retries up to 3 attempts with exponential backoff (1s, then 2s waits).

    Args:
        method: HTTP method name, e.g. "GET" or "POST".
        url: Target URL.
        **kwargs: Passed straight through to the session's request().

    Returns:
        The response from the first successful attempt.

    Raises:
        RemoteDisconnected / ConnectionError: re-raised unchanged after
        the final failed attempt.
    """
    # Hoisted out of the retry loop: the original re-imported `time` on
    # every failed attempt. Kept function-scoped to match the file's style.
    import time

    max_retries = 3
    for attempt in range(max_retries):
        try:
            response = self._session.request(method, url, **kwargs)
            return response
        except (RemoteDisconnected, ConnectionError) as e:
            logger.warning(f"Connection error on attempt {attempt + 1}/{max_retries}: {e}")
            if attempt == max_retries - 1:
                # Out of retries: surface the original error to the caller.
                raise
            # Wait before retrying (exponential backoff)
            wait_time = 2 ** attempt
            logger.info(f"Retrying in {wait_time} seconds...")
            time.sleep(wait_time)
|
152
|
+
|
153
|
+
# Global session manager instance
# Module-level singleton: importers share one SessionManager (and hence one
# HTTP connection pool) rather than creating a session per upload.
logger.info("Creating global SessionManager instance")
session_manager = SessionManager()
# NOTE(review): the object id is logged, presumably to correlate log lines
# when multiple processes each create their own instance — confirm intent.
logger.info(f"Global SessionManager instance created with ID: {id(session_manager)}")
|
@@ -22,6 +22,7 @@ from typing import Dict, Any, Optional
|
|
22
22
|
import threading
|
23
23
|
import uuid
|
24
24
|
|
25
|
+
|
25
26
|
# Set up logging
|
26
27
|
log_dir = os.path.join(tempfile.gettempdir(), "ragaai_logs")
|
27
28
|
os.makedirs(log_dir, exist_ok=True)
|
@@ -49,11 +50,13 @@ try:
|
|
49
50
|
from ragaai_catalyst.tracers.agentic_tracing.upload.upload_code import upload_code
|
50
51
|
# from ragaai_catalyst.tracers.agentic_tracing.upload.upload_trace_metric import upload_trace_metric
|
51
52
|
from ragaai_catalyst.tracers.agentic_tracing.utils.create_dataset_schema import create_dataset_schema_with_trace
|
53
|
+
from ragaai_catalyst.tracers.agentic_tracing.upload.session_manager import session_manager
|
52
54
|
from ragaai_catalyst import RagaAICatalyst
|
53
55
|
IMPORTS_AVAILABLE = True
|
54
56
|
except ImportError:
|
55
57
|
logger.warning("RagaAI Catalyst imports not available - running in test mode")
|
56
58
|
IMPORTS_AVAILABLE = False
|
59
|
+
session_manager = None
|
57
60
|
|
58
61
|
# Define task queue directory
|
59
62
|
QUEUE_DIR = os.path.join(tempfile.gettempdir(), "ragaai_tasks")
|
@@ -72,6 +75,10 @@ _executor_lock = threading.Lock()
|
|
72
75
|
_futures: Dict[str, Any] = {}
|
73
76
|
_futures_lock = threading.Lock()
|
74
77
|
|
78
|
+
# Dataset creation cache to avoid redundant API calls
# Maps "<dataset>#<project>#<base_url>" keys (see _generate_dataset_cache_key)
# to {'timestamp': float, 'response': Any} entries; all reads and writes are
# guarded by _dataset_cache_lock for thread safety.
_dataset_cache: Dict[str, Dict[str, Any]] = {}
_dataset_cache_lock = threading.Lock()
DATASET_CACHE_DURATION = 600 # 10 minutes in seconds
|
75
82
|
|
76
83
|
_cleanup_lock = threading.Lock()
|
77
84
|
_last_cleanup = 0
|
@@ -88,7 +95,7 @@ def get_executor(max_workers=None):
|
|
88
95
|
if _executor is None:
|
89
96
|
# Calculate optimal worker count
|
90
97
|
if max_workers is None:
|
91
|
-
max_workers = min(
|
98
|
+
max_workers = min(8, (os.cpu_count() or 1) * 4)
|
92
99
|
|
93
100
|
logger.info(f"Creating ThreadPoolExecutor with {max_workers} workers")
|
94
101
|
_executor = concurrent.futures.ThreadPoolExecutor(
|
@@ -110,9 +117,57 @@ def generate_unique_task_id():
|
|
110
117
|
unique_id = str(uuid.uuid4())[:8] # Short UUID
|
111
118
|
return f"task_{int(time.time())}_{os.getpid()}_{counter}_{unique_id}"
|
112
119
|
|
120
|
+
def _generate_dataset_cache_key(dataset_name: str, project_name: str, base_url: str) -> str:
|
121
|
+
"""Generate a unique cache key for dataset creation"""
|
122
|
+
return f"{dataset_name}#{project_name}#{base_url}"
|
123
|
+
|
124
|
+
def _is_dataset_cached(cache_key: str) -> bool:
    """Return True when a still-valid cache entry exists for the key.

    An entry older than DATASET_CACHE_DURATION is treated as a miss and
    evicted as a side effect, so the next call recreates the dataset.
    """
    with _dataset_cache_lock:
        if cache_key not in _dataset_cache:
            return False

        entry = _dataset_cache[cache_key]
        age = time.time() - entry.get('timestamp', 0)

        # Fresh enough (within 10 minutes): report a hit.
        if age <= DATASET_CACHE_DURATION:
            logger.info(f"Dataset creation cache hit for key: {cache_key}")
            return True

        # Stale: drop the entry before reporting a miss.
        logger.info(f"Dataset creation cache expired for key: {cache_key}")
        del _dataset_cache[cache_key]
        return False
|
143
|
+
|
144
|
+
def _cache_dataset_creation(cache_key: str, response: Any) -> None:
    """Record a successful dataset-creation response under the given key."""
    with _dataset_cache_lock:
        # Timestamp is taken under the lock so entry age and insertion
        # order stay consistent with concurrent readers.
        _dataset_cache[cache_key] = {
            'response': response,
            'timestamp': time.time(),
        }
|
151
|
+
|
152
|
+
def _cleanup_expired_cache_entries() -> None:
    """Evict every cache entry older than DATASET_CACHE_DURATION."""
    now = time.time()
    with _dataset_cache_lock:
        # Collect the stale keys first, then delete: mutating the dict
        # while iterating it would raise RuntimeError.
        stale = [
            key
            for key, entry in _dataset_cache.items()
            if now - entry.get('timestamp', 0) > DATASET_CACHE_DURATION
        ]

        for key in stale:
            del _dataset_cache[key]

        if stale:
            logger.info(f"Cleaned up {len(stale)} expired dataset cache entries")
|
167
|
+
|
113
168
|
def process_upload(task_id: str, filepath: str, hash_id: str, zip_path: str,
|
114
169
|
project_name: str, project_id: str, dataset_name: str,
|
115
|
-
user_details: Dict[str, Any], base_url: str, timeout=120, fail_on_trace_error=True) -> Dict[str, Any]:
|
170
|
+
user_details: Dict[str, Any], base_url: str, tracer_type, timeout=120, fail_on_trace_error=True) -> Dict[str, Any]:
|
116
171
|
"""
|
117
172
|
Process a single upload task
|
118
173
|
|
@@ -165,20 +220,36 @@ def process_upload(task_id: str, filepath: str, hash_id: str, zip_path: str,
|
|
165
220
|
save_task_status(result)
|
166
221
|
return result
|
167
222
|
|
168
|
-
# Step 1: Create dataset schema
|
223
|
+
# Step 1: Create dataset schema (with caching)
|
169
224
|
logger.info(f"Creating dataset schema for {dataset_name} with base_url: {base_url} and timeout: {timeout}")
|
170
|
-
|
171
|
-
|
172
|
-
|
173
|
-
|
174
|
-
|
175
|
-
|
176
|
-
|
177
|
-
|
178
|
-
|
179
|
-
|
180
|
-
|
181
|
-
|
225
|
+
|
226
|
+
# Generate cache key and check if dataset creation is already cached
|
227
|
+
cache_key = _generate_dataset_cache_key(dataset_name, project_name, base_url)
|
228
|
+
|
229
|
+
if _is_dataset_cached(cache_key):
|
230
|
+
logger.info(f"Dataset schema creation skipped (cached) for {dataset_name}")
|
231
|
+
else:
|
232
|
+
try:
|
233
|
+
# Clean up expired cache entries periodically
|
234
|
+
# _cleanup_expired_cache_entries()
|
235
|
+
|
236
|
+
response = create_dataset_schema_with_trace(
|
237
|
+
dataset_name=dataset_name,
|
238
|
+
project_name=project_name,
|
239
|
+
base_url=base_url,
|
240
|
+
user_details=user_details,
|
241
|
+
timeout=timeout
|
242
|
+
)
|
243
|
+
logger.info(f"Dataset schema created: {response}")
|
244
|
+
|
245
|
+
# Cache the response only if status code is 200
|
246
|
+
if response and hasattr(response, 'status_code') and response.status_code in [200, 201]:
|
247
|
+
_cache_dataset_creation(cache_key, response)
|
248
|
+
logger.info(f"Response cached successfully for dataset: {dataset_name} and key: {cache_key}")
|
249
|
+
|
250
|
+
except Exception as e:
|
251
|
+
logger.error(f"Error creating dataset schema: {e}")
|
252
|
+
# Continue with other steps
|
182
253
|
|
183
254
|
# Step 2: Upload trace metrics
|
184
255
|
# if filepath and os.path.exists(filepath):
|
@@ -238,28 +309,34 @@ def process_upload(task_id: str, filepath: str, hash_id: str, zip_path: str,
|
|
238
309
|
logger.error(error_msg)
|
239
310
|
|
240
311
|
# Step 4: Upload code hash
|
241
|
-
if
|
242
|
-
logger.info(f"
|
243
|
-
|
244
|
-
|
245
|
-
|
246
|
-
|
247
|
-
|
248
|
-
|
249
|
-
|
250
|
-
|
251
|
-
|
252
|
-
|
253
|
-
|
254
|
-
|
255
|
-
|
256
|
-
|
257
|
-
|
312
|
+
if tracer_type.startswith("agentic/"):
|
313
|
+
logger.info(f"Tracer type '{tracer_type}' matches agentic pattern, proceeding with code upload")
|
314
|
+
if hash_id and zip_path and os.path.exists(zip_path):
|
315
|
+
logger.info(f"Uploading code hash {hash_id} with base_url: {base_url} and timeout: {timeout}")
|
316
|
+
try:
|
317
|
+
response = upload_code(
|
318
|
+
hash_id=hash_id,
|
319
|
+
zip_path=zip_path,
|
320
|
+
project_name=project_name,
|
321
|
+
dataset_name=dataset_name,
|
322
|
+
base_url=base_url,
|
323
|
+
timeout=timeout
|
324
|
+
)
|
325
|
+
if response is None:
|
326
|
+
error_msg = "Code hash not uploaded"
|
327
|
+
logger.error(error_msg)
|
328
|
+
else:
|
329
|
+
logger.info(f"Code hash uploaded successfully: {response}")
|
330
|
+
except Exception as e:
|
331
|
+
logger.error(f"Error uploading code hash: {e}")
|
332
|
+
else:
|
333
|
+
logger.warning(f"Code zip {zip_path} not found, skipping code upload")
|
334
|
+
|
258
335
|
# Mark task as completed
|
259
336
|
result["status"] = STATUS_COMPLETED
|
260
337
|
result["end_time"] = datetime.now().isoformat()
|
261
338
|
logger.info(f"Task {task_id} completed successfully")
|
262
|
-
|
339
|
+
|
263
340
|
except Exception as e:
|
264
341
|
logger.error(f"Error processing task {task_id}: {e}")
|
265
342
|
result["status"] = STATUS_FAILED
|
@@ -302,7 +379,8 @@ def save_task_status(task_status: Dict[str, Any]):
|
|
302
379
|
with open(status_path, "w") as f:
|
303
380
|
json.dump(task_status, f, indent=2)
|
304
381
|
|
305
|
-
def submit_upload_task(filepath, hash_id, zip_path, project_name, project_id, dataset_name, user_details, base_url,
|
382
|
+
def submit_upload_task(filepath, hash_id, zip_path, project_name, project_id, dataset_name, user_details, base_url,
|
383
|
+
tracer_type, timeout=120):
|
306
384
|
"""
|
307
385
|
Submit a new upload task using futures.
|
308
386
|
|
@@ -349,6 +427,7 @@ def submit_upload_task(filepath, hash_id, zip_path, project_name, project_id, da
|
|
349
427
|
dataset_name=dataset_name,
|
350
428
|
user_details=user_details,
|
351
429
|
base_url=base_url,
|
430
|
+
tracer_type = tracer_type,
|
352
431
|
timeout=timeout,
|
353
432
|
fail_on_trace_error=True
|
354
433
|
)
|
@@ -379,6 +458,7 @@ def submit_upload_task(filepath, hash_id, zip_path, project_name, project_id, da
|
|
379
458
|
dataset_name=dataset_name,
|
380
459
|
user_details=user_details,
|
381
460
|
base_url=base_url,
|
461
|
+
tracer_type=tracer_type,
|
382
462
|
timeout=timeout,
|
383
463
|
fail_on_trace_error=True
|
384
464
|
)
|
@@ -550,6 +630,14 @@ def shutdown(timeout=120):
|
|
550
630
|
|
551
631
|
_executor = None
|
552
632
|
|
633
|
+
# Close the session manager to clean up HTTP connections
|
634
|
+
if session_manager is not None:
|
635
|
+
try:
|
636
|
+
session_manager.close()
|
637
|
+
logger.info("Session manager closed successfully")
|
638
|
+
except Exception as e:
|
639
|
+
logger.error(f"Error closing session manager: {e}")
|
640
|
+
|
553
641
|
# Register shutdown handler
|
554
642
|
atexit.register(shutdown)
|
555
643
|
|