ragaai-catalyst 2.2.4.1b1__tar.gz → 2.2.4.1b3__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {ragaai_catalyst-2.2.4.1b1/ragaai_catalyst.egg-info → ragaai_catalyst-2.2.4.1b3}/PKG-INFO +1 -1
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b3}/pyproject.toml +1 -1
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b3}/ragaai_catalyst/tracers/agentic_tracing/upload/trace_uploader.py +34 -122
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b3}/ragaai_catalyst/tracers/agentic_tracing/upload/upload_agentic_traces.py +48 -45
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b3}/ragaai_catalyst/tracers/agentic_tracing/upload/upload_code.py +42 -79
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b3}/ragaai_catalyst/tracers/exporters/ragaai_trace_exporter.py +40 -15
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b3}/ragaai_catalyst/tracers/tracer.py +23 -1
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b3/ragaai_catalyst.egg-info}/PKG-INFO +1 -1
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b3}/ragaai_catalyst.egg-info/SOURCES.txt +0 -1
- ragaai_catalyst-2.2.4.1b1/ragaai_catalyst/tracers/agentic_tracing/upload/session_manager.py +0 -92
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b3}/.github/ISSUE_TEMPLATE/bug_report.md +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b3}/.github/ISSUE_TEMPLATE/feature_request.md +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b3}/.github/PULL_REQUEST_TEMPLATE.md +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b3}/.github/workflows/ci.yml +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b3}/.gitignore +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b3}/.gitmodules +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b3}/LICENSE +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b3}/Quickstart.md +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b3}/README.md +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b3}/docs/agentic_tracing.md +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b3}/docs/dataset_management.md +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b3}/docs/img/autheticate.gif +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b3}/docs/img/create_project.gif +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b3}/docs/img/custom_metrics.png +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b3}/docs/img/dataset.gif +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b3}/docs/img/dataset.png +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b3}/docs/img/evaluation.gif +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b3}/docs/img/evaluation.png +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b3}/docs/img/guardrails.png +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b3}/docs/img/last_main.png +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b3}/docs/img/main.png +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b3}/docs/img/projects_new.png +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b3}/docs/img/trace_comp.png +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b3}/docs/prompt_management.md +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b3}/docs/trace_management.md +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b3}/examples/all_llm_provider/all_llm_provider.py +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b3}/examples/all_llm_provider/config.py +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b3}/examples/all_llm_provider/run_all_llm_provider.py +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b3}/examples/crewai/scifi_writer/README.md +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b3}/examples/crewai/scifi_writer/requirements.txt +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b3}/examples/crewai/scifi_writer/sample.env +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b3}/examples/crewai/scifi_writer/scifi_writer.py +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b3}/examples/custom_agents/travel_agent/agents.py +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b3}/examples/custom_agents/travel_agent/config.py +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b3}/examples/custom_agents/travel_agent/main.py +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b3}/examples/custom_agents/travel_agent/tools.py +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b3}/examples/haystack/news_fetching/README.md +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b3}/examples/haystack/news_fetching/news_fetching.py +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b3}/examples/haystack/news_fetching/requirements.txt +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b3}/examples/langchain/medical_rag/data/medical_texts/handbook1.pdf +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b3}/examples/langchain/medical_rag/data/medical_texts/handbook2.pdf +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b3}/examples/langchain/medical_rag/data/symptom_disease_map.csv +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b3}/examples/langchain/medical_rag/diagnosis_agent.py +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b3}/examples/langchain/medical_rag/requirements.txt +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b3}/examples/langchain/medical_rag/sample.env +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b3}/examples/langgraph/personal_research_assistant/README.md +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b3}/examples/langgraph/personal_research_assistant/requirements.txt +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b3}/examples/langgraph/personal_research_assistant/research_assistant.py +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b3}/examples/langgraph/personal_research_assistant/sample.env +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b3}/examples/llamaindex_examples/legal_research_rag/legal_data/cases/ca_overtime_2021.pdf +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b3}/examples/llamaindex_examples/legal_research_rag/legal_data/cases/fl_ada_2022.pdf +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b3}/examples/llamaindex_examples/legal_research_rag/legal_data/statutes.csv +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b3}/examples/llamaindex_examples/legal_research_rag/legal_rag.py +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b3}/examples/llamaindex_examples/legal_research_rag/requirements.txt +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b3}/examples/llamaindex_examples/legal_research_rag/sample.env +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b3}/examples/openai_agents_sdk/email_data_extraction_agent/README.md +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b3}/examples/openai_agents_sdk/email_data_extraction_agent/data_extraction_email.py +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b3}/examples/openai_agents_sdk/email_data_extraction_agent/requirements.txt +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b3}/examples/openai_agents_sdk/email_data_extraction_agent/sample.env +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b3}/examples/openai_agents_sdk/youtube_summary_agent/README.md +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b3}/examples/openai_agents_sdk/youtube_summary_agent/requirements.txt +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b3}/examples/openai_agents_sdk/youtube_summary_agent/sample.env +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b3}/examples/openai_agents_sdk/youtube_summary_agent/youtube_summary_agent.py +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b3}/examples/pii_masking_example/llamaindex_agentic_fastapi/app.py +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b3}/examples/pii_masking_example/llamaindex_agentic_fastapi/app_presidio.py +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b3}/examples/pii_masking_example/llamaindex_agentic_fastapi/request.py +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b3}/examples/pii_masking_example/llamaindex_agentic_fastapi/requirements.txt +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b3}/examples/smolagents/most_upvoted_paper/README.md +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b3}/examples/smolagents/most_upvoted_paper/most_upvoted_paper.py +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b3}/examples/smolagents/most_upvoted_paper/requirements.txt +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b3}/examples/smolagents/most_upvoted_paper/sample.env +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b3}/ragaai_catalyst/__init__.py +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b3}/ragaai_catalyst/_version.py +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b3}/ragaai_catalyst/dataset.py +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b3}/ragaai_catalyst/evaluation.py +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b3}/ragaai_catalyst/experiment.py +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b3}/ragaai_catalyst/guard_executor.py +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b3}/ragaai_catalyst/guardrails_manager.py +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b3}/ragaai_catalyst/internal_api_completion.py +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b3}/ragaai_catalyst/prompt_manager.py +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b3}/ragaai_catalyst/proxy_call.py +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b3}/ragaai_catalyst/ragaai_catalyst.py +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b3}/ragaai_catalyst/redteaming/__init__.py +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b3}/ragaai_catalyst/redteaming/config/detectors.toml +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b3}/ragaai_catalyst/redteaming/data_generator/scenario_generator.py +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b3}/ragaai_catalyst/redteaming/data_generator/test_case_generator.py +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b3}/ragaai_catalyst/redteaming/evaluator.py +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b3}/ragaai_catalyst/redteaming/llm_generator.py +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b3}/ragaai_catalyst/redteaming/llm_generator_old.py +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b3}/ragaai_catalyst/redteaming/red_teaming.py +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b3}/ragaai_catalyst/redteaming/requirements.txt +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b3}/ragaai_catalyst/redteaming/tests/grok.ipynb +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b3}/ragaai_catalyst/redteaming/tests/stereotype.ipynb +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b3}/ragaai_catalyst/redteaming/upload_result.py +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b3}/ragaai_catalyst/redteaming/utils/issue_description.py +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b3}/ragaai_catalyst/redteaming/utils/rt.png +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b3}/ragaai_catalyst/redteaming_old.py +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b3}/ragaai_catalyst/synthetic_data_generation.py +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b3}/ragaai_catalyst/tracers/__init__.py +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b3}/ragaai_catalyst/tracers/agentic_tracing/README.md +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b3}/ragaai_catalyst/tracers/agentic_tracing/__init__.py +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b3}/ragaai_catalyst/tracers/agentic_tracing/data/__init__.py +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b3}/ragaai_catalyst/tracers/agentic_tracing/data/data_structure.py +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b3}/ragaai_catalyst/tracers/agentic_tracing/tests/FinancialAnalysisSystem.ipynb +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b3}/ragaai_catalyst/tracers/agentic_tracing/tests/GameActivityEventPlanner.ipynb +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b3}/ragaai_catalyst/tracers/agentic_tracing/tests/TravelPlanner.ipynb +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b3}/ragaai_catalyst/tracers/agentic_tracing/tests/__init__.py +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b3}/ragaai_catalyst/tracers/agentic_tracing/tests/ai_travel_agent.py +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b3}/ragaai_catalyst/tracers/agentic_tracing/tests/unique_decorator_test.py +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b3}/ragaai_catalyst/tracers/agentic_tracing/tracers/__init__.py +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b3}/ragaai_catalyst/tracers/agentic_tracing/tracers/agent_tracer.py +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b3}/ragaai_catalyst/tracers/agentic_tracing/tracers/base.py +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b3}/ragaai_catalyst/tracers/agentic_tracing/tracers/custom_tracer.py +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b3}/ragaai_catalyst/tracers/agentic_tracing/tracers/langgraph_tracer.py +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b3}/ragaai_catalyst/tracers/agentic_tracing/tracers/llm_tracer.py +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b3}/ragaai_catalyst/tracers/agentic_tracing/tracers/main_tracer.py +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b3}/ragaai_catalyst/tracers/agentic_tracing/tracers/network_tracer.py +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b3}/ragaai_catalyst/tracers/agentic_tracing/tracers/tool_tracer.py +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b3}/ragaai_catalyst/tracers/agentic_tracing/tracers/user_interaction_tracer.py +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b3}/ragaai_catalyst/tracers/agentic_tracing/upload/__init__.py +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b3}/ragaai_catalyst/tracers/agentic_tracing/upload/upload_local_metric.py +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b3}/ragaai_catalyst/tracers/agentic_tracing/utils/__init__.py +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b3}/ragaai_catalyst/tracers/agentic_tracing/utils/api_utils.py +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b3}/ragaai_catalyst/tracers/agentic_tracing/utils/create_dataset_schema.py +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b3}/ragaai_catalyst/tracers/agentic_tracing/utils/file_name_tracker.py +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b3}/ragaai_catalyst/tracers/agentic_tracing/utils/generic.py +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b3}/ragaai_catalyst/tracers/agentic_tracing/utils/get_user_trace_metrics.py +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b3}/ragaai_catalyst/tracers/agentic_tracing/utils/llm_utils.py +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b3}/ragaai_catalyst/tracers/agentic_tracing/utils/model_costs.json +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b3}/ragaai_catalyst/tracers/agentic_tracing/utils/span_attributes.py +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b3}/ragaai_catalyst/tracers/agentic_tracing/utils/supported_llm_provider.toml +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b3}/ragaai_catalyst/tracers/agentic_tracing/utils/system_monitor.py +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b3}/ragaai_catalyst/tracers/agentic_tracing/utils/trace_utils.py +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b3}/ragaai_catalyst/tracers/agentic_tracing/utils/unique_decorator.py +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b3}/ragaai_catalyst/tracers/agentic_tracing/utils/zip_list_of_unique_files.py +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b3}/ragaai_catalyst/tracers/distributed.py +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b3}/ragaai_catalyst/tracers/exporters/__init__.py +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b3}/ragaai_catalyst/tracers/exporters/dynamic_trace_exporter.py +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b3}/ragaai_catalyst/tracers/exporters/file_span_exporter.py +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b3}/ragaai_catalyst/tracers/exporters/raga_exporter.py +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b3}/ragaai_catalyst/tracers/instrumentators/__init__.py +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b3}/ragaai_catalyst/tracers/langchain_callback.py +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b3}/ragaai_catalyst/tracers/llamaindex_callback.py +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b3}/ragaai_catalyst/tracers/llamaindex_instrumentation.py +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b3}/ragaai_catalyst/tracers/upload_traces.py +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b3}/ragaai_catalyst/tracers/utils/__init__.py +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b3}/ragaai_catalyst/tracers/utils/convert_langchain_callbacks_output.py +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b3}/ragaai_catalyst/tracers/utils/convert_llama_instru_callback.py +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b3}/ragaai_catalyst/tracers/utils/extraction_logic_llama_index.py +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b3}/ragaai_catalyst/tracers/utils/langchain_tracer_extraction_logic.py +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b3}/ragaai_catalyst/tracers/utils/model_prices_and_context_window_backup.json +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b3}/ragaai_catalyst/tracers/utils/rag_extraction_logic_final.py +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b3}/ragaai_catalyst/tracers/utils/rag_trace_json_converter.py +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b3}/ragaai_catalyst/tracers/utils/trace_json_converter.py +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b3}/ragaai_catalyst/tracers/utils/utils.py +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b3}/ragaai_catalyst/utils.py +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b3}/ragaai_catalyst.egg-info/dependency_links.txt +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b3}/ragaai_catalyst.egg-info/requires.txt +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b3}/ragaai_catalyst.egg-info/top_level.txt +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b3}/requirements.txt +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b3}/setup.cfg +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b3}/test_report_20250407_183101.txt +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b3}/tests/README.md +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b3}/tests/environment.yml +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b3}/tests/examples/__init__.py +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b3}/tests/examples/all_llm_provider/all_llm_provider.py +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b3}/tests/examples/all_llm_provider/config.py +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b3}/tests/examples/all_llm_provider/test_all_llm_provider.py +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b3}/tests/examples/crewai/scifi_writer/sci_fi_story.md +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b3}/tests/examples/crewai/scifi_writer/scifi_writer.py +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b3}/tests/examples/crewai/scifi_writer/test_scifi_writer.py +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b3}/tests/examples/custom_agents/travel_agent/agents.py +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b3}/tests/examples/custom_agents/travel_agent/config.py +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b3}/tests/examples/custom_agents/travel_agent/main.py +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b3}/tests/examples/custom_agents/travel_agent/test_travel_agent.py +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b3}/tests/examples/custom_agents/travel_agent/tools.py +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b3}/tests/examples/haystack/news_fetching/news_fetching.py +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b3}/tests/examples/haystack/news_fetching/test_news_fetching.py +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b3}/tests/examples/langchain/medical_rag/data/medical_texts/handbook1.pdf +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b3}/tests/examples/langchain/medical_rag/data/medical_texts/handbook2.pdf +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b3}/tests/examples/langchain/medical_rag/data/symptom_disease_map.csv +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b3}/tests/examples/langchain/medical_rag/diagnosis_agent.py +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b3}/tests/examples/langchain/medical_rag/test_diagnosis_agent.py +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b3}/tests/examples/langgraph/personal_research_assistant/research_assistant.py +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b3}/tests/examples/langgraph/personal_research_assistant/test_research_assistant.py +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b3}/tests/examples/llamaindex_examples/legal_research_rag/legal_data/cases/ca_overtime_2021.pdf +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b3}/tests/examples/llamaindex_examples/legal_research_rag/legal_data/cases/fl_ada_2022.pdf +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b3}/tests/examples/llamaindex_examples/legal_research_rag/legal_data/statutes.csv +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b3}/tests/examples/llamaindex_examples/legal_research_rag/legal_rag.py +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b3}/tests/examples/llamaindex_examples/legal_research_rag/test_legal_rag.py +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b3}/tests/examples/smolagents/most_upvoted_paper/most_upvoted_paper.py +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b3}/tests/examples/smolagents/most_upvoted_paper/paper.pdf +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b3}/tests/examples/smolagents/most_upvoted_paper/test_most_upvoted_paper.py +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b3}/tests/examples/test_utils/get_components.py +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b3}/tests/examples/test_utils/get_trace_data.py +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b3}/tests/run_pytest_and_print_and_save_results.py +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b3}/tests/table_result.png +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b3}/tests/test_catalyst/test_base_tracer_add_metrics.py +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b3}/tests/test_catalyst/test_base_tracer_metrics.py +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b3}/tests/test_catalyst/test_data/util_synthetic_data_doc.pdf +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b3}/tests/test_catalyst/test_data/util_synthetic_data_invalid.csv +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b3}/tests/test_catalyst/test_data/util_synthetic_data_valid.csv +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b3}/tests/test_catalyst/test_data/util_test_dataset.csv +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b3}/tests/test_catalyst/test_data/util_test_langchain_tracing.pdf +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b3}/tests/test_catalyst/test_dataset.py +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b3}/tests/test_catalyst/test_evaluation.py +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b3}/tests/test_catalyst/test_evaluation_metrics.py +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b3}/tests/test_catalyst/test_prompt_manager.py +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b3}/tests/test_catalyst/test_synthetic_data_generation.py +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b3}/tests/test_catalyst/test_the_configuration.py +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b3}/tests_requirements.txt +0 -0
PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: ragaai_catalyst
-Version: 2.2.4.1b1
+Version: 2.2.4.1b3
 Summary: RAGA AI CATALYST
 Author-email: Kiran Scaria <kiran.scaria@raga.ai>, Kedar Gaikwad <kedar.gaikwad@raga.ai>, Dushyant Mahajan <dushyant.mahajan@raga.ai>, Siddhartha Kosti <siddhartha.kosti@raga.ai>, Ritika Goel <ritika.goel@raga.ai>, Vijay Chaurasia <vijay.chaurasia@raga.ai>, Tushar Kumar <tushar.kumar@raga.ai>, Rishabh Pandey <rishabh.pandey@raga.ai>, Jyotsana C G <jyotsana@raga.ai>
 Requires-Python: <=3.13.2,>=3.10
pyproject.toml
@@ -8,7 +8,7 @@ description = "RAGA AI CATALYST"
 readme = "README.md"
 requires-python = ">=3.10,<=3.13.2"
 # license = {file = "LICENSE"}
-version = "2.2.4.1.beta.1"
+version = "2.2.4.1.beta.3"
 authors = [
     {name = "Kiran Scaria", email = "kiran.scaria@raga.ai"},
     {name = "Kedar Gaikwad", email = "kedar.gaikwad@raga.ai"},
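The only change in the two metadata files is the version bump. The pyproject string `2.2.4.1.beta.3` and the PKG-INFO string `2.2.4.1b3` are the same version: PEP 440 normalizes the `.beta.` spelling to `b`. A hedged way to confirm which build ends up installed (the `packaging` dependency and the distribution-name spelling here are assumptions for illustration):

```python
# Illustrative check only; assumes the `packaging` library is available and
# that the distribution is installed under the name "ragaai_catalyst".
from importlib.metadata import version
from packaging.version import Version

# PEP 440 normalization: the pyproject spelling and the PKG-INFO spelling
# parse to the same version object.
assert Version("2.2.4.1.beta.3") == Version("2.2.4.1b3")

print(version("ragaai_catalyst"))  # expected: 2.2.4.1b3 after upgrading
```

Because this is a pre-release, `pip install ragaai-catalyst==2.2.4.1b3` (or `pip install --pre ragaai-catalyst`) is needed to select it explicitly.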
ragaai_catalyst/tracers/agentic_tracing/upload/trace_uploader.py
@@ -22,7 +22,6 @@ from typing import Dict, Any, Optional
 import threading
 import uuid
 
-
 # Set up logging
 log_dir = os.path.join(tempfile.gettempdir(), "ragaai_logs")
 os.makedirs(log_dir, exist_ok=True)
@@ -50,13 +49,11 @@ try:
     from ragaai_catalyst.tracers.agentic_tracing.upload.upload_code import upload_code
     # from ragaai_catalyst.tracers.agentic_tracing.upload.upload_trace_metric import upload_trace_metric
     from ragaai_catalyst.tracers.agentic_tracing.utils.create_dataset_schema import create_dataset_schema_with_trace
-    from ragaai_catalyst.tracers.agentic_tracing.upload.session_manager import session_manager
     from ragaai_catalyst import RagaAICatalyst
     IMPORTS_AVAILABLE = True
 except ImportError:
     logger.warning("RagaAI Catalyst imports not available - running in test mode")
     IMPORTS_AVAILABLE = False
-    session_manager = None
 
 # Define task queue directory
 QUEUE_DIR = os.path.join(tempfile.gettempdir(), "ragaai_tasks")
@@ -75,10 +72,6 @@ _executor_lock = threading.Lock()
 _futures: Dict[str, Any] = {}
 _futures_lock = threading.Lock()
 
-# Dataset creation cache to avoid redundant API calls
-_dataset_cache: Dict[str, Dict[str, Any]] = {}
-_dataset_cache_lock = threading.Lock()
-DATASET_CACHE_DURATION = 600  # 10 minutes in seconds
 
 _cleanup_lock = threading.Lock()
 _last_cleanup = 0
@@ -95,7 +88,7 @@ def get_executor(max_workers=None):
     if _executor is None:
         # Calculate optimal worker count
         if max_workers is None:
-            max_workers = min(
+            max_workers = min(32, (os.cpu_count() or 1) * 4)
 
         logger.info(f"Creating ThreadPoolExecutor with {max_workers} workers")
         _executor = concurrent.futures.ThreadPoolExecutor(
@@ -117,57 +110,9 @@ def generate_unique_task_id():
     unique_id = str(uuid.uuid4())[:8]  # Short UUID
     return f"task_{int(time.time())}_{os.getpid()}_{counter}_{unique_id}"
 
-def _generate_dataset_cache_key(dataset_name: str, project_name: str, base_url: str) -> str:
-    """Generate a unique cache key for dataset creation"""
-    return f"{dataset_name}#{project_name}#{base_url}"
-
-def _is_dataset_cached(cache_key: str) -> bool:
-    """Check if dataset creation is cached and still valid"""
-    with _dataset_cache_lock:
-        if cache_key not in _dataset_cache:
-            return False
-
-        cache_entry = _dataset_cache[cache_key]
-        cache_time = cache_entry.get('timestamp', 0)
-        current_time = time.time()
-
-        # Check if cache is still valid (within 10 minutes)
-        if current_time - cache_time <= DATASET_CACHE_DURATION:
-            logger.info(f"Dataset creation cache hit for key: {cache_key}")
-            return True
-        else:
-            # Cache expired, remove it
-            logger.info(f"Dataset creation cache expired for key: {cache_key}")
-            del _dataset_cache[cache_key]
-            return False
-
-def _cache_dataset_creation(cache_key: str, response: Any) -> None:
-    """Cache successful dataset creation"""
-    with _dataset_cache_lock:
-        _dataset_cache[cache_key] = {
-            'timestamp': time.time(),
-            'response': response
-        }
-
-def _cleanup_expired_cache_entries() -> None:
-    """Remove expired cache entries"""
-    current_time = time.time()
-    with _dataset_cache_lock:
-        expired_keys = []
-        for cache_key, cache_entry in _dataset_cache.items():
-            cache_time = cache_entry.get('timestamp', 0)
-            if current_time - cache_time > DATASET_CACHE_DURATION:
-                expired_keys.append(cache_key)
-
-        for key in expired_keys:
-            del _dataset_cache[key]
-
-        if expired_keys:
-            logger.info(f"Cleaned up {len(expired_keys)} expired dataset cache entries")
-
 def process_upload(task_id: str, filepath: str, hash_id: str, zip_path: str,
                    project_name: str, project_id: str, dataset_name: str,
-                   user_details: Dict[str, Any], base_url: str,
+                   user_details: Dict[str, Any], base_url: str, timeout=120, fail_on_trace_error=True) -> Dict[str, Any]:
     """
     Process a single upload task
 
@@ -220,36 +165,20 @@ def process_upload(task_id: str, filepath: str, hash_id: str, zip_path: str,
             save_task_status(result)
             return result
 
-        # Step 1: Create dataset schema
+        # Step 1: Create dataset schema
         logger.info(f"Creating dataset schema for {dataset_name} with base_url: {base_url} and timeout: {timeout}")
-
-
-
-
-
-
-
-
-
-
-
-
-            dataset_name=dataset_name,
-            project_name=project_name,
-            base_url=base_url,
-            user_details=user_details,
-            timeout=timeout
-        )
-        logger.info(f"Dataset schema created: {response}")
-
-        # Cache the response only if status code is 200
-        if response and hasattr(response, 'status_code') and response.status_code in [200, 201]:
-            _cache_dataset_creation(cache_key, response)
-            logger.info(f"Response cached successfully for dataset: {dataset_name} and key: {cache_key}")
-
-        except Exception as e:
-            logger.error(f"Error creating dataset schema: {e}")
-            # Continue with other steps
+        try:
+            response = create_dataset_schema_with_trace(
+                dataset_name=dataset_name,
+                project_name=project_name,
+                base_url=base_url,
+                user_details=user_details,
+                timeout=timeout
+            )
+            logger.info(f"Dataset schema created: {response}")
+        except Exception as e:
+            logger.error(f"Error creating dataset schema: {e}")
+            # Continue with other steps
 
         # Step 2: Upload trace metrics
         # if filepath and os.path.exists(filepath):
@@ -309,34 +238,28 @@ def process_upload(task_id: str, filepath: str, hash_id: str, zip_path: str,
             logger.error(error_msg)
 
         # Step 4: Upload code hash
-        if
-            logger.info(f"
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-            logger.info(f"Code hash uploaded successfully: {response}")
-        except Exception as e:
-            logger.error(f"Error uploading code hash: {e}")
-        else:
-            logger.warning(f"Code zip {zip_path} not found, skipping code upload")
-
+        if hash_id and zip_path and os.path.exists(zip_path):
+            logger.info(f"Uploading code hash {hash_id} with base_url: {base_url} and timeout: {timeout}")
+            try:
+                response = upload_code(
+                    hash_id=hash_id,
+                    zip_path=zip_path,
+                    project_name=project_name,
+                    dataset_name=dataset_name,
+                    base_url=base_url,
+                    timeout=timeout
+                )
+                logger.info(f"Code hash uploaded: {response}")
+            except Exception as e:
+                logger.error(f"Error uploading code hash: {e}")
+        else:
+            logger.warning(f"Code zip {zip_path} not found, skipping code upload")
+
         # Mark task as completed
         result["status"] = STATUS_COMPLETED
         result["end_time"] = datetime.now().isoformat()
         logger.info(f"Task {task_id} completed successfully")
-
+
     except Exception as e:
         logger.error(f"Error processing task {task_id}: {e}")
         result["status"] = STATUS_FAILED
@@ -379,8 +302,7 @@ def save_task_status(task_status: Dict[str, Any]):
     with open(status_path, "w") as f:
         json.dump(task_status, f, indent=2)
 
-def submit_upload_task(filepath, hash_id, zip_path, project_name, project_id, dataset_name, user_details, base_url,
-                       tracer_type, timeout=120):
+def submit_upload_task(filepath, hash_id, zip_path, project_name, project_id, dataset_name, user_details, base_url, timeout=120):
     """
     Submit a new upload task using futures.
 
@@ -427,7 +349,6 @@ def submit_upload_task(filepath, hash_id, zip_path, project_name, project_id, da
             dataset_name=dataset_name,
             user_details=user_details,
             base_url=base_url,
-            tracer_type = tracer_type,
             timeout=timeout,
             fail_on_trace_error=True
         )
@@ -458,7 +379,6 @@ def submit_upload_task(filepath, hash_id, zip_path, project_name, project_id, da
            dataset_name=dataset_name,
            user_details=user_details,
            base_url=base_url,
-           tracer_type=tracer_type,
            timeout=timeout,
            fail_on_trace_error=True
        )
@@ -630,14 +550,6 @@ def shutdown(timeout=120):
 
     _executor = None
 
-    # Close the session manager to clean up HTTP connections
-    if session_manager is not None:
-        try:
-            session_manager.close()
-            logger.info("Session manager closed successfully")
-        except Exception as e:
-            logger.error(f"Error closing session manager: {e}")
-
 # Register shutdown handler
 atexit.register(shutdown)
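In summary, trace_uploader.py drops the module-level dataset-creation cache and the `session_manager` dependency, removes `tracer_type` from `process_upload`/`submit_upload_task`, wraps the schema call in a plain try/except, and inlines the worker count as `min(32, (os.cpu_count() or 1) * 4)`. Below is a minimal sketch of that lazily created shared-executor pattern; only the sizing formula comes from the diff, while the stub task, `thread_name_prefix`, and function body details are illustrative assumptions rather than the package's actual code.

```python
# Sketch of a lazily created, shared ThreadPoolExecutor for upload tasks.
# Only min(32, (os.cpu_count() or 1) * 4) is taken from the diff; the rest
# (stub task, thread_name_prefix) is illustrative.
import concurrent.futures
import os
import threading

_executor = None
_executor_lock = threading.Lock()


def get_executor(max_workers=None):
    """Lazily create one ThreadPoolExecutor shared by all upload tasks."""
    global _executor
    with _executor_lock:
        if _executor is None:
            if max_workers is None:
                # Scale with CPU count but cap at 32 worker threads.
                max_workers = min(32, (os.cpu_count() or 1) * 4)
            _executor = concurrent.futures.ThreadPoolExecutor(
                max_workers=max_workers, thread_name_prefix="ragaai_upload"
            )
    return _executor


def upload_stub(task_id):
    # Stand-in for process_upload(...), whose new signature takes timeout=120
    # and fail_on_trace_error=True instead of a tracer_type argument.
    return f"{task_id} processed"


future = get_executor().submit(upload_stub, "task_1")
print(future.result())  # task_1 processed
```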
|
ragaai_catalyst/tracers/agentic_tracing/upload/upload_agentic_traces.py
@@ -4,13 +4,13 @@ import os
 import re
 import time
 from urllib.parse import urlparse, urlunparse
-from urllib3.exceptions import PoolError, MaxRetryError, NewConnectionError
-from requests.exceptions import ConnectionError, Timeout, RequestException
-from .session_manager import session_manager
 
 import requests
 
 logger = logging.getLogger(__name__)
+logging_level = (
+    logger.setLevel(logging.DEBUG) if os.getenv("DEBUG") == "1" else logging.INFO
+)
 
 from ragaai_catalyst.ragaai_catalyst import RagaAICatalyst
 
@@ -47,36 +47,42 @@ class UploadAgenticTraces:
             "X-Project-Name": self.project_name,
         }
 
+        logger.debug("Started getting presigned url: ")
+        logger.debug(f"Payload: {payload}")
+        logger.debug(f"Headers: {headers}")
         try:
             start_time = time.time()
             endpoint = f"{self.base_url}/v1/llm/presigned-url"
             # Changed to POST from GET
-            response =
+            response = requests.request(
                 "POST", endpoint, headers=headers, data=payload, timeout=self.timeout
             )
             elapsed_ms = (time.time() - start_time) * 1000
             logger.debug(
                 f"API Call: [POST] {endpoint} | Status: {response.status_code} | Time: {elapsed_ms:.2f}ms"
             )
-
-            if response.status_code == 200:
+            if response.status_code in [200, 201]:
                presignedURLs = response.json()["data"]["presignedUrls"][0]
+                logger.debug(f"Got presigned url: {presignedURLs}")
                presignedurl = self.update_presigned_url(presignedURLs, self.base_url)
+                logger.debug(f"Updated presigned url: {presignedurl}")
                return presignedurl
            else:
                # If POST fails, try GET
-                response =
+                response = requests.request(
                    "GET", endpoint, headers=headers, data=payload, timeout=self.timeout
                )
                elapsed_ms = (time.time() - start_time) * 1000
                logger.debug(
                    f"API Call: [GET] {endpoint} | Status: {response.status_code} | Time: {elapsed_ms:.2f}ms"
                )
-                if response.status_code
+                if response.status_code in [200, 201]:
                    presignedURLs = response.json()["data"]["presignedUrls"][0]
+                    logger.debug(f"Got presigned url: {presignedURLs}")
                    presignedurl = self.update_presigned_url(
                        presignedURLs, self.base_url
                    )
+                    logger.debug(f"Updated presigned url: {presignedurl}")
                    return presignedurl
                elif response.status_code == 401:
                    logger.warning("Received 401 error. Attempting to refresh token.")
@@ -86,7 +92,7 @@ class UploadAgenticTraces:
                        "Authorization": f"Bearer {token}",
                        "X-Project-Name": self.project_name,
                    }
-                    response =
+                    response = requests.request(
                        "POST",
                        endpoint,
                        headers=headers,
@@ -97,11 +103,13 @@ class UploadAgenticTraces:
                    logger.debug(
                        f"API Call: [POST] {endpoint} | Status: {response.status_code} | Time: {elapsed_ms:.2f}ms"
                    )
-                    if response.status_code
+                    if response.status_code in [200, 201]:
                        presignedURLs = response.json()["data"]["presignedUrls"][0]
+                        logger.debug(f"Got presigned url: {presignedURLs}")
                        presignedurl = self.update_presigned_url(
                            presignedURLs, self.base_url
                        )
+                        logger.debug(f"Updated presigned url: {presignedurl}")
                        return presignedurl
                    else:
                        logger.error(
@@ -113,10 +121,8 @@ class UploadAgenticTraces:
                            f"Error while getting presigned url: {response.json()['message']}"
                        )
                        return None
-
-
-            return None
-        except RequestException as e:
+
+        except requests.exceptions.RequestException as e:
            logger.error(f"Error while getting presigned url: {e}")
            return None
 
@@ -143,16 +149,16 @@ class UploadAgenticTraces:
 
        if "blob.core.windows.net" in presignedUrl:  # Azure
            headers["x-ms-blob-type"] = "BlockBlob"
-
+        print("Uploading agentic traces...")
        try:
            with open(filename) as f:
                payload = f.read().replace("\n", "").replace("\r", "").encode()
        except Exception as e:
-
+            print(f"Error while reading file: {e}")
            return False
        try:
            start_time = time.time()
-            response =
+            response = requests.request(
                "PUT", presignedUrl, headers=headers, data=payload, timeout=self.timeout
            )
            elapsed_ms = (time.time() - start_time) * 1000
@@ -162,11 +168,8 @@ class UploadAgenticTraces:
            if response.status_code != 200 or response.status_code != 201:
                return response, response.status_code
            return True
-        except
-
-            return False
-        except RequestException as e:
-            logger.error(f"Error while uploading trace to presigned url: {e}")
+        except requests.exceptions.RequestException as e:
+            print(f"Error while uploading to presigned url: {e}")
            return False
 
    def insert_traces(self, presignedUrl):
@@ -182,18 +185,21 @@ class UploadAgenticTraces:
                "datasetSpans": self._get_dataset_spans(),  # Extra key for agentic traces
            }
        )
+        logger.debug(f"Inserting agentic traces to presigned url: {presignedUrl}")
        try:
            start_time = time.time()
            endpoint = f"{self.base_url}/v1/llm/insert/trace"
-            response =
+            response = requests.request(
                "POST", endpoint, headers=headers, data=payload, timeout=self.timeout
            )
+            logger.debug(f"Payload: {payload}")
+            logger.debug(f"Headers: {headers}")
            elapsed_ms = (time.time() - start_time) * 1000
            logger.debug(
                f"API Call: [POST] {endpoint} | Status: {response.status_code} | Time: {elapsed_ms:.2f}ms"
            )
            if response.status_code in [200, 201]:
-                logger.
+                logger.debug("Successfully inserted traces")
                return True
            elif response.status_code == 401:
                logger.warning("Received 401 error. Attempting to refresh token.")
@@ -203,7 +209,7 @@ class UploadAgenticTraces:
                    "Content-Type": "application/json",
                    "X-Project-Name": self.project_name,
                }
-                response =
+                response = requests.request(
                    "POST",
                    endpoint,
                    headers=headers,
@@ -215,27 +221,24 @@ class UploadAgenticTraces:
                    f"API Call: [POST] {endpoint} | Status: {response.status_code} | Time: {elapsed_ms:.2f}ms"
                )
                if response.status_code in [200, 201]:
-                    logger.
+                    logger.debug("Successfully inserted traces")
                    return True
                else:
-                    logger.
+                    logger.debug("Error while inserting traces")
                    return False
            else:
-                logger.
+                logger.debug("Error while inserting traces")
                return False
-        except
-
-            return
-        except RequestException as e:
-            logger.error(f"Error while inserting traces: {e}")
-            return False
+        except requests.exceptions.RequestException as e:
+            logger.debug(f"Error while inserting traces: {e}")
+            return None
 
    def _get_dataset_spans(self):
        try:
            with open(self.json_file_path) as f:
                data = json.load(f)
        except Exception as e:
-
+            logger.debug(f"Error while reading file: {e}")
            return None
        try:
            spans = data["data"][0]["spans"]
@@ -257,41 +260,41 @@ class UploadAgenticTraces:
                continue
            return dataset_spans
        except Exception as e:
-            logger.
+            logger.debug(f"Error while reading dataset spans: {e}")
            return None
 
    def upload_agentic_traces(self):
        try:
            presigned_url = self._get_presigned_url()
            if presigned_url is None:
-                logger.
+                logger.debug("Warning: Failed to obtain presigned URL")
                return False
 
            # Upload the file using the presigned URL
            upload_result = self._put_presigned_url(presigned_url, self.json_file_path)
            if not upload_result:
-                logger.
+                logger.debug("Error: Failed to upload file to presigned URL")
                return False
            elif isinstance(upload_result, tuple):
                response, status_code = upload_result
                if status_code not in [200, 201]:
-                    logger.
-                        f"Error:
+                    logger.debug(
+                        f"Error: Upload failed with status code {status_code}: {response.text if hasattr(response, 'text') else 'Unknown error'}")
                    return False
            # Insert trace records
            insert_success = self.insert_traces(presigned_url)
            if not insert_success:
-
+                logger.debug("Error: Failed to insert trace records")
                return False
 
-            logger.
+            logger.debug("Successfully uploaded agentic traces")
            return True
        except FileNotFoundError:
-            logger.
+            logger.debug(f"Error: Trace file not found at {self.json_file_path}")
            return False
        except ConnectionError as e:
-            logger.
+            logger.debug(f"Error: Network connection failed while uploading traces: {e}")
            return False
        except Exception as e:
-            logger.
+            logger.debug(f"Error while uploading agentic traces: {e}")
            return False