ragaai-catalyst 2.2.4.1b1__tar.gz → 2.2.4.1b2__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {ragaai_catalyst-2.2.4.1b1/ragaai_catalyst.egg-info → ragaai_catalyst-2.2.4.1b2}/PKG-INFO +1 -1
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b2}/pyproject.toml +1 -1
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b2}/ragaai_catalyst/tracers/agentic_tracing/upload/trace_uploader.py +34 -122
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b2}/ragaai_catalyst/tracers/agentic_tracing/upload/upload_agentic_traces.py +46 -45
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b2}/ragaai_catalyst/tracers/agentic_tracing/upload/upload_code.py +42 -79
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b2}/ragaai_catalyst/tracers/exporters/ragaai_trace_exporter.py +40 -15
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b2}/ragaai_catalyst/tracers/tracer.py +23 -1
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b2/ragaai_catalyst.egg-info}/PKG-INFO +1 -1
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b2}/ragaai_catalyst.egg-info/SOURCES.txt +0 -1
- ragaai_catalyst-2.2.4.1b1/ragaai_catalyst/tracers/agentic_tracing/upload/session_manager.py +0 -92
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b2}/.github/ISSUE_TEMPLATE/bug_report.md +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b2}/.github/ISSUE_TEMPLATE/feature_request.md +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b2}/.github/PULL_REQUEST_TEMPLATE.md +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b2}/.github/workflows/ci.yml +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b2}/.gitignore +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b2}/.gitmodules +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b2}/LICENSE +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b2}/Quickstart.md +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b2}/README.md +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b2}/docs/agentic_tracing.md +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b2}/docs/dataset_management.md +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b2}/docs/img/autheticate.gif +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b2}/docs/img/create_project.gif +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b2}/docs/img/custom_metrics.png +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b2}/docs/img/dataset.gif +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b2}/docs/img/dataset.png +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b2}/docs/img/evaluation.gif +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b2}/docs/img/evaluation.png +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b2}/docs/img/guardrails.png +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b2}/docs/img/last_main.png +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b2}/docs/img/main.png +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b2}/docs/img/projects_new.png +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b2}/docs/img/trace_comp.png +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b2}/docs/prompt_management.md +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b2}/docs/trace_management.md +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b2}/examples/all_llm_provider/all_llm_provider.py +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b2}/examples/all_llm_provider/config.py +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b2}/examples/all_llm_provider/run_all_llm_provider.py +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b2}/examples/crewai/scifi_writer/README.md +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b2}/examples/crewai/scifi_writer/requirements.txt +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b2}/examples/crewai/scifi_writer/sample.env +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b2}/examples/crewai/scifi_writer/scifi_writer.py +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b2}/examples/custom_agents/travel_agent/agents.py +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b2}/examples/custom_agents/travel_agent/config.py +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b2}/examples/custom_agents/travel_agent/main.py +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b2}/examples/custom_agents/travel_agent/tools.py +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b2}/examples/haystack/news_fetching/README.md +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b2}/examples/haystack/news_fetching/news_fetching.py +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b2}/examples/haystack/news_fetching/requirements.txt +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b2}/examples/langchain/medical_rag/data/medical_texts/handbook1.pdf +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b2}/examples/langchain/medical_rag/data/medical_texts/handbook2.pdf +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b2}/examples/langchain/medical_rag/data/symptom_disease_map.csv +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b2}/examples/langchain/medical_rag/diagnosis_agent.py +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b2}/examples/langchain/medical_rag/requirements.txt +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b2}/examples/langchain/medical_rag/sample.env +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b2}/examples/langgraph/personal_research_assistant/README.md +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b2}/examples/langgraph/personal_research_assistant/requirements.txt +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b2}/examples/langgraph/personal_research_assistant/research_assistant.py +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b2}/examples/langgraph/personal_research_assistant/sample.env +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b2}/examples/llamaindex_examples/legal_research_rag/legal_data/cases/ca_overtime_2021.pdf +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b2}/examples/llamaindex_examples/legal_research_rag/legal_data/cases/fl_ada_2022.pdf +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b2}/examples/llamaindex_examples/legal_research_rag/legal_data/statutes.csv +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b2}/examples/llamaindex_examples/legal_research_rag/legal_rag.py +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b2}/examples/llamaindex_examples/legal_research_rag/requirements.txt +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b2}/examples/llamaindex_examples/legal_research_rag/sample.env +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b2}/examples/openai_agents_sdk/email_data_extraction_agent/README.md +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b2}/examples/openai_agents_sdk/email_data_extraction_agent/data_extraction_email.py +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b2}/examples/openai_agents_sdk/email_data_extraction_agent/requirements.txt +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b2}/examples/openai_agents_sdk/email_data_extraction_agent/sample.env +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b2}/examples/openai_agents_sdk/youtube_summary_agent/README.md +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b2}/examples/openai_agents_sdk/youtube_summary_agent/requirements.txt +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b2}/examples/openai_agents_sdk/youtube_summary_agent/sample.env +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b2}/examples/openai_agents_sdk/youtube_summary_agent/youtube_summary_agent.py +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b2}/examples/pii_masking_example/llamaindex_agentic_fastapi/app.py +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b2}/examples/pii_masking_example/llamaindex_agentic_fastapi/app_presidio.py +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b2}/examples/pii_masking_example/llamaindex_agentic_fastapi/request.py +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b2}/examples/pii_masking_example/llamaindex_agentic_fastapi/requirements.txt +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b2}/examples/smolagents/most_upvoted_paper/README.md +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b2}/examples/smolagents/most_upvoted_paper/most_upvoted_paper.py +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b2}/examples/smolagents/most_upvoted_paper/requirements.txt +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b2}/examples/smolagents/most_upvoted_paper/sample.env +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b2}/ragaai_catalyst/__init__.py +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b2}/ragaai_catalyst/_version.py +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b2}/ragaai_catalyst/dataset.py +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b2}/ragaai_catalyst/evaluation.py +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b2}/ragaai_catalyst/experiment.py +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b2}/ragaai_catalyst/guard_executor.py +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b2}/ragaai_catalyst/guardrails_manager.py +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b2}/ragaai_catalyst/internal_api_completion.py +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b2}/ragaai_catalyst/prompt_manager.py +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b2}/ragaai_catalyst/proxy_call.py +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b2}/ragaai_catalyst/ragaai_catalyst.py +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b2}/ragaai_catalyst/redteaming/__init__.py +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b2}/ragaai_catalyst/redteaming/config/detectors.toml +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b2}/ragaai_catalyst/redteaming/data_generator/scenario_generator.py +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b2}/ragaai_catalyst/redteaming/data_generator/test_case_generator.py +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b2}/ragaai_catalyst/redteaming/evaluator.py +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b2}/ragaai_catalyst/redteaming/llm_generator.py +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b2}/ragaai_catalyst/redteaming/llm_generator_old.py +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b2}/ragaai_catalyst/redteaming/red_teaming.py +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b2}/ragaai_catalyst/redteaming/requirements.txt +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b2}/ragaai_catalyst/redteaming/tests/grok.ipynb +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b2}/ragaai_catalyst/redteaming/tests/stereotype.ipynb +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b2}/ragaai_catalyst/redteaming/upload_result.py +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b2}/ragaai_catalyst/redteaming/utils/issue_description.py +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b2}/ragaai_catalyst/redteaming/utils/rt.png +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b2}/ragaai_catalyst/redteaming_old.py +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b2}/ragaai_catalyst/synthetic_data_generation.py +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b2}/ragaai_catalyst/tracers/__init__.py +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b2}/ragaai_catalyst/tracers/agentic_tracing/README.md +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b2}/ragaai_catalyst/tracers/agentic_tracing/__init__.py +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b2}/ragaai_catalyst/tracers/agentic_tracing/data/__init__.py +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b2}/ragaai_catalyst/tracers/agentic_tracing/data/data_structure.py +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b2}/ragaai_catalyst/tracers/agentic_tracing/tests/FinancialAnalysisSystem.ipynb +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b2}/ragaai_catalyst/tracers/agentic_tracing/tests/GameActivityEventPlanner.ipynb +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b2}/ragaai_catalyst/tracers/agentic_tracing/tests/TravelPlanner.ipynb +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b2}/ragaai_catalyst/tracers/agentic_tracing/tests/__init__.py +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b2}/ragaai_catalyst/tracers/agentic_tracing/tests/ai_travel_agent.py +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b2}/ragaai_catalyst/tracers/agentic_tracing/tests/unique_decorator_test.py +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b2}/ragaai_catalyst/tracers/agentic_tracing/tracers/__init__.py +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b2}/ragaai_catalyst/tracers/agentic_tracing/tracers/agent_tracer.py +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b2}/ragaai_catalyst/tracers/agentic_tracing/tracers/base.py +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b2}/ragaai_catalyst/tracers/agentic_tracing/tracers/custom_tracer.py +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b2}/ragaai_catalyst/tracers/agentic_tracing/tracers/langgraph_tracer.py +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b2}/ragaai_catalyst/tracers/agentic_tracing/tracers/llm_tracer.py +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b2}/ragaai_catalyst/tracers/agentic_tracing/tracers/main_tracer.py +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b2}/ragaai_catalyst/tracers/agentic_tracing/tracers/network_tracer.py +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b2}/ragaai_catalyst/tracers/agentic_tracing/tracers/tool_tracer.py +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b2}/ragaai_catalyst/tracers/agentic_tracing/tracers/user_interaction_tracer.py +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b2}/ragaai_catalyst/tracers/agentic_tracing/upload/__init__.py +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b2}/ragaai_catalyst/tracers/agentic_tracing/upload/upload_local_metric.py +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b2}/ragaai_catalyst/tracers/agentic_tracing/utils/__init__.py +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b2}/ragaai_catalyst/tracers/agentic_tracing/utils/api_utils.py +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b2}/ragaai_catalyst/tracers/agentic_tracing/utils/create_dataset_schema.py +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b2}/ragaai_catalyst/tracers/agentic_tracing/utils/file_name_tracker.py +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b2}/ragaai_catalyst/tracers/agentic_tracing/utils/generic.py +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b2}/ragaai_catalyst/tracers/agentic_tracing/utils/get_user_trace_metrics.py +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b2}/ragaai_catalyst/tracers/agentic_tracing/utils/llm_utils.py +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b2}/ragaai_catalyst/tracers/agentic_tracing/utils/model_costs.json +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b2}/ragaai_catalyst/tracers/agentic_tracing/utils/span_attributes.py +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b2}/ragaai_catalyst/tracers/agentic_tracing/utils/supported_llm_provider.toml +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b2}/ragaai_catalyst/tracers/agentic_tracing/utils/system_monitor.py +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b2}/ragaai_catalyst/tracers/agentic_tracing/utils/trace_utils.py +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b2}/ragaai_catalyst/tracers/agentic_tracing/utils/unique_decorator.py +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b2}/ragaai_catalyst/tracers/agentic_tracing/utils/zip_list_of_unique_files.py +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b2}/ragaai_catalyst/tracers/distributed.py +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b2}/ragaai_catalyst/tracers/exporters/__init__.py +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b2}/ragaai_catalyst/tracers/exporters/dynamic_trace_exporter.py +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b2}/ragaai_catalyst/tracers/exporters/file_span_exporter.py +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b2}/ragaai_catalyst/tracers/exporters/raga_exporter.py +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b2}/ragaai_catalyst/tracers/instrumentators/__init__.py +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b2}/ragaai_catalyst/tracers/langchain_callback.py +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b2}/ragaai_catalyst/tracers/llamaindex_callback.py +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b2}/ragaai_catalyst/tracers/llamaindex_instrumentation.py +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b2}/ragaai_catalyst/tracers/upload_traces.py +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b2}/ragaai_catalyst/tracers/utils/__init__.py +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b2}/ragaai_catalyst/tracers/utils/convert_langchain_callbacks_output.py +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b2}/ragaai_catalyst/tracers/utils/convert_llama_instru_callback.py +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b2}/ragaai_catalyst/tracers/utils/extraction_logic_llama_index.py +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b2}/ragaai_catalyst/tracers/utils/langchain_tracer_extraction_logic.py +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b2}/ragaai_catalyst/tracers/utils/model_prices_and_context_window_backup.json +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b2}/ragaai_catalyst/tracers/utils/rag_extraction_logic_final.py +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b2}/ragaai_catalyst/tracers/utils/rag_trace_json_converter.py +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b2}/ragaai_catalyst/tracers/utils/trace_json_converter.py +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b2}/ragaai_catalyst/tracers/utils/utils.py +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b2}/ragaai_catalyst/utils.py +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b2}/ragaai_catalyst.egg-info/dependency_links.txt +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b2}/ragaai_catalyst.egg-info/requires.txt +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b2}/ragaai_catalyst.egg-info/top_level.txt +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b2}/requirements.txt +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b2}/setup.cfg +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b2}/test_report_20250407_183101.txt +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b2}/tests/README.md +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b2}/tests/environment.yml +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b2}/tests/examples/__init__.py +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b2}/tests/examples/all_llm_provider/all_llm_provider.py +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b2}/tests/examples/all_llm_provider/config.py +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b2}/tests/examples/all_llm_provider/test_all_llm_provider.py +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b2}/tests/examples/crewai/scifi_writer/sci_fi_story.md +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b2}/tests/examples/crewai/scifi_writer/scifi_writer.py +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b2}/tests/examples/crewai/scifi_writer/test_scifi_writer.py +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b2}/tests/examples/custom_agents/travel_agent/agents.py +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b2}/tests/examples/custom_agents/travel_agent/config.py +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b2}/tests/examples/custom_agents/travel_agent/main.py +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b2}/tests/examples/custom_agents/travel_agent/test_travel_agent.py +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b2}/tests/examples/custom_agents/travel_agent/tools.py +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b2}/tests/examples/haystack/news_fetching/news_fetching.py +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b2}/tests/examples/haystack/news_fetching/test_news_fetching.py +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b2}/tests/examples/langchain/medical_rag/data/medical_texts/handbook1.pdf +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b2}/tests/examples/langchain/medical_rag/data/medical_texts/handbook2.pdf +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b2}/tests/examples/langchain/medical_rag/data/symptom_disease_map.csv +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b2}/tests/examples/langchain/medical_rag/diagnosis_agent.py +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b2}/tests/examples/langchain/medical_rag/test_diagnosis_agent.py +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b2}/tests/examples/langgraph/personal_research_assistant/research_assistant.py +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b2}/tests/examples/langgraph/personal_research_assistant/test_research_assistant.py +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b2}/tests/examples/llamaindex_examples/legal_research_rag/legal_data/cases/ca_overtime_2021.pdf +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b2}/tests/examples/llamaindex_examples/legal_research_rag/legal_data/cases/fl_ada_2022.pdf +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b2}/tests/examples/llamaindex_examples/legal_research_rag/legal_data/statutes.csv +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b2}/tests/examples/llamaindex_examples/legal_research_rag/legal_rag.py +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b2}/tests/examples/llamaindex_examples/legal_research_rag/test_legal_rag.py +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b2}/tests/examples/smolagents/most_upvoted_paper/most_upvoted_paper.py +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b2}/tests/examples/smolagents/most_upvoted_paper/paper.pdf +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b2}/tests/examples/smolagents/most_upvoted_paper/test_most_upvoted_paper.py +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b2}/tests/examples/test_utils/get_components.py +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b2}/tests/examples/test_utils/get_trace_data.py +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b2}/tests/run_pytest_and_print_and_save_results.py +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b2}/tests/table_result.png +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b2}/tests/test_catalyst/test_base_tracer_add_metrics.py +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b2}/tests/test_catalyst/test_base_tracer_metrics.py +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b2}/tests/test_catalyst/test_data/util_synthetic_data_doc.pdf +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b2}/tests/test_catalyst/test_data/util_synthetic_data_invalid.csv +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b2}/tests/test_catalyst/test_data/util_synthetic_data_valid.csv +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b2}/tests/test_catalyst/test_data/util_test_dataset.csv +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b2}/tests/test_catalyst/test_data/util_test_langchain_tracing.pdf +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b2}/tests/test_catalyst/test_dataset.py +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b2}/tests/test_catalyst/test_evaluation.py +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b2}/tests/test_catalyst/test_evaluation_metrics.py +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b2}/tests/test_catalyst/test_prompt_manager.py +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b2}/tests/test_catalyst/test_synthetic_data_generation.py +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b2}/tests/test_catalyst/test_the_configuration.py +0 -0
- {ragaai_catalyst-2.2.4.1b1 → ragaai_catalyst-2.2.4.1b2}/tests_requirements.txt +0 -0
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: ragaai_catalyst
-Version: 2.2.4.1b1
+Version: 2.2.4.1b2
 Summary: RAGA AI CATALYST
 Author-email: Kiran Scaria <kiran.scaria@raga.ai>, Kedar Gaikwad <kedar.gaikwad@raga.ai>, Dushyant Mahajan <dushyant.mahajan@raga.ai>, Siddhartha Kosti <siddhartha.kosti@raga.ai>, Ritika Goel <ritika.goel@raga.ai>, Vijay Chaurasia <vijay.chaurasia@raga.ai>, Tushar Kumar <tushar.kumar@raga.ai>, Rishabh Pandey <rishabh.pandey@raga.ai>, Jyotsana C G <jyotsana@raga.ai>
 Requires-Python: <=3.13.2,>=3.10
@@ -8,7 +8,7 @@ description = "RAGA AI CATALYST"
 readme = "README.md"
 requires-python = ">=3.10,<=3.13.2"
 # license = {file = "LICENSE"}
-version = "2.2.4.1.beta.1"
+version = "2.2.4.1.beta.2"
 authors = [
     {name = "Kiran Scaria", email = "kiran.scaria@raga.ai"},
     {name = "Kedar Gaikwad", email = "kedar.gaikwad@raga.ai"},
@@ -22,7 +22,6 @@ from typing import Dict, Any, Optional
 import threading
 import uuid
 
-
 # Set up logging
 log_dir = os.path.join(tempfile.gettempdir(), "ragaai_logs")
 os.makedirs(log_dir, exist_ok=True)
@@ -50,13 +49,11 @@ try:
     from ragaai_catalyst.tracers.agentic_tracing.upload.upload_code import upload_code
     # from ragaai_catalyst.tracers.agentic_tracing.upload.upload_trace_metric import upload_trace_metric
     from ragaai_catalyst.tracers.agentic_tracing.utils.create_dataset_schema import create_dataset_schema_with_trace
-    from ragaai_catalyst.tracers.agentic_tracing.upload.session_manager import session_manager
     from ragaai_catalyst import RagaAICatalyst
     IMPORTS_AVAILABLE = True
 except ImportError:
     logger.warning("RagaAI Catalyst imports not available - running in test mode")
     IMPORTS_AVAILABLE = False
-    session_manager = None
 
 # Define task queue directory
 QUEUE_DIR = os.path.join(tempfile.gettempdir(), "ragaai_tasks")
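With `session_manager` removed from trace_uploader.py, the guarded import block no longer needs to stub it out in the `ImportError` branch. As a rough sketch (only imports shown in this hunk are included; the test-mode behaviour is inferred from the surrounding code), the pattern the module uses looks like this:

```python
import logging

logger = logging.getLogger(__name__)

try:
    # Heavy dependencies; only available when the full package is installed.
    from ragaai_catalyst.tracers.agentic_tracing.upload.upload_code import upload_code
    from ragaai_catalyst.tracers.agentic_tracing.utils.create_dataset_schema import (
        create_dataset_schema_with_trace,
    )
    from ragaai_catalyst import RagaAICatalyst

    IMPORTS_AVAILABLE = True
except ImportError:
    # Test mode: keep the module importable and let callers skip the real upload paths.
    logger.warning("RagaAI Catalyst imports not available - running in test mode")
    IMPORTS_AVAILABLE = False
```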
@@ -75,10 +72,6 @@ _executor_lock = threading.Lock()
 _futures: Dict[str, Any] = {}
 _futures_lock = threading.Lock()
 
-# Dataset creation cache to avoid redundant API calls
-_dataset_cache: Dict[str, Dict[str, Any]] = {}
-_dataset_cache_lock = threading.Lock()
-DATASET_CACHE_DURATION = 600  # 10 minutes in seconds
 
 _cleanup_lock = threading.Lock()
 _last_cleanup = 0
@@ -95,7 +88,7 @@ def get_executor(max_workers=None):
     if _executor is None:
         # Calculate optimal worker count
        if max_workers is None:
-            max_workers = min(
+            max_workers = min(32, (os.cpu_count() or 1) * 4)
 
        logger.info(f"Creating ThreadPoolExecutor with {max_workers} workers")
        _executor = concurrent.futures.ThreadPoolExecutor(
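The worker count now falls back to a common heuristic: at most 32 threads, or four per CPU core, whichever is smaller. A minimal sketch of a lazily created, shared `ThreadPoolExecutor` using that formula (the lock and the thread-name prefix here are assumptions, not taken from the diff):

```python
import concurrent.futures
import os
import threading

_executor = None
_executor_lock = threading.Lock()


def get_executor(max_workers=None):
    """Return a process-wide ThreadPoolExecutor, creating it on first use."""
    global _executor
    with _executor_lock:
        if _executor is None:
            if max_workers is None:
                # Cap at 32 workers or 4 per CPU core, whichever is smaller.
                max_workers = min(32, (os.cpu_count() or 1) * 4)
            _executor = concurrent.futures.ThreadPoolExecutor(
                max_workers=max_workers,
                thread_name_prefix="ragaai_upload",  # hypothetical prefix
            )
        return _executor
```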
@@ -117,57 +110,9 @@ def generate_unique_task_id():
     unique_id = str(uuid.uuid4())[:8]  # Short UUID
     return f"task_{int(time.time())}_{os.getpid()}_{counter}_{unique_id}"
 
-def _generate_dataset_cache_key(dataset_name: str, project_name: str, base_url: str) -> str:
-    """Generate a unique cache key for dataset creation"""
-    return f"{dataset_name}#{project_name}#{base_url}"
-
-def _is_dataset_cached(cache_key: str) -> bool:
-    """Check if dataset creation is cached and still valid"""
-    with _dataset_cache_lock:
-        if cache_key not in _dataset_cache:
-            return False
-
-        cache_entry = _dataset_cache[cache_key]
-        cache_time = cache_entry.get('timestamp', 0)
-        current_time = time.time()
-
-        # Check if cache is still valid (within 10 minutes)
-        if current_time - cache_time <= DATASET_CACHE_DURATION:
-            logger.info(f"Dataset creation cache hit for key: {cache_key}")
-            return True
-        else:
-            # Cache expired, remove it
-            logger.info(f"Dataset creation cache expired for key: {cache_key}")
-            del _dataset_cache[cache_key]
-            return False
-
-def _cache_dataset_creation(cache_key: str, response: Any) -> None:
-    """Cache successful dataset creation"""
-    with _dataset_cache_lock:
-        _dataset_cache[cache_key] = {
-            'timestamp': time.time(),
-            'response': response
-        }
-
-def _cleanup_expired_cache_entries() -> None:
-    """Remove expired cache entries"""
-    current_time = time.time()
-    with _dataset_cache_lock:
-        expired_keys = []
-        for cache_key, cache_entry in _dataset_cache.items():
-            cache_time = cache_entry.get('timestamp', 0)
-            if current_time - cache_time > DATASET_CACHE_DURATION:
-                expired_keys.append(cache_key)
-
-        for key in expired_keys:
-            del _dataset_cache[key]
-
-        if expired_keys:
-            logger.info(f"Cleaned up {len(expired_keys)} expired dataset cache entries")
-
 def process_upload(task_id: str, filepath: str, hash_id: str, zip_path: str,
                    project_name: str, project_id: str, dataset_name: str,
-                   user_details: Dict[str, Any], base_url: str,
+                   user_details: Dict[str, Any], base_url: str, timeout=120, fail_on_trace_error=True) -> Dict[str, Any]:
     """
     Process a single upload task
     
@@ -220,36 +165,20 @@ def process_upload(task_id: str, filepath: str, hash_id: str, zip_path: str,
             save_task_status(result)
             return result
 
-        # Step 1: Create dataset schema
+        # Step 1: Create dataset schema
         logger.info(f"Creating dataset schema for {dataset_name} with base_url: {base_url} and timeout: {timeout}")
-
-
-
-
-
-
-
-
-
-
-
-
-            dataset_name=dataset_name,
-            project_name=project_name,
-            base_url=base_url,
-            user_details=user_details,
-            timeout=timeout
-        )
-        logger.info(f"Dataset schema created: {response}")
-
-        # Cache the response only if status code is 200
-        if response and hasattr(response, 'status_code') and response.status_code in [200, 201]:
-            _cache_dataset_creation(cache_key, response)
-            logger.info(f"Response cached successfully for dataset: {dataset_name} and key: {cache_key}")
-
-        except Exception as e:
-            logger.error(f"Error creating dataset schema: {e}")
-            # Continue with other steps
+        try:
+            response = create_dataset_schema_with_trace(
+                dataset_name=dataset_name,
+                project_name=project_name,
+                base_url=base_url,
+                user_details=user_details,
+                timeout=timeout
+            )
+            logger.info(f"Dataset schema created: {response}")
+        except Exception as e:
+            logger.error(f"Error creating dataset schema: {e}")
+            # Continue with other steps
 
         # Step 2: Upload trace metrics
         # if filepath and os.path.exists(filepath):
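The lines removed in this and the two preceding trace_uploader.py hunks all belonged to one feature: a short-lived, module-level cache that skipped dataset-schema creation when the same dataset/project/base-URL combination had succeeded within the last ten minutes. After this change every upload simply calls `create_dataset_schema_with_trace` and logs any failure. For reference, the dropped pattern condenses to a lock-guarded TTL dictionary, roughly like this (helper names shortened here):

```python
import threading
import time

_dataset_cache = {}  # cache_key -> {"timestamp": float, "response": object}
_dataset_cache_lock = threading.Lock()
DATASET_CACHE_DURATION = 600  # seconds (10 minutes)


def _cache_key(dataset_name, project_name, base_url):
    return f"{dataset_name}#{project_name}#{base_url}"


def _is_cached(key):
    """True if the key was stored less than DATASET_CACHE_DURATION seconds ago."""
    with _dataset_cache_lock:
        entry = _dataset_cache.get(key)
        if entry is None:
            return False
        if time.time() - entry["timestamp"] <= DATASET_CACHE_DURATION:
            return True
        del _dataset_cache[key]  # expired entry
        return False


def _cache(key, response):
    with _dataset_cache_lock:
        _dataset_cache[key] = {"timestamp": time.time(), "response": response}
```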
@@ -309,34 +238,28 @@ def process_upload(task_id: str, filepath: str, hash_id: str, zip_path: str,
             logger.error(error_msg)
 
         # Step 4: Upload code hash
-        if 
-            logger.info(f"
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-                logger.info(f"Code hash uploaded successfully: {response}")
-            except Exception as e:
-                logger.error(f"Error uploading code hash: {e}")
-        else:
-            logger.warning(f"Code zip {zip_path} not found, skipping code upload")
-
+        if hash_id and zip_path and os.path.exists(zip_path):
+            logger.info(f"Uploading code hash {hash_id} with base_url: {base_url} and timeout: {timeout}")
+            try:
+                response = upload_code(
+                    hash_id=hash_id,
+                    zip_path=zip_path,
+                    project_name=project_name,
+                    dataset_name=dataset_name,
+                    base_url=base_url,
+                    timeout=timeout
+                )
+                logger.info(f"Code hash uploaded: {response}")
+            except Exception as e:
+                logger.error(f"Error uploading code hash: {e}")
+        else:
+            logger.warning(f"Code zip {zip_path} not found, skipping code upload")
+
         # Mark task as completed
         result["status"] = STATUS_COMPLETED
         result["end_time"] = datetime.now().isoformat()
         logger.info(f"Task {task_id} completed successfully")
-        
+
     except Exception as e:
         logger.error(f"Error processing task {task_id}: {e}")
         result["status"] = STATUS_FAILED
@@ -379,8 +302,7 @@ def save_task_status(task_status: Dict[str, Any]):
     with open(status_path, "w") as f:
         json.dump(task_status, f, indent=2)
 
-def submit_upload_task(filepath, hash_id, zip_path, project_name, project_id, dataset_name, user_details, base_url,
-                       tracer_type, timeout=120):
+def submit_upload_task(filepath, hash_id, zip_path, project_name, project_id, dataset_name, user_details, base_url, timeout=120):
     """
     Submit a new upload task using futures.
     
@@ -427,7 +349,6 @@ def submit_upload_task(filepath, hash_id, zip_path, project_name, project_id, da
             dataset_name=dataset_name,
             user_details=user_details,
             base_url=base_url,
-            tracer_type = tracer_type,
             timeout=timeout,
             fail_on_trace_error=True
         )
@@ -458,7 +379,6 @@ def submit_upload_task(filepath, hash_id, zip_path, project_name, project_id, da
             dataset_name=dataset_name,
             user_details=user_details,
             base_url=base_url,
-            tracer_type=tracer_type,
             timeout=timeout,
             fail_on_trace_error=True
         )
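Both call sites above, and the `submit_upload_task` signature itself, drop the `tracer_type` argument, which was previously forwarded into `process_upload` and is no longer accepted there. A hypothetical call site under the new signature (all values below are placeholders, not taken from the package):

```python
# Placeholder values for illustration only; the return value is not shown in this diff.
submit_upload_task(
    filepath="/tmp/ragaai_tasks/trace_123.json",
    hash_id="abc123",
    zip_path="/tmp/ragaai_tasks/code_abc123.zip",
    project_name="my-project",
    project_id="proj-1",
    dataset_name="my-dataset",
    user_details={},  # shape assumed
    base_url="https://catalyst.example.com/api",
    timeout=120,      # tracer_type is no longer passed
)
```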
@@ -630,14 +550,6 @@ def shutdown(timeout=120):
 
     _executor = None
 
-    # Close the session manager to clean up HTTP connections
-    if session_manager is not None:
-        try:
-            session_manager.close()
-            logger.info("Session manager closed successfully")
-        except Exception as e:
-            logger.error(f"Error closing session manager: {e}")
-
 # Register shutdown handler
 atexit.register(shutdown)
 
@@ -4,13 +4,13 @@ import os
 import re
 import time
 from urllib.parse import urlparse, urlunparse
-from urllib3.exceptions import PoolError, MaxRetryError, NewConnectionError
-from requests.exceptions import ConnectionError, Timeout, RequestException
-from .session_manager import session_manager
 
 import requests
 
 logger = logging.getLogger(__name__)
+logging_level = (
+    logger.setLevel(logging.DEBUG) if os.getenv("DEBUG") == "1" else logging.INFO
+)
 
 from ragaai_catalyst.ragaai_catalyst import RagaAICatalyst
 
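This hunk and the ones that follow come from upload_agentic_traces.py (per the file list above). The added lines turn on debug logging for the module whenever the `DEBUG` environment variable is set to `"1"`. Note that, as written, the conditional expression only calls `setLevel` in the debug branch; an explicit if/else that sets both levels would express the same intent more directly, roughly:

```python
import logging
import os

logger = logging.getLogger(__name__)

# Same intent as the added lines: verbose logging only when DEBUG=1.
if os.getenv("DEBUG") == "1":
    logger.setLevel(logging.DEBUG)
else:
    logger.setLevel(logging.INFO)
```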
@@ -47,36 +47,40 @@ class UploadAgenticTraces:
             "X-Project-Name": self.project_name,
         }
 
+        logger.debug("Started getting presigned url: ")
         try:
             start_time = time.time()
             endpoint = f"{self.base_url}/v1/llm/presigned-url"
             # Changed to POST from GET
-            response = 
+            response = requests.request(
                 "POST", endpoint, headers=headers, data=payload, timeout=self.timeout
             )
             elapsed_ms = (time.time() - start_time) * 1000
             logger.debug(
                 f"API Call: [POST] {endpoint} | Status: {response.status_code} | Time: {elapsed_ms:.2f}ms"
             )
-
-            if response.status_code == 200:
+            if response.status_code in [200, 201]:
                 presignedURLs = response.json()["data"]["presignedUrls"][0]
+                logger.debug(f"Got presigned url: {presignedURLs}")
                 presignedurl = self.update_presigned_url(presignedURLs, self.base_url)
+                logger.debug(f"Updated presigned url: {presignedurl}")
                 return presignedurl
             else:
                 # If POST fails, try GET
-                response = 
+                response = requests.request(
                     "GET", endpoint, headers=headers, data=payload, timeout=self.timeout
                 )
                 elapsed_ms = (time.time() - start_time) * 1000
                 logger.debug(
                     f"API Call: [GET] {endpoint} | Status: {response.status_code} | Time: {elapsed_ms:.2f}ms"
                 )
-                if response.status_code
+                if response.status_code in [200, 201]:
                     presignedURLs = response.json()["data"]["presignedUrls"][0]
+                    logger.debug(f"Got presigned url: {presignedURLs}")
                     presignedurl = self.update_presigned_url(
                         presignedURLs, self.base_url
                     )
+                    logger.debug(f"Updated presigned url: {presignedurl}")
                     return presignedurl
                 elif response.status_code == 401:
                     logger.warning("Received 401 error. Attempting to refresh token.")
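Throughout this file the old `session_manager`-based calls are replaced with plain `requests.request(...)` calls, and success checks now accept both 200 and 201. Stripped of logging and the 401 token-refresh branch, the presigned-URL fetch above boils down to a POST with a GET fallback (the endpoint path comes from the hunk; the header and payload shapes are assumptions):

```python
import requests


def get_presigned_url(base_url, headers, payload, timeout=120):
    """Sketch: try POST first, fall back to GET, treat 200/201 as success."""
    endpoint = f"{base_url}/v1/llm/presigned-url"
    response = requests.request("POST", endpoint, headers=headers, data=payload, timeout=timeout)
    if response.status_code not in (200, 201):
        # The uploader retries the same endpoint with GET if the POST is rejected.
        response = requests.request("GET", endpoint, headers=headers, data=payload, timeout=timeout)
    if response.status_code in (200, 201):
        return response.json()["data"]["presignedUrls"][0]
    return None
```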
@@ -86,7 +90,7 @@ class UploadAgenticTraces:
                         "Authorization": f"Bearer {token}",
                         "X-Project-Name": self.project_name,
                     }
-                    response = 
+                    response = requests.request(
                         "POST",
                         endpoint,
                         headers=headers,
@@ -97,11 +101,13 @@ class UploadAgenticTraces:
                     logger.debug(
                         f"API Call: [POST] {endpoint} | Status: {response.status_code} | Time: {elapsed_ms:.2f}ms"
                     )
-                    if response.status_code
+                    if response.status_code in [200, 201]:
                         presignedURLs = response.json()["data"]["presignedUrls"][0]
+                        logger.debug(f"Got presigned url: {presignedURLs}")
                         presignedurl = self.update_presigned_url(
                             presignedURLs, self.base_url
                         )
+                        logger.debug(f"Updated presigned url: {presignedurl}")
                         return presignedurl
                     else:
                         logger.error(
@@ -113,10 +119,8 @@ class UploadAgenticTraces:
                             f"Error while getting presigned url: {response.json()['message']}"
                         )
                         return None
-
-
-            return None
-        except RequestException as e:
+
+        except requests.exceptions.RequestException as e:
             logger.error(f"Error while getting presigned url: {e}")
             return None
 
@@ -143,16 +147,16 @@ class UploadAgenticTraces:
 
         if "blob.core.windows.net" in presignedUrl:  # Azure
             headers["x-ms-blob-type"] = "BlockBlob"
-
+        print("Uploading agentic traces...")
         try:
             with open(filename) as f:
                 payload = f.read().replace("\n", "").replace("\r", "").encode()
         except Exception as e:
-
+            print(f"Error while reading file: {e}")
             return False
         try:
             start_time = time.time()
-            response = 
+            response = requests.request(
                 "PUT", presignedUrl, headers=headers, data=payload, timeout=self.timeout
             )
             elapsed_ms = (time.time() - start_time) * 1000
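The PUT to the presigned URL keeps the special case for Azure Blob Storage, which requires an `x-ms-blob-type: BlockBlob` header; the change here is only that the request goes through `requests.request` directly and progress/error messages are now printed. A condensed sketch of that upload step (file handling simplified; headers beyond the Azure case are assumed):

```python
import requests


def put_to_presigned_url(presigned_url, filename, timeout=120):
    """Sketch: PUT a local trace file to a presigned URL, adding the Azure blob header when needed."""
    headers = {"Content-Type": "application/json"}  # assumed base headers
    if "blob.core.windows.net" in presigned_url:  # Azure presigned URLs need this header
        headers["x-ms-blob-type"] = "BlockBlob"
    with open(filename) as f:
        payload = f.read().replace("\n", "").replace("\r", "").encode()
    response = requests.request("PUT", presigned_url, headers=headers, data=payload, timeout=timeout)
    return response.status_code in (200, 201)
```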
@@ -162,11 +166,8 @@ class UploadAgenticTraces:
             if response.status_code != 200 or response.status_code != 201:
                 return response, response.status_code
             return True
-        except 
-
-            return False
-        except RequestException as e:
-            logger.error(f"Error while uploading trace to presigned url: {e}")
+        except requests.exceptions.RequestException as e:
+            print(f"Error while uploading to presigned url: {e}")
             return False
 
     def insert_traces(self, presignedUrl):
@@ -182,18 +183,21 @@ class UploadAgenticTraces:
                 "datasetSpans": self._get_dataset_spans(),  # Extra key for agentic traces
             }
         )
+        logger.debug(f"Inserting agentic traces to presigned url: {presignedUrl}")
         try:
             start_time = time.time()
             endpoint = f"{self.base_url}/v1/llm/insert/trace"
-            response = 
+            response = requests.request(
                 "POST", endpoint, headers=headers, data=payload, timeout=self.timeout
             )
+            logger.debug(f"Payload: {payload}")
+            logger.debug(f"Headers: {headers}")
             elapsed_ms = (time.time() - start_time) * 1000
             logger.debug(
                 f"API Call: [POST] {endpoint} | Status: {response.status_code} | Time: {elapsed_ms:.2f}ms"
             )
             if response.status_code in [200, 201]:
-                logger.
+                logger.debug("Successfully inserted traces")
                 return True
             elif response.status_code == 401:
                 logger.warning("Received 401 error. Attempting to refresh token.")
@@ -203,7 +207,7 @@ class UploadAgenticTraces:
                     "Content-Type": "application/json",
                     "X-Project-Name": self.project_name,
                 }
-                response = 
+                response = requests.request(
                     "POST",
                     endpoint,
                     headers=headers,
@@ -215,27 +219,24 @@ class UploadAgenticTraces:
                     f"API Call: [POST] {endpoint} | Status: {response.status_code} | Time: {elapsed_ms:.2f}ms"
                 )
                 if response.status_code in [200, 201]:
-                    logger.
+                    logger.debug("Successfully inserted traces")
                     return True
                 else:
-                    logger.
+                    logger.debug("Error while inserting traces")
                     return False
             else:
-                logger.
+                logger.debug("Error while inserting traces")
                 return False
-        except 
-
-            return 
-        except RequestException as e:
-            logger.error(f"Error while inserting traces: {e}")
-            return False
+        except requests.exceptions.RequestException as e:
+            logger.debug(f"Error while inserting traces: {e}")
+            return None
 
     def _get_dataset_spans(self):
         try:
             with open(self.json_file_path) as f:
                 data = json.load(f)
         except Exception as e:
-
+            logger.debug(f"Error while reading file: {e}")
             return None
         try:
             spans = data["data"][0]["spans"]
@@ -257,41 +258,41 @@ class UploadAgenticTraces:
                     continue
             return dataset_spans
         except Exception as e:
-            logger.
+            logger.debug(f"Error while reading dataset spans: {e}")
             return None
 
     def upload_agentic_traces(self):
         try:
             presigned_url = self._get_presigned_url()
             if presigned_url is None:
-                logger.
+                logger.debug("Warning: Failed to obtain presigned URL")
                 return False
 
             # Upload the file using the presigned URL
             upload_result = self._put_presigned_url(presigned_url, self.json_file_path)
             if not upload_result:
-                logger.
+                logger.debug("Error: Failed to upload file to presigned URL")
                 return False
             elif isinstance(upload_result, tuple):
                 response, status_code = upload_result
                 if status_code not in [200, 201]:
-                    logger.
-                        f"Error: 
+                    logger.debug(
+                        f"Error: Upload failed with status code {status_code}: {response.text if hasattr(response, 'text') else 'Unknown error'}")
                     return False
             # Insert trace records
             insert_success = self.insert_traces(presigned_url)
             if not insert_success:
-
+                logger.debug("Error: Failed to insert trace records")
                 return False
 
-            logger.
+            logger.debug("Successfully uploaded agentic traces")
             return True
         except FileNotFoundError:
-            logger.
+            logger.debug(f"Error: Trace file not found at {self.json_file_path}")
             return False
         except ConnectionError as e:
-            logger.
+            logger.debug(f"Error: Network connection failed while uploading traces: {e}")
             return False
         except Exception as e:
-            logger.
+            logger.debug(f"Error while uploading agentic traces: {e}")
             return False
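Taken together, `upload_agentic_traces()` is a three-step pipeline: fetch a presigned URL, PUT the trace JSON to it, then call the insert endpoint so the backend registers the spans; every failure path now logs at debug level and returns `False`. A hypothetical end-to-end use of the class (the constructor arguments are assumed; they are not shown in this diff):

```python
# Hypothetical usage; the real constructor signature is not part of this diff.
uploader = UploadAgenticTraces(
    json_file_path="/tmp/ragaai_tasks/trace_123.json",
    project_name="my-project",
    dataset_name="my-dataset",
    base_url="https://catalyst.example.com/api",
    timeout=120,
)

ok = uploader.upload_agentic_traces()  # presigned URL -> PUT file -> insert trace records
print("uploaded" if ok else "upload failed")
```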