ragaai-catalyst 2.1.5b35__tar.gz → 2.1.5b36__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {ragaai_catalyst-2.1.5b35/ragaai_catalyst.egg-info → ragaai_catalyst-2.1.5b36}/PKG-INFO +1 -1
- {ragaai_catalyst-2.1.5b35 → ragaai_catalyst-2.1.5b36}/pyproject.toml +1 -1
- {ragaai_catalyst-2.1.5b35 → ragaai_catalyst-2.1.5b36}/ragaai_catalyst/tracers/agentic_tracing/tracers/base.py +20 -20
- ragaai_catalyst-2.1.5b36/ragaai_catalyst/tracers/agentic_tracing/upload/trace_uploader.py +355 -0
- {ragaai_catalyst-2.1.5b35 → ragaai_catalyst-2.1.5b36/ragaai_catalyst.egg-info}/PKG-INFO +1 -1
- ragaai_catalyst-2.1.5b35/ragaai_catalyst/tracers/agentic_tracing/upload/trace_uploader.py +0 -684
- {ragaai_catalyst-2.1.5b35 → ragaai_catalyst-2.1.5b36}/.github/ISSUE_TEMPLATE/bug_report.md +0 -0
- {ragaai_catalyst-2.1.5b35 → ragaai_catalyst-2.1.5b36}/.github/ISSUE_TEMPLATE/feature_request.md +0 -0
- {ragaai_catalyst-2.1.5b35 → ragaai_catalyst-2.1.5b36}/.github/PULL_REQUEST_TEMPLATE.md +0 -0
- {ragaai_catalyst-2.1.5b35 → ragaai_catalyst-2.1.5b36}/.gitignore +0 -0
- {ragaai_catalyst-2.1.5b35 → ragaai_catalyst-2.1.5b36}/LICENSE +0 -0
- {ragaai_catalyst-2.1.5b35 → ragaai_catalyst-2.1.5b36}/README.md +0 -0
- {ragaai_catalyst-2.1.5b35 → ragaai_catalyst-2.1.5b36}/docs/dataset_management.md +0 -0
- {ragaai_catalyst-2.1.5b35 → ragaai_catalyst-2.1.5b36}/docs/img/autheticate.gif +0 -0
- {ragaai_catalyst-2.1.5b35 → ragaai_catalyst-2.1.5b36}/docs/img/create_project.gif +0 -0
- {ragaai_catalyst-2.1.5b35 → ragaai_catalyst-2.1.5b36}/docs/img/custom_metrics.png +0 -0
- {ragaai_catalyst-2.1.5b35 → ragaai_catalyst-2.1.5b36}/docs/img/dataset.gif +0 -0
- {ragaai_catalyst-2.1.5b35 → ragaai_catalyst-2.1.5b36}/docs/img/dataset.png +0 -0
- {ragaai_catalyst-2.1.5b35 → ragaai_catalyst-2.1.5b36}/docs/img/evaluation.gif +0 -0
- {ragaai_catalyst-2.1.5b35 → ragaai_catalyst-2.1.5b36}/docs/img/evaluation.png +0 -0
- {ragaai_catalyst-2.1.5b35 → ragaai_catalyst-2.1.5b36}/docs/img/guardrails.png +0 -0
- {ragaai_catalyst-2.1.5b35 → ragaai_catalyst-2.1.5b36}/docs/img/last_main.png +0 -0
- {ragaai_catalyst-2.1.5b35 → ragaai_catalyst-2.1.5b36}/docs/img/main.png +0 -0
- {ragaai_catalyst-2.1.5b35 → ragaai_catalyst-2.1.5b36}/docs/img/projects_new.png +0 -0
- {ragaai_catalyst-2.1.5b35 → ragaai_catalyst-2.1.5b36}/docs/img/trace_comp.png +0 -0
- {ragaai_catalyst-2.1.5b35 → ragaai_catalyst-2.1.5b36}/docs/prompt_management.md +0 -0
- {ragaai_catalyst-2.1.5b35 → ragaai_catalyst-2.1.5b36}/docs/trace_management.md +0 -0
- {ragaai_catalyst-2.1.5b35 → ragaai_catalyst-2.1.5b36}/examples/FinancialAnalysisSystem.ipynb +0 -0
- {ragaai_catalyst-2.1.5b35 → ragaai_catalyst-2.1.5b36}/examples/TravelPlanner.ipynb +0 -0
- {ragaai_catalyst-2.1.5b35 → ragaai_catalyst-2.1.5b36}/examples/agentic_rag.py +0 -0
- {ragaai_catalyst-2.1.5b35 → ragaai_catalyst-2.1.5b36}/examples/custom_tracer_example.py +0 -0
- {ragaai_catalyst-2.1.5b35 → ragaai_catalyst-2.1.5b36}/examples/customer_support.py +0 -0
- {ragaai_catalyst-2.1.5b35 → ragaai_catalyst-2.1.5b36}/examples/finance.py +0 -0
- {ragaai_catalyst-2.1.5b35 → ragaai_catalyst-2.1.5b36}/examples/langgraph_examples/agentic_rag.py +0 -0
- {ragaai_catalyst-2.1.5b35 → ragaai_catalyst-2.1.5b36}/examples/langgraph_examples/customer_support.py +0 -0
- {ragaai_catalyst-2.1.5b35 → ragaai_catalyst-2.1.5b36}/examples/langgraph_examples/multi_tool.py +0 -0
- {ragaai_catalyst-2.1.5b35 → ragaai_catalyst-2.1.5b36}/examples/langgraph_examples/planning_agent.py +0 -0
- {ragaai_catalyst-2.1.5b35 → ragaai_catalyst-2.1.5b36}/examples/langgraph_multi_tools.py +0 -0
- {ragaai_catalyst-2.1.5b35 → ragaai_catalyst-2.1.5b36}/examples/llamaindex_examples/azureopenai_react_agent.py +0 -0
- {ragaai_catalyst-2.1.5b35 → ragaai_catalyst-2.1.5b36}/examples/llamaindex_examples/function_calling_agent.ipynb +0 -0
- {ragaai_catalyst-2.1.5b35 → ragaai_catalyst-2.1.5b36}/examples/llamaindex_examples/joke_gen_critique.py +0 -0
- {ragaai_catalyst-2.1.5b35 → ragaai_catalyst-2.1.5b36}/examples/llamaindex_examples/joke_gen_critique_anthropic.py +0 -0
- {ragaai_catalyst-2.1.5b35 → ragaai_catalyst-2.1.5b36}/examples/llamaindex_examples/joke_gen_critique_async.py +0 -0
- {ragaai_catalyst-2.1.5b35 → ragaai_catalyst-2.1.5b36}/examples/llamaindex_examples/joke_gen_critique_azureopenai.py +0 -0
- {ragaai_catalyst-2.1.5b35 → ragaai_catalyst-2.1.5b36}/examples/llamaindex_examples/joke_gen_critique_gemini.py +0 -0
- {ragaai_catalyst-2.1.5b35 → ragaai_catalyst-2.1.5b36}/examples/llamaindex_examples/joke_gen_critique_litellm.py +0 -0
- {ragaai_catalyst-2.1.5b35 → ragaai_catalyst-2.1.5b36}/examples/llamaindex_examples/joke_gen_critque_vertex.py +0 -0
- {ragaai_catalyst-2.1.5b35 → ragaai_catalyst-2.1.5b36}/examples/llamaindex_examples/react_agent.ipynb +0 -0
- {ragaai_catalyst-2.1.5b35 → ragaai_catalyst-2.1.5b36}/examples/llamaindex_examples/tool_call_agent.py +0 -0
- {ragaai_catalyst-2.1.5b35 → ragaai_catalyst-2.1.5b36}/examples/planning_agent.py +0 -0
- {ragaai_catalyst-2.1.5b35 → ragaai_catalyst-2.1.5b36}/examples/prompt_management_litellm.ipynb +0 -0
- {ragaai_catalyst-2.1.5b35 → ragaai_catalyst-2.1.5b36}/examples/prompt_management_openai.ipynb +0 -0
- {ragaai_catalyst-2.1.5b35 → ragaai_catalyst-2.1.5b36}/examples/sync_sample_call.py +0 -0
- {ragaai_catalyst-2.1.5b35 → ragaai_catalyst-2.1.5b36}/examples/travel_agent/agents.py +0 -0
- {ragaai_catalyst-2.1.5b35 → ragaai_catalyst-2.1.5b36}/examples/travel_agent/config.py +0 -0
- {ragaai_catalyst-2.1.5b35 → ragaai_catalyst-2.1.5b36}/examples/travel_agent/main.py +0 -0
- {ragaai_catalyst-2.1.5b35 → ragaai_catalyst-2.1.5b36}/examples/travel_agent/tools.py +0 -0
- {ragaai_catalyst-2.1.5b35 → ragaai_catalyst-2.1.5b36}/ragaai_catalyst/__init__.py +0 -0
- {ragaai_catalyst-2.1.5b35 → ragaai_catalyst-2.1.5b36}/ragaai_catalyst/_version.py +0 -0
- {ragaai_catalyst-2.1.5b35 → ragaai_catalyst-2.1.5b36}/ragaai_catalyst/dataset.py +0 -0
- {ragaai_catalyst-2.1.5b35 → ragaai_catalyst-2.1.5b36}/ragaai_catalyst/evaluation.py +0 -0
- {ragaai_catalyst-2.1.5b35 → ragaai_catalyst-2.1.5b36}/ragaai_catalyst/experiment.py +0 -0
- {ragaai_catalyst-2.1.5b35 → ragaai_catalyst-2.1.5b36}/ragaai_catalyst/guard_executor.py +0 -0
- {ragaai_catalyst-2.1.5b35 → ragaai_catalyst-2.1.5b36}/ragaai_catalyst/guardrails_manager.py +0 -0
- {ragaai_catalyst-2.1.5b35 → ragaai_catalyst-2.1.5b36}/ragaai_catalyst/internal_api_completion.py +0 -0
- {ragaai_catalyst-2.1.5b35 → ragaai_catalyst-2.1.5b36}/ragaai_catalyst/prompt_manager.py +0 -0
- {ragaai_catalyst-2.1.5b35 → ragaai_catalyst-2.1.5b36}/ragaai_catalyst/proxy_call.py +0 -0
- {ragaai_catalyst-2.1.5b35 → ragaai_catalyst-2.1.5b36}/ragaai_catalyst/ragaai_catalyst.py +0 -0
- {ragaai_catalyst-2.1.5b35 → ragaai_catalyst-2.1.5b36}/ragaai_catalyst/redteaming/__init__.py +0 -0
- {ragaai_catalyst-2.1.5b35 → ragaai_catalyst-2.1.5b36}/ragaai_catalyst/redteaming/config/detectors.toml +0 -0
- {ragaai_catalyst-2.1.5b35 → ragaai_catalyst-2.1.5b36}/ragaai_catalyst/redteaming/data_generator/scenario_generator.py +0 -0
- {ragaai_catalyst-2.1.5b35 → ragaai_catalyst-2.1.5b36}/ragaai_catalyst/redteaming/data_generator/test_case_generator.py +0 -0
- {ragaai_catalyst-2.1.5b35 → ragaai_catalyst-2.1.5b36}/ragaai_catalyst/redteaming/evaluator.py +0 -0
- {ragaai_catalyst-2.1.5b35 → ragaai_catalyst-2.1.5b36}/ragaai_catalyst/redteaming/llm_generator.py +0 -0
- {ragaai_catalyst-2.1.5b35 → ragaai_catalyst-2.1.5b36}/ragaai_catalyst/redteaming/llm_generator_old.py +0 -0
- {ragaai_catalyst-2.1.5b35 → ragaai_catalyst-2.1.5b36}/ragaai_catalyst/redteaming/red_teaming.py +0 -0
- {ragaai_catalyst-2.1.5b35 → ragaai_catalyst-2.1.5b36}/ragaai_catalyst/redteaming/requirements.txt +0 -0
- {ragaai_catalyst-2.1.5b35 → ragaai_catalyst-2.1.5b36}/ragaai_catalyst/redteaming/tests/grok.ipynb +0 -0
- {ragaai_catalyst-2.1.5b35 → ragaai_catalyst-2.1.5b36}/ragaai_catalyst/redteaming/tests/stereotype.ipynb +0 -0
- {ragaai_catalyst-2.1.5b35 → ragaai_catalyst-2.1.5b36}/ragaai_catalyst/redteaming/upload_result.py +0 -0
- {ragaai_catalyst-2.1.5b35 → ragaai_catalyst-2.1.5b36}/ragaai_catalyst/redteaming/utils/issue_description.py +0 -0
- {ragaai_catalyst-2.1.5b35 → ragaai_catalyst-2.1.5b36}/ragaai_catalyst/redteaming/utils/rt.png +0 -0
- {ragaai_catalyst-2.1.5b35 → ragaai_catalyst-2.1.5b36}/ragaai_catalyst/redteaming_old.py +0 -0
- {ragaai_catalyst-2.1.5b35 → ragaai_catalyst-2.1.5b36}/ragaai_catalyst/synthetic_data_generation.py +0 -0
- {ragaai_catalyst-2.1.5b35 → ragaai_catalyst-2.1.5b36}/ragaai_catalyst/tracers/__init__.py +0 -0
- {ragaai_catalyst-2.1.5b35 → ragaai_catalyst-2.1.5b36}/ragaai_catalyst/tracers/agentic_tracing/README.md +0 -0
- {ragaai_catalyst-2.1.5b35 → ragaai_catalyst-2.1.5b36}/ragaai_catalyst/tracers/agentic_tracing/__init__.py +0 -0
- {ragaai_catalyst-2.1.5b35 → ragaai_catalyst-2.1.5b36}/ragaai_catalyst/tracers/agentic_tracing/data/__init__.py +0 -0
- {ragaai_catalyst-2.1.5b35 → ragaai_catalyst-2.1.5b36}/ragaai_catalyst/tracers/agentic_tracing/data/data_structure.py +0 -0
- {ragaai_catalyst-2.1.5b35 → ragaai_catalyst-2.1.5b36}/ragaai_catalyst/tracers/agentic_tracing/tests/FinancialAnalysisSystem.ipynb +0 -0
- {ragaai_catalyst-2.1.5b35 → ragaai_catalyst-2.1.5b36}/ragaai_catalyst/tracers/agentic_tracing/tests/GameActivityEventPlanner.ipynb +0 -0
- {ragaai_catalyst-2.1.5b35 → ragaai_catalyst-2.1.5b36}/ragaai_catalyst/tracers/agentic_tracing/tests/TravelPlanner.ipynb +0 -0
- {ragaai_catalyst-2.1.5b35 → ragaai_catalyst-2.1.5b36}/ragaai_catalyst/tracers/agentic_tracing/tests/__init__.py +0 -0
- {ragaai_catalyst-2.1.5b35 → ragaai_catalyst-2.1.5b36}/ragaai_catalyst/tracers/agentic_tracing/tests/ai_travel_agent.py +0 -0
- {ragaai_catalyst-2.1.5b35 → ragaai_catalyst-2.1.5b36}/ragaai_catalyst/tracers/agentic_tracing/tests/unique_decorator_test.py +0 -0
- {ragaai_catalyst-2.1.5b35 → ragaai_catalyst-2.1.5b36}/ragaai_catalyst/tracers/agentic_tracing/tracers/__init__.py +0 -0
- {ragaai_catalyst-2.1.5b35 → ragaai_catalyst-2.1.5b36}/ragaai_catalyst/tracers/agentic_tracing/tracers/agent_tracer.py +0 -0
- {ragaai_catalyst-2.1.5b35 → ragaai_catalyst-2.1.5b36}/ragaai_catalyst/tracers/agentic_tracing/tracers/custom_tracer.py +0 -0
- {ragaai_catalyst-2.1.5b35 → ragaai_catalyst-2.1.5b36}/ragaai_catalyst/tracers/agentic_tracing/tracers/langgraph_tracer.py +0 -0
- {ragaai_catalyst-2.1.5b35 → ragaai_catalyst-2.1.5b36}/ragaai_catalyst/tracers/agentic_tracing/tracers/llm_tracer.py +0 -0
- {ragaai_catalyst-2.1.5b35 → ragaai_catalyst-2.1.5b36}/ragaai_catalyst/tracers/agentic_tracing/tracers/main_tracer.py +0 -0
- {ragaai_catalyst-2.1.5b35 → ragaai_catalyst-2.1.5b36}/ragaai_catalyst/tracers/agentic_tracing/tracers/network_tracer.py +0 -0
- {ragaai_catalyst-2.1.5b35 → ragaai_catalyst-2.1.5b36}/ragaai_catalyst/tracers/agentic_tracing/tracers/tool_tracer.py +0 -0
- {ragaai_catalyst-2.1.5b35 → ragaai_catalyst-2.1.5b36}/ragaai_catalyst/tracers/agentic_tracing/tracers/user_interaction_tracer.py +0 -0
- {ragaai_catalyst-2.1.5b35 → ragaai_catalyst-2.1.5b36}/ragaai_catalyst/tracers/agentic_tracing/upload/__init__.py +0 -0
- {ragaai_catalyst-2.1.5b35 → ragaai_catalyst-2.1.5b36}/ragaai_catalyst/tracers/agentic_tracing/upload/upload_agentic_traces.py +0 -0
- {ragaai_catalyst-2.1.5b35 → ragaai_catalyst-2.1.5b36}/ragaai_catalyst/tracers/agentic_tracing/upload/upload_code.py +0 -0
- {ragaai_catalyst-2.1.5b35 → ragaai_catalyst-2.1.5b36}/ragaai_catalyst/tracers/agentic_tracing/upload/upload_local_metric.py +0 -0
- {ragaai_catalyst-2.1.5b35 → ragaai_catalyst-2.1.5b36}/ragaai_catalyst/tracers/agentic_tracing/upload/upload_trace_metric.py +0 -0
- {ragaai_catalyst-2.1.5b35 → ragaai_catalyst-2.1.5b36}/ragaai_catalyst/tracers/agentic_tracing/utils/__init__.py +0 -0
- {ragaai_catalyst-2.1.5b35 → ragaai_catalyst-2.1.5b36}/ragaai_catalyst/tracers/agentic_tracing/utils/api_utils.py +0 -0
- {ragaai_catalyst-2.1.5b35 → ragaai_catalyst-2.1.5b36}/ragaai_catalyst/tracers/agentic_tracing/utils/create_dataset_schema.py +0 -0
- {ragaai_catalyst-2.1.5b35 → ragaai_catalyst-2.1.5b36}/ragaai_catalyst/tracers/agentic_tracing/utils/file_name_tracker.py +0 -0
- {ragaai_catalyst-2.1.5b35 → ragaai_catalyst-2.1.5b36}/ragaai_catalyst/tracers/agentic_tracing/utils/generic.py +0 -0
- {ragaai_catalyst-2.1.5b35 → ragaai_catalyst-2.1.5b36}/ragaai_catalyst/tracers/agentic_tracing/utils/get_user_trace_metrics.py +0 -0
- {ragaai_catalyst-2.1.5b35 → ragaai_catalyst-2.1.5b36}/ragaai_catalyst/tracers/agentic_tracing/utils/llm_utils.py +0 -0
- {ragaai_catalyst-2.1.5b35 → ragaai_catalyst-2.1.5b36}/ragaai_catalyst/tracers/agentic_tracing/utils/model_costs.json +0 -0
- {ragaai_catalyst-2.1.5b35 → ragaai_catalyst-2.1.5b36}/ragaai_catalyst/tracers/agentic_tracing/utils/span_attributes.py +0 -0
- {ragaai_catalyst-2.1.5b35 → ragaai_catalyst-2.1.5b36}/ragaai_catalyst/tracers/agentic_tracing/utils/supported_llm_provider.toml +0 -0
- {ragaai_catalyst-2.1.5b35 → ragaai_catalyst-2.1.5b36}/ragaai_catalyst/tracers/agentic_tracing/utils/system_monitor.py +0 -0
- {ragaai_catalyst-2.1.5b35 → ragaai_catalyst-2.1.5b36}/ragaai_catalyst/tracers/agentic_tracing/utils/trace_utils.py +0 -0
- {ragaai_catalyst-2.1.5b35 → ragaai_catalyst-2.1.5b36}/ragaai_catalyst/tracers/agentic_tracing/utils/unique_decorator.py +0 -0
- {ragaai_catalyst-2.1.5b35 → ragaai_catalyst-2.1.5b36}/ragaai_catalyst/tracers/agentic_tracing/utils/zip_list_of_unique_files.py +0 -0
- {ragaai_catalyst-2.1.5b35 → ragaai_catalyst-2.1.5b36}/ragaai_catalyst/tracers/distributed.py +0 -0
- {ragaai_catalyst-2.1.5b35 → ragaai_catalyst-2.1.5b36}/ragaai_catalyst/tracers/exporters/__init__.py +0 -0
- {ragaai_catalyst-2.1.5b35 → ragaai_catalyst-2.1.5b36}/ragaai_catalyst/tracers/exporters/file_span_exporter.py +0 -0
- {ragaai_catalyst-2.1.5b35 → ragaai_catalyst-2.1.5b36}/ragaai_catalyst/tracers/exporters/raga_exporter.py +0 -0
- {ragaai_catalyst-2.1.5b35 → ragaai_catalyst-2.1.5b36}/ragaai_catalyst/tracers/instrumentators/__init__.py +0 -0
- {ragaai_catalyst-2.1.5b35 → ragaai_catalyst-2.1.5b36}/ragaai_catalyst/tracers/instrumentators/langchain.py +0 -0
- {ragaai_catalyst-2.1.5b35 → ragaai_catalyst-2.1.5b36}/ragaai_catalyst/tracers/instrumentators/llamaindex.py +0 -0
- {ragaai_catalyst-2.1.5b35 → ragaai_catalyst-2.1.5b36}/ragaai_catalyst/tracers/instrumentators/openai.py +0 -0
- {ragaai_catalyst-2.1.5b35 → ragaai_catalyst-2.1.5b36}/ragaai_catalyst/tracers/langchain_callback.py +0 -0
- {ragaai_catalyst-2.1.5b35 → ragaai_catalyst-2.1.5b36}/ragaai_catalyst/tracers/llamaindex_callback.py +0 -0
- {ragaai_catalyst-2.1.5b35 → ragaai_catalyst-2.1.5b36}/ragaai_catalyst/tracers/llamaindex_instrumentation.py +0 -0
- {ragaai_catalyst-2.1.5b35 → ragaai_catalyst-2.1.5b36}/ragaai_catalyst/tracers/tracer.py +0 -0
- {ragaai_catalyst-2.1.5b35 → ragaai_catalyst-2.1.5b36}/ragaai_catalyst/tracers/upload_traces.py +0 -0
- {ragaai_catalyst-2.1.5b35 → ragaai_catalyst-2.1.5b36}/ragaai_catalyst/tracers/utils/__init__.py +0 -0
- {ragaai_catalyst-2.1.5b35 → ragaai_catalyst-2.1.5b36}/ragaai_catalyst/tracers/utils/convert_langchain_callbacks_output.py +0 -0
- {ragaai_catalyst-2.1.5b35 → ragaai_catalyst-2.1.5b36}/ragaai_catalyst/tracers/utils/convert_llama_instru_callback.py +0 -0
- {ragaai_catalyst-2.1.5b35 → ragaai_catalyst-2.1.5b36}/ragaai_catalyst/tracers/utils/extraction_logic_llama_index.py +0 -0
- {ragaai_catalyst-2.1.5b35 → ragaai_catalyst-2.1.5b36}/ragaai_catalyst/tracers/utils/langchain_tracer_extraction_logic.py +0 -0
- {ragaai_catalyst-2.1.5b35 → ragaai_catalyst-2.1.5b36}/ragaai_catalyst/tracers/utils/model_prices_and_context_window_backup.json +0 -0
- {ragaai_catalyst-2.1.5b35 → ragaai_catalyst-2.1.5b36}/ragaai_catalyst/tracers/utils/utils.py +0 -0
- {ragaai_catalyst-2.1.5b35 → ragaai_catalyst-2.1.5b36}/ragaai_catalyst/utils.py +0 -0
- {ragaai_catalyst-2.1.5b35 → ragaai_catalyst-2.1.5b36}/ragaai_catalyst.egg-info/SOURCES.txt +0 -0
- {ragaai_catalyst-2.1.5b35 → ragaai_catalyst-2.1.5b36}/ragaai_catalyst.egg-info/dependency_links.txt +0 -0
- {ragaai_catalyst-2.1.5b35 → ragaai_catalyst-2.1.5b36}/ragaai_catalyst.egg-info/requires.txt +0 -0
- {ragaai_catalyst-2.1.5b35 → ragaai_catalyst-2.1.5b36}/ragaai_catalyst.egg-info/top_level.txt +0 -0
- {ragaai_catalyst-2.1.5b35 → ragaai_catalyst-2.1.5b36}/requirements.txt +0 -0
- {ragaai_catalyst-2.1.5b35 → ragaai_catalyst-2.1.5b36}/setup.cfg +0 -0
- {ragaai_catalyst-2.1.5b35 → ragaai_catalyst-2.1.5b36}/test/test_catalyst/autonomous_research_agent/.env.example +0 -0
- {ragaai_catalyst-2.1.5b35 → ragaai_catalyst-2.1.5b36}/test/test_catalyst/autonomous_research_agent/agents/base_agent.py +0 -0
- {ragaai_catalyst-2.1.5b35 → ragaai_catalyst-2.1.5b36}/test/test_catalyst/autonomous_research_agent/agents/coordinator.py +0 -0
- {ragaai_catalyst-2.1.5b35 → ragaai_catalyst-2.1.5b36}/test/test_catalyst/autonomous_research_agent/agents/discovery.py +0 -0
- {ragaai_catalyst-2.1.5b35 → ragaai_catalyst-2.1.5b36}/test/test_catalyst/autonomous_research_agent/agents/synthesis.py +0 -0
- {ragaai_catalyst-2.1.5b35 → ragaai_catalyst-2.1.5b36}/test/test_catalyst/autonomous_research_agent/research_script.py +0 -0
- {ragaai_catalyst-2.1.5b35 → ragaai_catalyst-2.1.5b36}/test/test_catalyst/autonomous_research_agent/utils/llm.py +0 -0
- {ragaai_catalyst-2.1.5b35 → ragaai_catalyst-2.1.5b36}/test/test_catalyst/test_base_tracer_add_metrics.py +0 -0
- {ragaai_catalyst-2.1.5b35 → ragaai_catalyst-2.1.5b36}/test/test_catalyst/test_base_tracer_metrics.py +0 -0
- {ragaai_catalyst-2.1.5b35 → ragaai_catalyst-2.1.5b36}/test/test_catalyst/test_configuration.py +0 -0
- {ragaai_catalyst-2.1.5b35 → ragaai_catalyst-2.1.5b36}/test/test_catalyst/test_dataset.py +0 -0
- {ragaai_catalyst-2.1.5b35 → ragaai_catalyst-2.1.5b36}/test/test_catalyst/test_evaluation.py +0 -0
- {ragaai_catalyst-2.1.5b35 → ragaai_catalyst-2.1.5b36}/test/test_catalyst/test_evaluation_metrics.py +0 -0
- {ragaai_catalyst-2.1.5b35 → ragaai_catalyst-2.1.5b36}/test/test_catalyst/test_langchain_tracing.py +0 -0
- {ragaai_catalyst-2.1.5b35 → ragaai_catalyst-2.1.5b36}/test/test_catalyst/test_llm_providers.py +0 -0
- {ragaai_catalyst-2.1.5b35 → ragaai_catalyst-2.1.5b36}/test/test_catalyst/test_prompt_manager.py +0 -0
- {ragaai_catalyst-2.1.5b35 → ragaai_catalyst-2.1.5b36}/test/test_catalyst/test_redteaming.py +0 -0
- {ragaai_catalyst-2.1.5b35 → ragaai_catalyst-2.1.5b36}/test/test_catalyst/test_synthetic_data_generation.py +0 -0
- {ragaai_catalyst-2.1.5b35 → ragaai_catalyst-2.1.5b36}/test/test_catalyst/upload_trace_zip_automation.py +0 -0
{ragaai_catalyst-2.1.5b35/ragaai_catalyst.egg-info → ragaai_catalyst-2.1.5b36}/PKG-INFO

@@ -1,6 +1,6 @@
 Metadata-Version: 2.2
 Name: ragaai_catalyst
-Version: 2.1.5b35
+Version: 2.1.5b36
 Summary: RAGA AI CATALYST
 Author-email: Kiran Scaria <kiran.scaria@raga.ai>, Kedar Gaikwad <kedar.gaikwad@raga.ai>, Dushyant Mahajan <dushyant.mahajan@raga.ai>, Siddhartha Kosti <siddhartha.kosti@raga.ai>, Ritika Goel <ritika.goel@raga.ai>, Vijay Chaurasia <vijay.chaurasia@raga.ai>, Tushar Kumar <tushar.kumar@raga.ai>
 Requires-Python: <3.13,>=3.9

{ragaai_catalyst-2.1.5b35 → ragaai_catalyst-2.1.5b36}/pyproject.toml

@@ -8,7 +8,7 @@ description = "RAGA AI CATALYST"
 readme = "README.md"
 requires-python = ">=3.9,<3.13"
 # license = {file = "LICENSE"}
-version = "2.1.5.b35"
+version = "2.1.5.b36"
 authors = [
     {name = "Kiran Scaria", email = "kiran.scaria@raga.ai"},
     {name = "Kedar Gaikwad", email = "kedar.gaikwad@raga.ai"},

{ragaai_catalyst-2.1.5b35 → ragaai_catalyst-2.1.5b36}/ragaai_catalyst/tracers/agentic_tracing/tracers/base.py

@@ -305,7 +305,7 @@ class BaseTracer:

             logger.debug("Base URL used for uploading: {}".format(self.base_url))

-            # Submit to background process for uploading
+            # Submit to background process for uploading using futures
             self.upload_task_id = submit_upload_task(
                 filepath=filepath,
                 hash_id=hash_id,
@@ -317,28 +317,28 @@ class BaseTracer:
                 base_url=self.base_url
             )

-            #
-
+            # For backward compatibility
+            self._is_uploading = True

-            #
-
-
-
-
-
-
-
-
-
-
-
+            # Start checking for completion if a callback is registered
+            if self._upload_completed_callback:
+                # Start a thread to check status and call callback when complete
+                def check_status_and_callback():
+                    status = self.get_upload_status()
+                    if status.get("status") in ["completed", "failed"]:
+                        self._is_uploading = False
+                        # Execute callback
+                        try:
+                            self._upload_completed_callback(self)
+                        except Exception as e:
+                            logger.error(f"Error in upload completion callback: {e}")
+                        return

-
-
+                    # Check again after a delay
+                    threading.Timer(5.0, check_status_and_callback).start()

-
-
+                # Start checking
+                threading.Timer(5.0, check_status_and_callback).start()

             logger.info(f"Submitted upload task with ID: {self.upload_task_id}")

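The rewritten block above replaces the old post-submit logic with a timer-based poll: after `submit_upload_task` returns, the tracer sets `self._is_uploading = True` and, if an upload-completed callback is registered, re-checks `self.get_upload_status()` every five seconds until it reports `completed` or `failed`, then fires the callback. A minimal standalone sketch of that polling pattern follows; the `UploadWatcher` class and its names are illustrative only and not part of the package.

```python
import threading


class UploadWatcher:
    """Illustrative stand-in for the polling logic added to BaseTracer (not package code)."""

    def __init__(self, get_status, on_complete, interval=5.0):
        self._get_status = get_status      # callable returning a dict like {"status": "processing"}
        self._on_complete = on_complete    # called once when the upload completes or fails
        self._interval = interval
        self.is_uploading = True

    def start(self):
        # Schedule the first status check, mirroring threading.Timer(5.0, ...) in the diff.
        threading.Timer(self._interval, self._check).start()

    def _check(self):
        status = self._get_status()
        if status.get("status") in ("completed", "failed"):
            self.is_uploading = False
            self._on_complete(status)
            return
        # Not finished yet: schedule the next check after the same delay.
        threading.Timer(self._interval, self._check).start()
```

For example, `UploadWatcher(get_status=lambda: {"status": "completed"}, on_complete=print).start()` fires the callback after one check.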
ragaai_catalyst-2.1.5b36/ragaai_catalyst/tracers/agentic_tracing/upload/trace_uploader.py

@@ -0,0 +1,355 @@
+"""
+trace_uploader.py - A dedicated process for handling trace uploads
+"""
+
+import os
+import sys
+import json
+import time
+import signal
+import logging
+import argparse
+import tempfile
+from pathlib import Path
+import multiprocessing
+import queue
+from datetime import datetime
+import atexit
+import glob
+from logging.handlers import RotatingFileHandler
+import concurrent.futures
+from typing import Dict, Any, Optional
+
+# Set up logging
+log_dir = os.path.join(tempfile.gettempdir(), "ragaai_logs")
+os.makedirs(log_dir, exist_ok=True)
+
+# Define maximum file size (e.g., 5 MB) and backup count
+max_file_size = 5 * 1024 * 1024  # 5 MB
+backup_count = 1  # Number of backup files to keep
+
+logging.basicConfig(
+    level=logging.DEBUG,
+    format='%(asctime)s - %(name)s - %(levelname)s - %(message)s',
+    handlers=[
+        logging.StreamHandler(),
+        RotatingFileHandler(
+            os.path.join(log_dir, "trace_uploader.log"),
+            maxBytes=max_file_size,
+            backupCount=backup_count
+        )
+    ]
+)
+logger = logging.getLogger("trace_uploader")
+
+try:
+    from ragaai_catalyst.tracers.agentic_tracing.upload.upload_agentic_traces import UploadAgenticTraces
+    from ragaai_catalyst.tracers.agentic_tracing.upload.upload_code import upload_code
+    from ragaai_catalyst.tracers.agentic_tracing.upload.upload_trace_metric import upload_trace_metric
+    from ragaai_catalyst.tracers.agentic_tracing.utils.create_dataset_schema import create_dataset_schema_with_trace
+    from ragaai_catalyst import RagaAICatalyst
+    IMPORTS_AVAILABLE = True
+except ImportError:
+    logger.warning("RagaAI Catalyst imports not available - running in test mode")
+    IMPORTS_AVAILABLE = False
+
+# Define task queue directory
+QUEUE_DIR = os.path.join(tempfile.gettempdir(), "ragaai_tasks")
+os.makedirs(QUEUE_DIR, exist_ok=True)
+
+# Status codes
+STATUS_PENDING = "pending"
+STATUS_PROCESSING = "processing"
+STATUS_COMPLETED = "completed"
+STATUS_FAILED = "failed"
+
+# Global executor for handling uploads
+_executor = None
+# Dictionary to track futures and their associated task IDs
+_futures: Dict[str, Any] = {}
+
+def get_executor():
+    """Get or create the thread pool executor"""
+    global _executor
+    if _executor is None:
+        _executor = concurrent.futures.ThreadPoolExecutor(max_workers=8, thread_name_prefix="trace_uploader")
+    return _executor
+
+def process_upload(task_id: str, filepath: str, hash_id: str, zip_path: str,
+                   project_name: str, project_id: str, dataset_name: str,
+                   user_details: Dict[str, Any], base_url: str) -> Dict[str, Any]:
+    """
+    Process a single upload task
+
+    Args:
+        task_id: Unique identifier for the task
+        filepath: Path to the trace file
+        hash_id: Hash ID for the code
+        zip_path: Path to the code zip file
+        project_name: Project name
+        project_id: Project ID
+        dataset_name: Dataset name
+        user_details: User details dictionary
+        base_url: Base URL for API calls
+
+    Returns:
+        Dict containing status and any error information
+    """
+    logger.info(f"Processing upload task {task_id}")
+    result = {
+        "task_id": task_id,
+        "status": STATUS_PROCESSING,
+        "error": None,
+        "start_time": datetime.now().isoformat()
+    }
+
+    # Save initial status to file
+    save_task_status(result)
+
+    try:
+        # Check if file exists
+        if not os.path.exists(filepath):
+            error_msg = f"Task filepath does not exist: {filepath}"
+            logger.error(error_msg)
+            result["status"] = STATUS_FAILED
+            result["error"] = error_msg
+            save_task_status(result)
+            return result
+
+        if not IMPORTS_AVAILABLE:
+            logger.warning(f"Test mode: Simulating processing of task {task_id}")
+            time.sleep(2)  # Simulate work
+            result["status"] = STATUS_COMPLETED
+            save_task_status(result)
+            return result
+
+        # Step 1: Create dataset schema
+        logger.info(f"Creating dataset schema for {dataset_name} with base_url: {base_url}")
+        try:
+            response = create_dataset_schema_with_trace(
+                dataset_name=dataset_name,
+                project_name=project_name,
+                base_url=base_url
+            )
+            logger.info(f"Dataset schema created: {response}")
+        except Exception as e:
+            logger.error(f"Error creating dataset schema: {e}")
+            # Continue with other steps
+
+        # Step 2: Upload trace metrics
+        if filepath and os.path.exists(filepath):
+            logger.info(f"Uploading trace metrics for {filepath}")
+            try:
+                response = upload_trace_metric(
+                    json_file_path=filepath,
+                    dataset_name=dataset_name,
+                    project_name=project_name,
+                    base_url=base_url
+                )
+                logger.info(f"Trace metrics uploaded: {response}")
+            except Exception as e:
+                logger.error(f"Error uploading trace metrics: {e}")
+                # Continue with other uploads
+        else:
+            logger.warning(f"Trace file {filepath} not found, skipping metrics upload")
+
+        # Step 3: Upload agentic traces
+        if filepath and os.path.exists(filepath):
+            logger.info(f"Uploading agentic traces for {filepath}")
+            try:
+                upload_traces = UploadAgenticTraces(
+                    json_file_path=filepath,
+                    project_name=project_name,
+                    project_id=project_id,
+                    dataset_name=dataset_name,
+                    user_detail=user_details,
+                    base_url=base_url,
+                )
+                upload_traces.upload_agentic_traces()
+                logger.info("Agentic traces uploaded successfully")
+            except Exception as e:
+                logger.error(f"Error uploading agentic traces: {e}")
+                # Continue with code upload
+        else:
+            logger.warning(f"Trace file {filepath} not found, skipping traces upload")
+
+        # Step 4: Upload code hash
+        if hash_id and zip_path and os.path.exists(zip_path):
+            logger.info(f"Uploading code hash {hash_id}")
+            try:
+                response = upload_code(
+                    hash_id=hash_id,
+                    zip_path=zip_path,
+                    project_name=project_name,
+                    dataset_name=dataset_name,
+                    base_url=base_url
+                )
+                logger.info(f"Code hash uploaded: {response}")
+            except Exception as e:
+                logger.error(f"Error uploading code hash: {e}")
+        else:
+            logger.warning(f"Code zip {zip_path} not found, skipping code upload")
+
+        # Mark task as completed
+        result["status"] = STATUS_COMPLETED
+        result["end_time"] = datetime.now().isoformat()
+        logger.info(f"Task {task_id} completed successfully")
+
+    except Exception as e:
+        logger.error(f"Error processing task {task_id}: {e}")
+        result["status"] = STATUS_FAILED
+        result["error"] = str(e)
+        result["end_time"] = datetime.now().isoformat()
+
+    # Save final status
+    save_task_status(result)
+    return result
+
+def save_task_status(task_status: Dict[str, Any]):
+    """Save task status to a file"""
+    task_id = task_status["task_id"]
+    status_path = os.path.join(QUEUE_DIR, f"{task_id}_status.json")
+    with open(status_path, "w") as f:
+        json.dump(task_status, f, indent=2)
+
+def submit_upload_task(filepath, hash_id, zip_path, project_name, project_id, dataset_name, user_details, base_url):
+    """
+    Submit a new upload task using futures.
+
+    Args:
+        filepath: Path to the trace file
+        hash_id: Hash ID for the code
+        zip_path: Path to the code zip file
+        project_name: Project name
+        project_id: Project ID
+        dataset_name: Dataset name
+        user_details: User details dictionary
+        base_url: Base URL for API calls
+
+    Returns:
+        str: Task ID
+    """
+    logger.info(f"Submitting new upload task for file: {filepath}")
+    logger.debug(f"Task details - Project: {project_name}, Dataset: {dataset_name}, Hash: {hash_id}, Base_URL: {base_url}")
+
+    # Verify the trace file exists
+    if not os.path.exists(filepath):
+        logger.error(f"Trace file not found: {filepath}")
+        return None
+
+    # Create absolute path to the trace file
+    filepath = os.path.abspath(filepath)
+    logger.debug(f"Using absolute filepath: {filepath}")
+
+    # Generate a unique task ID
+    task_id = f"task_{int(time.time())}_{os.getpid()}_{hash(str(time.time()))}"
+
+    # Submit the task to the executor
+    executor = get_executor()
+    future = executor.submit(
+        process_upload,
+        task_id=task_id,
+        filepath=filepath,
+        hash_id=hash_id,
+        zip_path=zip_path,
+        project_name=project_name,
+        project_id=project_id,
+        dataset_name=dataset_name,
+        user_details=user_details,
+        base_url=base_url
+    )
+
+    # Store the future for later status checks
+    _futures[task_id] = future
+
+    # Create initial status
+    initial_status = {
+        "task_id": task_id,
+        "status": STATUS_PENDING,
+        "error": None,
+        "start_time": datetime.now().isoformat()
+    }
+    save_task_status(initial_status)
+
+    return task_id
+
+def get_task_status(task_id):
+    """
+    Get the status of a task by ID.
+
+    Args:
+        task_id: Task ID to check
+
+    Returns:
+        dict: Task status information
+    """
+    logger.debug(f"Getting status for task {task_id}")
+
+    # Check if we have a future for this task
+    future = _futures.get(task_id)
+
+    # If we have a future, check its status
+    if future:
+        if future.done():
+            try:
+                # Get the result (this will re-raise any exception that occurred)
+                result = future.result(timeout=0)
+                return result
+            except concurrent.futures.TimeoutError:
+                return {"status": STATUS_PROCESSING, "error": None}
+            except Exception as e:
+                logger.error(f"Error retrieving future result for task {task_id}: {e}")
+                return {"status": STATUS_FAILED, "error": str(e)}
+        else:
+            return {"status": STATUS_PROCESSING, "error": None}
+
+    # If we don't have a future, try to read from the status file
+    status_path = os.path.join(QUEUE_DIR, f"{task_id}_status.json")
+    if os.path.exists(status_path):
+        try:
+            with open(status_path, "r") as f:
+                return json.load(f)
+        except Exception as e:
+            logger.error(f"Error reading status file for task {task_id}: {e}")
+            return {"status": "unknown", "error": f"Error reading status: {e}"}
+
+    return {"status": "unknown", "error": "Task not found"}
+
+def shutdown():
+    """Shutdown the executor"""
+    global _executor
+    if _executor:
+        logger.info("Shutting down executor")
+        _executor.shutdown(wait=False)
+        _executor = None
+
+# Register shutdown handler
+atexit.register(shutdown)
+
+# For backward compatibility
+def ensure_uploader_running():
+    """
+    Ensure the uploader is running.
+    This is a no-op in the futures implementation, but kept for API compatibility.
+    """
+    get_executor()  # Just ensure the executor is created
+    return True
+
+# For backward compatibility with the old daemon mode
+def run_daemon():
+    """
+    Run the uploader as a daemon process.
+    This is a no-op in the futures implementation, but kept for API compatibility.
+    """
+    logger.info("Daemon mode not needed in futures implementation")
+    return
+
+if __name__ == "__main__":
+    parser = argparse.ArgumentParser(description="Trace uploader process")
+    parser.add_argument("--daemon", action="store_true", help="Run as daemon process")
+    args = parser.parse_args()
+
+    if args.daemon:
+        logger.info("Daemon mode not needed in futures implementation")
+    else:
+        logger.info("Interactive mode not needed in futures implementation")
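The new 355-line module above replaces the 684-line daemon-mode uploader that was removed in this release with a futures-based API: `submit_upload_task()` schedules `process_upload()` on a shared `ThreadPoolExecutor` and returns a task ID, while `get_task_status()` consults the in-memory future first and falls back to the JSON status file under the temporary queue directory. A minimal caller sketch is shown below; the file paths, project and dataset names, and base URL are placeholders, not values from the package.

```python
# Hypothetical caller of the new uploader API; all argument values are placeholders.
import time

from ragaai_catalyst.tracers.agentic_tracing.upload.trace_uploader import (
    get_task_status,
    submit_upload_task,
)

task_id = submit_upload_task(
    filepath="traces/trace_123.json",        # placeholder trace JSON
    hash_id="abc123",                        # placeholder code hash
    zip_path="traces/code_abc123.zip",       # placeholder code archive
    project_name="demo-project",
    project_id="1",
    dataset_name="demo-dataset",
    user_details={},
    base_url="https://catalyst.example.com/api",  # placeholder endpoint
)

if task_id is not None:  # submit_upload_task returns None when the trace file is missing
    while True:
        status = get_task_status(task_id)
        if status.get("status") in ("completed", "failed", "unknown"):
            print(status)
            break
        time.sleep(2)  # poll until the background upload finishes
```

Because the executor is created lazily and shut down via `atexit`, this flow needs no separate daemon process.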
{ragaai_catalyst-2.1.5b35 → ragaai_catalyst-2.1.5b36/ragaai_catalyst.egg-info}/PKG-INFO

@@ -1,6 +1,6 @@
 Metadata-Version: 2.2
 Name: ragaai_catalyst
-Version: 2.1.5b35
+Version: 2.1.5b36
 Summary: RAGA AI CATALYST
 Author-email: Kiran Scaria <kiran.scaria@raga.ai>, Kedar Gaikwad <kedar.gaikwad@raga.ai>, Dushyant Mahajan <dushyant.mahajan@raga.ai>, Siddhartha Kosti <siddhartha.kosti@raga.ai>, Ritika Goel <ritika.goel@raga.ai>, Vijay Chaurasia <vijay.chaurasia@raga.ai>, Tushar Kumar <tushar.kumar@raga.ai>
 Requires-Python: <3.13,>=3.9