ragaai-catalyst 2.1.6b2__tar.gz → 2.1.6.1__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {ragaai_catalyst-2.1.6b2 → ragaai_catalyst-2.1.6.1}/PKG-INFO +5 -4
- {ragaai_catalyst-2.1.6b2 → ragaai_catalyst-2.1.6.1}/examples/crewai/scifi_writer/scifi_writer.py +3 -7
- ragaai_catalyst-2.1.6.1/examples/openai_agents_sdk/README.md +71 -0
- ragaai_catalyst-2.1.6.1/examples/openai_agents_sdk/data_extraction_email.py +189 -0
- ragaai_catalyst-2.1.6.1/examples/openai_agents_sdk/requirements.txt +3 -0
- ragaai_catalyst-2.1.6.1/examples/openai_agents_sdk/sample.env +6 -0
- {ragaai_catalyst-2.1.6b2 → ragaai_catalyst-2.1.6.1}/pyproject.toml +6 -6
- {ragaai_catalyst-2.1.6b2 → ragaai_catalyst-2.1.6.1}/ragaai_catalyst/guard_executor.py +1 -0
- {ragaai_catalyst-2.1.6b2 → ragaai_catalyst-2.1.6.1}/ragaai_catalyst/tracers/agentic_tracing/tracers/base.py +3 -1
- {ragaai_catalyst-2.1.6b2 → ragaai_catalyst-2.1.6.1}/ragaai_catalyst/tracers/agentic_tracing/tracers/main_tracer.py +2 -2
- {ragaai_catalyst-2.1.6b2 → ragaai_catalyst-2.1.6.1}/ragaai_catalyst/tracers/agentic_tracing/upload/trace_uploader.py +9 -5
- {ragaai_catalyst-2.1.6b2 → ragaai_catalyst-2.1.6.1}/ragaai_catalyst/tracers/agentic_tracing/upload/upload_agentic_traces.py +1 -1
- {ragaai_catalyst-2.1.6b2 → ragaai_catalyst-2.1.6.1}/ragaai_catalyst/tracers/agentic_tracing/upload/upload_code.py +13 -13
- {ragaai_catalyst-2.1.6b2 → ragaai_catalyst-2.1.6.1}/ragaai_catalyst/tracers/agentic_tracing/upload/upload_trace_metric.py +2 -2
- {ragaai_catalyst-2.1.6b2 → ragaai_catalyst-2.1.6.1}/ragaai_catalyst/tracers/exporters/dynamic_trace_exporter.py +3 -2
- {ragaai_catalyst-2.1.6b2 → ragaai_catalyst-2.1.6.1}/ragaai_catalyst/tracers/exporters/ragaai_trace_exporter.py +4 -2
- {ragaai_catalyst-2.1.6b2 → ragaai_catalyst-2.1.6.1}/ragaai_catalyst/tracers/tracer.py +31 -12
- {ragaai_catalyst-2.1.6b2 → ragaai_catalyst-2.1.6.1}/ragaai_catalyst/tracers/upload_traces.py +5 -2
- {ragaai_catalyst-2.1.6b2 → ragaai_catalyst-2.1.6.1}/ragaai_catalyst/tracers/utils/trace_json_converter.py +39 -24
- {ragaai_catalyst-2.1.6b2 → ragaai_catalyst-2.1.6.1}/ragaai_catalyst.egg-info/PKG-INFO +5 -4
- {ragaai_catalyst-2.1.6b2 → ragaai_catalyst-2.1.6.1}/ragaai_catalyst.egg-info/SOURCES.txt +4 -4
- {ragaai_catalyst-2.1.6b2 → ragaai_catalyst-2.1.6.1}/ragaai_catalyst.egg-info/requires.txt +1 -1
- {ragaai_catalyst-2.1.6b2 → ragaai_catalyst-2.1.6.1}/requirements.txt +2 -2
- ragaai_catalyst-2.1.6b2/examples/autogen/crisis_response_coordinator/OAI_CONFIG_LIST.json +0 -6
- ragaai_catalyst-2.1.6b2/examples/autogen/crisis_response_coordinator/crisis_response_coordinator.py +0 -192
- ragaai_catalyst-2.1.6b2/examples/autogen/crisis_response_coordinator/requirements.txt +0 -1
- ragaai_catalyst-2.1.6b2/examples/autogen/crisis_response_coordinator/sample.env +0 -7
- {ragaai_catalyst-2.1.6b2 → ragaai_catalyst-2.1.6.1}/.github/ISSUE_TEMPLATE/bug_report.md +0 -0
- {ragaai_catalyst-2.1.6b2 → ragaai_catalyst-2.1.6.1}/.github/ISSUE_TEMPLATE/feature_request.md +0 -0
- {ragaai_catalyst-2.1.6b2 → ragaai_catalyst-2.1.6.1}/.github/PULL_REQUEST_TEMPLATE.md +0 -0
- {ragaai_catalyst-2.1.6b2 → ragaai_catalyst-2.1.6.1}/.gitignore +0 -0
- {ragaai_catalyst-2.1.6b2 → ragaai_catalyst-2.1.6.1}/.gitmodules +0 -0
- {ragaai_catalyst-2.1.6b2 → ragaai_catalyst-2.1.6.1}/LICENSE +0 -0
- {ragaai_catalyst-2.1.6b2 → ragaai_catalyst-2.1.6.1}/README.md +0 -0
- {ragaai_catalyst-2.1.6b2 → ragaai_catalyst-2.1.6.1}/docs/dataset_management.md +0 -0
- {ragaai_catalyst-2.1.6b2 → ragaai_catalyst-2.1.6.1}/docs/img/autheticate.gif +0 -0
- {ragaai_catalyst-2.1.6b2 → ragaai_catalyst-2.1.6.1}/docs/img/create_project.gif +0 -0
- {ragaai_catalyst-2.1.6b2 → ragaai_catalyst-2.1.6.1}/docs/img/custom_metrics.png +0 -0
- {ragaai_catalyst-2.1.6b2 → ragaai_catalyst-2.1.6.1}/docs/img/dataset.gif +0 -0
- {ragaai_catalyst-2.1.6b2 → ragaai_catalyst-2.1.6.1}/docs/img/dataset.png +0 -0
- {ragaai_catalyst-2.1.6b2 → ragaai_catalyst-2.1.6.1}/docs/img/evaluation.gif +0 -0
- {ragaai_catalyst-2.1.6b2 → ragaai_catalyst-2.1.6.1}/docs/img/evaluation.png +0 -0
- {ragaai_catalyst-2.1.6b2 → ragaai_catalyst-2.1.6.1}/docs/img/guardrails.png +0 -0
- {ragaai_catalyst-2.1.6b2 → ragaai_catalyst-2.1.6.1}/docs/img/last_main.png +0 -0
- {ragaai_catalyst-2.1.6b2 → ragaai_catalyst-2.1.6.1}/docs/img/main.png +0 -0
- {ragaai_catalyst-2.1.6b2 → ragaai_catalyst-2.1.6.1}/docs/img/projects_new.png +0 -0
- {ragaai_catalyst-2.1.6b2 → ragaai_catalyst-2.1.6.1}/docs/img/trace_comp.png +0 -0
- {ragaai_catalyst-2.1.6b2 → ragaai_catalyst-2.1.6.1}/docs/prompt_management.md +0 -0
- {ragaai_catalyst-2.1.6b2 → ragaai_catalyst-2.1.6.1}/docs/trace_management.md +0 -0
- {ragaai_catalyst-2.1.6b2 → ragaai_catalyst-2.1.6.1}/examples/crewai/scifi_writer/README.md +0 -0
- {ragaai_catalyst-2.1.6b2 → ragaai_catalyst-2.1.6.1}/examples/crewai/scifi_writer/requirements.txt +0 -0
- {ragaai_catalyst-2.1.6b2 → ragaai_catalyst-2.1.6.1}/examples/crewai/scifi_writer/sample.env +0 -0
- {ragaai_catalyst-2.1.6b2 → ragaai_catalyst-2.1.6.1}/examples/custom_agents/travel_agent/agents.py +0 -0
- {ragaai_catalyst-2.1.6b2 → ragaai_catalyst-2.1.6.1}/examples/custom_agents/travel_agent/config.py +0 -0
- {ragaai_catalyst-2.1.6b2 → ragaai_catalyst-2.1.6.1}/examples/custom_agents/travel_agent/main.py +0 -0
- {ragaai_catalyst-2.1.6b2 → ragaai_catalyst-2.1.6.1}/examples/custom_agents/travel_agent/tools.py +0 -0
- {ragaai_catalyst-2.1.6b2 → ragaai_catalyst-2.1.6.1}/examples/haystack/rag/README.md +0 -0
- {ragaai_catalyst-2.1.6b2 → ragaai_catalyst-2.1.6.1}/examples/haystack/rag/rag.py +0 -0
- {ragaai_catalyst-2.1.6b2 → ragaai_catalyst-2.1.6.1}/examples/haystack/rag/requirements.txt +0 -0
- {ragaai_catalyst-2.1.6b2 → ragaai_catalyst-2.1.6.1}/examples/haystack/rag/sample.env +0 -0
- {ragaai_catalyst-2.1.6b2 → ragaai_catalyst-2.1.6.1}/examples/langchain/medical_rag/data/medical_texts/handbook1.pdf +0 -0
- {ragaai_catalyst-2.1.6b2 → ragaai_catalyst-2.1.6.1}/examples/langchain/medical_rag/data/medical_texts/handbook2.pdf +0 -0
- {ragaai_catalyst-2.1.6b2 → ragaai_catalyst-2.1.6.1}/examples/langchain/medical_rag/data/symptom_disease_map.csv +0 -0
- {ragaai_catalyst-2.1.6b2 → ragaai_catalyst-2.1.6.1}/examples/langchain/medical_rag/diagnosis_agent.py +0 -0
- {ragaai_catalyst-2.1.6b2 → ragaai_catalyst-2.1.6.1}/examples/langchain/medical_rag/requirements.txt +0 -0
- {ragaai_catalyst-2.1.6b2 → ragaai_catalyst-2.1.6.1}/examples/langchain/medical_rag/sample.env +0 -0
- {ragaai_catalyst-2.1.6b2 → ragaai_catalyst-2.1.6.1}/examples/langgraph/personal_research_assistant/README.md +0 -0
- {ragaai_catalyst-2.1.6b2 → ragaai_catalyst-2.1.6.1}/examples/langgraph/personal_research_assistant/requirements.txt +0 -0
- {ragaai_catalyst-2.1.6b2 → ragaai_catalyst-2.1.6.1}/examples/langgraph/personal_research_assistant/research_assistant.py +0 -0
- {ragaai_catalyst-2.1.6b2 → ragaai_catalyst-2.1.6.1}/examples/langgraph/personal_research_assistant/sample.env +0 -0
- {ragaai_catalyst-2.1.6b2 → ragaai_catalyst-2.1.6.1}/examples/llamaindex_examples/legal_research_rag/legal_data/cases/ca_overtime_2021.pdf +0 -0
- {ragaai_catalyst-2.1.6b2 → ragaai_catalyst-2.1.6.1}/examples/llamaindex_examples/legal_research_rag/legal_data/cases/fl_ada_2022.pdf +0 -0
- {ragaai_catalyst-2.1.6b2 → ragaai_catalyst-2.1.6.1}/examples/llamaindex_examples/legal_research_rag/legal_data/statutes.csv +0 -0
- {ragaai_catalyst-2.1.6b2 → ragaai_catalyst-2.1.6.1}/examples/llamaindex_examples/legal_research_rag/legal_rag.py +0 -0
- {ragaai_catalyst-2.1.6b2 → ragaai_catalyst-2.1.6.1}/examples/llamaindex_examples/legal_research_rag/requirements.txt +0 -0
- {ragaai_catalyst-2.1.6b2 → ragaai_catalyst-2.1.6.1}/examples/llamaindex_examples/legal_research_rag/sample.env +0 -0
- {ragaai_catalyst-2.1.6b2 → ragaai_catalyst-2.1.6.1}/examples/smolagents/most_upvoted_paper/README.md +0 -0
- {ragaai_catalyst-2.1.6b2 → ragaai_catalyst-2.1.6.1}/examples/smolagents/most_upvoted_paper/most_upvoted_paper.py +0 -0
- {ragaai_catalyst-2.1.6b2 → ragaai_catalyst-2.1.6.1}/examples/smolagents/most_upvoted_paper/requirements.txt +0 -0
- {ragaai_catalyst-2.1.6b2 → ragaai_catalyst-2.1.6.1}/examples/smolagents/most_upvoted_paper/sample.env +0 -0
- {ragaai_catalyst-2.1.6b2 → ragaai_catalyst-2.1.6.1}/examples/smolagents/rag_using_chromadb/README.md +0 -0
- {ragaai_catalyst-2.1.6b2 → ragaai_catalyst-2.1.6.1}/examples/smolagents/rag_using_chromadb/rag_using_chromadb.py +0 -0
- {ragaai_catalyst-2.1.6b2 → ragaai_catalyst-2.1.6.1}/examples/smolagents/rag_using_chromadb/requirements.txt +0 -0
- {ragaai_catalyst-2.1.6b2 → ragaai_catalyst-2.1.6.1}/examples/smolagents/rag_using_chromadb/sample.env +0 -0
- {ragaai_catalyst-2.1.6b2 → ragaai_catalyst-2.1.6.1}/ragaai_catalyst/__init__.py +0 -0
- {ragaai_catalyst-2.1.6b2 → ragaai_catalyst-2.1.6.1}/ragaai_catalyst/_version.py +0 -0
- {ragaai_catalyst-2.1.6b2 → ragaai_catalyst-2.1.6.1}/ragaai_catalyst/dataset.py +0 -0
- {ragaai_catalyst-2.1.6b2 → ragaai_catalyst-2.1.6.1}/ragaai_catalyst/evaluation.py +0 -0
- {ragaai_catalyst-2.1.6b2 → ragaai_catalyst-2.1.6.1}/ragaai_catalyst/experiment.py +0 -0
- {ragaai_catalyst-2.1.6b2 → ragaai_catalyst-2.1.6.1}/ragaai_catalyst/guardrails_manager.py +0 -0
- {ragaai_catalyst-2.1.6b2 → ragaai_catalyst-2.1.6.1}/ragaai_catalyst/internal_api_completion.py +0 -0
- {ragaai_catalyst-2.1.6b2 → ragaai_catalyst-2.1.6.1}/ragaai_catalyst/prompt_manager.py +0 -0
- {ragaai_catalyst-2.1.6b2 → ragaai_catalyst-2.1.6.1}/ragaai_catalyst/proxy_call.py +0 -0
- {ragaai_catalyst-2.1.6b2 → ragaai_catalyst-2.1.6.1}/ragaai_catalyst/ragaai_catalyst.py +0 -0
- {ragaai_catalyst-2.1.6b2 → ragaai_catalyst-2.1.6.1}/ragaai_catalyst/redteaming/__init__.py +0 -0
- {ragaai_catalyst-2.1.6b2 → ragaai_catalyst-2.1.6.1}/ragaai_catalyst/redteaming/config/detectors.toml +0 -0
- {ragaai_catalyst-2.1.6b2 → ragaai_catalyst-2.1.6.1}/ragaai_catalyst/redteaming/data_generator/scenario_generator.py +0 -0
- {ragaai_catalyst-2.1.6b2 → ragaai_catalyst-2.1.6.1}/ragaai_catalyst/redteaming/data_generator/test_case_generator.py +0 -0
- {ragaai_catalyst-2.1.6b2 → ragaai_catalyst-2.1.6.1}/ragaai_catalyst/redteaming/evaluator.py +0 -0
- {ragaai_catalyst-2.1.6b2 → ragaai_catalyst-2.1.6.1}/ragaai_catalyst/redteaming/llm_generator.py +0 -0
- {ragaai_catalyst-2.1.6b2 → ragaai_catalyst-2.1.6.1}/ragaai_catalyst/redteaming/llm_generator_old.py +0 -0
- {ragaai_catalyst-2.1.6b2 → ragaai_catalyst-2.1.6.1}/ragaai_catalyst/redteaming/red_teaming.py +0 -0
- {ragaai_catalyst-2.1.6b2 → ragaai_catalyst-2.1.6.1}/ragaai_catalyst/redteaming/requirements.txt +0 -0
- {ragaai_catalyst-2.1.6b2 → ragaai_catalyst-2.1.6.1}/ragaai_catalyst/redteaming/tests/grok.ipynb +0 -0
- {ragaai_catalyst-2.1.6b2 → ragaai_catalyst-2.1.6.1}/ragaai_catalyst/redteaming/tests/stereotype.ipynb +0 -0
- {ragaai_catalyst-2.1.6b2 → ragaai_catalyst-2.1.6.1}/ragaai_catalyst/redteaming/upload_result.py +0 -0
- {ragaai_catalyst-2.1.6b2 → ragaai_catalyst-2.1.6.1}/ragaai_catalyst/redteaming/utils/issue_description.py +0 -0
- {ragaai_catalyst-2.1.6b2 → ragaai_catalyst-2.1.6.1}/ragaai_catalyst/redteaming/utils/rt.png +0 -0
- {ragaai_catalyst-2.1.6b2 → ragaai_catalyst-2.1.6.1}/ragaai_catalyst/redteaming_old.py +0 -0
- {ragaai_catalyst-2.1.6b2 → ragaai_catalyst-2.1.6.1}/ragaai_catalyst/synthetic_data_generation.py +0 -0
- {ragaai_catalyst-2.1.6b2 → ragaai_catalyst-2.1.6.1}/ragaai_catalyst/tracers/__init__.py +0 -0
- {ragaai_catalyst-2.1.6b2 → ragaai_catalyst-2.1.6.1}/ragaai_catalyst/tracers/agentic_tracing/README.md +0 -0
- {ragaai_catalyst-2.1.6b2 → ragaai_catalyst-2.1.6.1}/ragaai_catalyst/tracers/agentic_tracing/__init__.py +0 -0
- {ragaai_catalyst-2.1.6b2 → ragaai_catalyst-2.1.6.1}/ragaai_catalyst/tracers/agentic_tracing/data/__init__.py +0 -0
- {ragaai_catalyst-2.1.6b2 → ragaai_catalyst-2.1.6.1}/ragaai_catalyst/tracers/agentic_tracing/data/data_structure.py +0 -0
- {ragaai_catalyst-2.1.6b2 → ragaai_catalyst-2.1.6.1}/ragaai_catalyst/tracers/agentic_tracing/tests/FinancialAnalysisSystem.ipynb +0 -0
- {ragaai_catalyst-2.1.6b2 → ragaai_catalyst-2.1.6.1}/ragaai_catalyst/tracers/agentic_tracing/tests/GameActivityEventPlanner.ipynb +0 -0
- {ragaai_catalyst-2.1.6b2 → ragaai_catalyst-2.1.6.1}/ragaai_catalyst/tracers/agentic_tracing/tests/TravelPlanner.ipynb +0 -0
- {ragaai_catalyst-2.1.6b2 → ragaai_catalyst-2.1.6.1}/ragaai_catalyst/tracers/agentic_tracing/tests/__init__.py +0 -0
- {ragaai_catalyst-2.1.6b2 → ragaai_catalyst-2.1.6.1}/ragaai_catalyst/tracers/agentic_tracing/tests/ai_travel_agent.py +0 -0
- {ragaai_catalyst-2.1.6b2 → ragaai_catalyst-2.1.6.1}/ragaai_catalyst/tracers/agentic_tracing/tests/unique_decorator_test.py +0 -0
- {ragaai_catalyst-2.1.6b2 → ragaai_catalyst-2.1.6.1}/ragaai_catalyst/tracers/agentic_tracing/tracers/__init__.py +0 -0
- {ragaai_catalyst-2.1.6b2 → ragaai_catalyst-2.1.6.1}/ragaai_catalyst/tracers/agentic_tracing/tracers/agent_tracer.py +0 -0
- {ragaai_catalyst-2.1.6b2 → ragaai_catalyst-2.1.6.1}/ragaai_catalyst/tracers/agentic_tracing/tracers/custom_tracer.py +0 -0
- {ragaai_catalyst-2.1.6b2 → ragaai_catalyst-2.1.6.1}/ragaai_catalyst/tracers/agentic_tracing/tracers/langgraph_tracer.py +0 -0
- {ragaai_catalyst-2.1.6b2 → ragaai_catalyst-2.1.6.1}/ragaai_catalyst/tracers/agentic_tracing/tracers/llm_tracer.py +0 -0
- {ragaai_catalyst-2.1.6b2 → ragaai_catalyst-2.1.6.1}/ragaai_catalyst/tracers/agentic_tracing/tracers/network_tracer.py +0 -0
- {ragaai_catalyst-2.1.6b2 → ragaai_catalyst-2.1.6.1}/ragaai_catalyst/tracers/agentic_tracing/tracers/tool_tracer.py +0 -0
- {ragaai_catalyst-2.1.6b2 → ragaai_catalyst-2.1.6.1}/ragaai_catalyst/tracers/agentic_tracing/tracers/user_interaction_tracer.py +0 -0
- {ragaai_catalyst-2.1.6b2 → ragaai_catalyst-2.1.6.1}/ragaai_catalyst/tracers/agentic_tracing/upload/__init__.py +0 -0
- {ragaai_catalyst-2.1.6b2 → ragaai_catalyst-2.1.6.1}/ragaai_catalyst/tracers/agentic_tracing/upload/upload_local_metric.py +0 -0
- {ragaai_catalyst-2.1.6b2 → ragaai_catalyst-2.1.6.1}/ragaai_catalyst/tracers/agentic_tracing/utils/__init__.py +0 -0
- {ragaai_catalyst-2.1.6b2 → ragaai_catalyst-2.1.6.1}/ragaai_catalyst/tracers/agentic_tracing/utils/api_utils.py +0 -0
- {ragaai_catalyst-2.1.6b2 → ragaai_catalyst-2.1.6.1}/ragaai_catalyst/tracers/agentic_tracing/utils/create_dataset_schema.py +0 -0
- {ragaai_catalyst-2.1.6b2 → ragaai_catalyst-2.1.6.1}/ragaai_catalyst/tracers/agentic_tracing/utils/file_name_tracker.py +0 -0
- {ragaai_catalyst-2.1.6b2 → ragaai_catalyst-2.1.6.1}/ragaai_catalyst/tracers/agentic_tracing/utils/generic.py +0 -0
- {ragaai_catalyst-2.1.6b2 → ragaai_catalyst-2.1.6.1}/ragaai_catalyst/tracers/agentic_tracing/utils/get_user_trace_metrics.py +0 -0
- {ragaai_catalyst-2.1.6b2 → ragaai_catalyst-2.1.6.1}/ragaai_catalyst/tracers/agentic_tracing/utils/llm_utils.py +0 -0
- {ragaai_catalyst-2.1.6b2 → ragaai_catalyst-2.1.6.1}/ragaai_catalyst/tracers/agentic_tracing/utils/model_costs.json +0 -0
- {ragaai_catalyst-2.1.6b2 → ragaai_catalyst-2.1.6.1}/ragaai_catalyst/tracers/agentic_tracing/utils/span_attributes.py +0 -0
- {ragaai_catalyst-2.1.6b2 → ragaai_catalyst-2.1.6.1}/ragaai_catalyst/tracers/agentic_tracing/utils/supported_llm_provider.toml +0 -0
- {ragaai_catalyst-2.1.6b2 → ragaai_catalyst-2.1.6.1}/ragaai_catalyst/tracers/agentic_tracing/utils/system_monitor.py +0 -0
- {ragaai_catalyst-2.1.6b2 → ragaai_catalyst-2.1.6.1}/ragaai_catalyst/tracers/agentic_tracing/utils/trace_utils.py +0 -0
- {ragaai_catalyst-2.1.6b2 → ragaai_catalyst-2.1.6.1}/ragaai_catalyst/tracers/agentic_tracing/utils/unique_decorator.py +0 -0
- {ragaai_catalyst-2.1.6b2 → ragaai_catalyst-2.1.6.1}/ragaai_catalyst/tracers/agentic_tracing/utils/zip_list_of_unique_files.py +0 -0
- {ragaai_catalyst-2.1.6b2 → ragaai_catalyst-2.1.6.1}/ragaai_catalyst/tracers/distributed.py +0 -0
- {ragaai_catalyst-2.1.6b2 → ragaai_catalyst-2.1.6.1}/ragaai_catalyst/tracers/exporters/__init__.py +0 -0
- {ragaai_catalyst-2.1.6b2 → ragaai_catalyst-2.1.6.1}/ragaai_catalyst/tracers/exporters/file_span_exporter.py +0 -0
- {ragaai_catalyst-2.1.6b2 → ragaai_catalyst-2.1.6.1}/ragaai_catalyst/tracers/exporters/raga_exporter.py +0 -0
- {ragaai_catalyst-2.1.6b2 → ragaai_catalyst-2.1.6.1}/ragaai_catalyst/tracers/instrumentators/__init__.py +0 -0
- {ragaai_catalyst-2.1.6b2 → ragaai_catalyst-2.1.6.1}/ragaai_catalyst/tracers/langchain_callback.py +0 -0
- {ragaai_catalyst-2.1.6b2 → ragaai_catalyst-2.1.6.1}/ragaai_catalyst/tracers/llamaindex_callback.py +0 -0
- {ragaai_catalyst-2.1.6b2 → ragaai_catalyst-2.1.6.1}/ragaai_catalyst/tracers/llamaindex_instrumentation.py +0 -0
- {ragaai_catalyst-2.1.6b2 → ragaai_catalyst-2.1.6.1}/ragaai_catalyst/tracers/utils/__init__.py +0 -0
- {ragaai_catalyst-2.1.6b2 → ragaai_catalyst-2.1.6.1}/ragaai_catalyst/tracers/utils/convert_langchain_callbacks_output.py +0 -0
- {ragaai_catalyst-2.1.6b2 → ragaai_catalyst-2.1.6.1}/ragaai_catalyst/tracers/utils/convert_llama_instru_callback.py +0 -0
- {ragaai_catalyst-2.1.6b2 → ragaai_catalyst-2.1.6.1}/ragaai_catalyst/tracers/utils/extraction_logic_llama_index.py +0 -0
- {ragaai_catalyst-2.1.6b2 → ragaai_catalyst-2.1.6.1}/ragaai_catalyst/tracers/utils/langchain_tracer_extraction_logic.py +0 -0
- {ragaai_catalyst-2.1.6b2 → ragaai_catalyst-2.1.6.1}/ragaai_catalyst/tracers/utils/model_prices_and_context_window_backup.json +0 -0
- {ragaai_catalyst-2.1.6b2 → ragaai_catalyst-2.1.6.1}/ragaai_catalyst/tracers/utils/utils.py +0 -0
- {ragaai_catalyst-2.1.6b2 → ragaai_catalyst-2.1.6.1}/ragaai_catalyst/utils.py +0 -0
- {ragaai_catalyst-2.1.6b2 → ragaai_catalyst-2.1.6.1}/ragaai_catalyst.egg-info/dependency_links.txt +0 -0
- {ragaai_catalyst-2.1.6b2 → ragaai_catalyst-2.1.6.1}/ragaai_catalyst.egg-info/top_level.txt +0 -0
- {ragaai_catalyst-2.1.6b2 → ragaai_catalyst-2.1.6.1}/setup.cfg +0 -0
- {ragaai_catalyst-2.1.6b2 → ragaai_catalyst-2.1.6.1}/tests/examples/langgraph/personal_research_assistant/test_research_assistant.py +0 -0
- {ragaai_catalyst-2.1.6b2 → ragaai_catalyst-2.1.6.1}/tests/test_catalyst/test_base_tracer_add_metrics.py +0 -0
- {ragaai_catalyst-2.1.6b2 → ragaai_catalyst-2.1.6.1}/tests/test_catalyst/test_base_tracer_metrics.py +0 -0
- {ragaai_catalyst-2.1.6b2 → ragaai_catalyst-2.1.6.1}/tests/test_catalyst/test_configuration.py +0 -0
- {ragaai_catalyst-2.1.6b2 → ragaai_catalyst-2.1.6.1}/tests/test_catalyst/test_dataset.py +0 -0
- {ragaai_catalyst-2.1.6b2 → ragaai_catalyst-2.1.6.1}/tests/test_catalyst/test_evaluation.py +0 -0
- {ragaai_catalyst-2.1.6b2 → ragaai_catalyst-2.1.6.1}/tests/test_catalyst/test_evaluation_metrics.py +0 -0
- {ragaai_catalyst-2.1.6b2 → ragaai_catalyst-2.1.6.1}/tests/test_catalyst/test_langchain_tracing.py +0 -0
- {ragaai_catalyst-2.1.6b2 → ragaai_catalyst-2.1.6.1}/tests/test_catalyst/test_llm_providers.py +0 -0
- {ragaai_catalyst-2.1.6b2 → ragaai_catalyst-2.1.6.1}/tests/test_catalyst/test_prompt_manager.py +0 -0
- {ragaai_catalyst-2.1.6b2 → ragaai_catalyst-2.1.6.1}/tests/test_catalyst/test_redteaming.py +0 -0
- {ragaai_catalyst-2.1.6b2 → ragaai_catalyst-2.1.6.1}/tests/test_catalyst/test_synthetic_data_generation.py +0 -0
- {ragaai_catalyst-2.1.6b2 → ragaai_catalyst-2.1.6.1}/tests/test_catalyst/upload_trace_zip_automation.py +0 -0
{ragaai_catalyst-2.1.6b2 → ragaai_catalyst-2.1.6.1}/PKG-INFO
@@ -1,9 +1,9 @@
-Metadata-Version: 2.
+Metadata-Version: 2.4
 Name: ragaai_catalyst
-Version: 2.1.
+Version: 2.1.6.1
 Summary: RAGA AI CATALYST
 Author-email: Kiran Scaria <kiran.scaria@raga.ai>, Kedar Gaikwad <kedar.gaikwad@raga.ai>, Dushyant Mahajan <dushyant.mahajan@raga.ai>, Siddhartha Kosti <siddhartha.kosti@raga.ai>, Ritika Goel <ritika.goel@raga.ai>, Vijay Chaurasia <vijay.chaurasia@raga.ai>, Tushar Kumar <tushar.kumar@raga.ai>
-Requires-Python: <3.13,>=3.
+Requires-Python: <3.13,>=3.10
 Description-Content-Type: text/markdown
 License-File: LICENSE
 Requires-Dist: aiohttp>=3.10.2
@@ -39,7 +39,7 @@ Requires-Dist: openinference-instrumentation-openai
 Requires-Dist: openinference-instrumentation-bedrock
 Requires-Dist: openinference-instrumentation-crewai
 Requires-Dist: openinference-instrumentation-haystack
-Requires-Dist: openinference-instrumentation-
+Requires-Dist: openinference-instrumentation-openai-agents
 Requires-Dist: openinference-instrumentation-smolagents
 Requires-Dist: opentelemetry-sdk
 Requires-Dist: opentelemetry-exporter-otlp
@@ -51,6 +51,7 @@ Requires-Dist: black; extra == "dev"
 Requires-Dist: isort; extra == "dev"
 Requires-Dist: mypy; extra == "dev"
 Requires-Dist: flake8; extra == "dev"
+Dynamic: license-file
 
 # RagaAI Catalyst
 
{ragaai_catalyst-2.1.6b2 → ragaai_catalyst-2.1.6.1}/examples/crewai/scifi_writer/scifi_writer.py
RENAMED
@@ -1,15 +1,11 @@
-import sys
-sys.path.append('.')
-
-from ragaai_catalyst import RagaAICatalyst, init_tracing
-from ragaai_catalyst.tracers import Tracer
-
 import os
 from dotenv import load_dotenv
 from crewai import Agent, Task, Crew, Process
 from crewai.tools import tool
 from typing import Any
 
+from ragaai_catalyst import RagaAICatalyst, init_tracing
+from ragaai_catalyst.tracers import Tracer
 
 load_dotenv()
 
@@ -98,4 +94,4 @@ try:
         print("\nGenerated Story Content:")
         print(file.read())
 except FileNotFoundError:
-    print("Story file not found. Check the writer agent's execution.")
+    print("Story file not found. Check the writer agent's execution.")
ragaai_catalyst-2.1.6.1/examples/openai_agents_sdk/README.md
@@ -0,0 +1,71 @@
+# Email Data Extraction with OpenAI Agents SDK
+
+This example demonstrates how to use the OpenAI Agents SDK with RagaAI Catalyst to extract structured information from emails.
+
+## Overview
+
+The application uses OpenAI's Agents SDK to parse unstructured email text and extract key information such as:
+- Email subject and sender details
+- Main discussion points
+- Meeting information (date, time, location)
+- Action items and tasks with assignees
+- Next steps
+
+The extracted data is structured using Pydantic models for easy manipulation and validation.
+
+## Requirements
+
+- Python 3.8+
+- OpenAI API key
+- RagaAI Catalyst credentials
+
+## Installation
+
+1. Clone the repository
+2. Install the required dependencies:
+```bash
+pip install -r requirements.txt
+```
+3. Copy [sample.env](cci:7://file:///Users/ragaai_user/work/ragaai-catalyst/examples/openai_agents_sdk/sample.env:0:0-0:0) to [.env](cci:7://file:///Users/ragaai_user/work/ragaai-catalyst/examples/openai_agents_sdk/sample.env:0:0-0:0) and fill in your API keys:
+```bash
+cp sample.env .env
+```
+
+## Environment Variables
+
+Configure the following environment variables in your [.env](cci:7://file:///Users/ragaai_user/work/ragaai-catalyst/examples/openai_agents_sdk/sample.env:0:0-0:0) file:
+
+- `OPENAI_API_KEY`: Your OpenAI API key
+- `CATALYST_ACCESS_KEY`: Your RagaAI Catalyst access key
+- `CATALYST_SECRET_KEY`: Your RagaAI Catalyst secret key
+- `CATALYST_BASE_URL`: RagaAI Catalyst base URL
+- `PROJECT_NAME`: Name for your project in RagaAI Catalyst (default: 'email-extraction')
+- `DATASET_NAME`: Name for your dataset in RagaAI Catalyst (default: 'email-data')
+
+## Usage
+
+Run the example script:
+
+```bash
+python data_extraction_email.py
+```
+The script will:
+
+1. Initialize the RagaAI Catalyst client for tracing
+2. Set up an OpenAI Agent with appropriate instructions
+3. Process a sample email to extract structured data
+4. Display the extracted information
+
+## Customization
+
+You can modify the `sample_email` variable in the script to process different emails, or adapt the code to read emails from files or an API.
+
+The Pydantic models (`Person`, `Meeting`, `Task`, `EmailData`) can be extended to capture additional information as needed.
+
+## Integration with RagaAI Catalyst
+
+This example integrates with RagaAI Catalyst for tracing and monitoring agent interactions. The integration helps with:
+
+- Tracking agent performance
+- Debugging complex agent workflows
+- Collecting data for future improvements
ragaai_catalyst-2.1.6.1/examples/openai_agents_sdk/data_extraction_email.py
@@ -0,0 +1,189 @@
+import os
+import time
+from typing import List, Optional, Callable, Any
+from pydantic import BaseModel
+from dotenv import load_dotenv
+
+from agents import Agent, Runner, ModelSettings, set_tracing_export_api_key
+
+from ragaai_catalyst import RagaAICatalyst, init_tracing
+from ragaai_catalyst.tracers import Tracer
+
+load_dotenv()
+set_tracing_export_api_key(os.getenv('OPENAI_API_KEY'))
+
+def initialize_catalyst():
+    """Initialize RagaAI Catalyst using environment credentials."""
+    catalyst = RagaAICatalyst(
+        access_key=os.getenv('CATALYST_ACCESS_KEY'),
+        secret_key=os.getenv('CATALYST_SECRET_KEY'),
+        base_url=os.getenv('CATALYST_BASE_URL')
+    )
+
+    tracer = Tracer(
+        project_name=os.environ.get('PROJECT_NAME', 'email-extraction'),
+        dataset_name=os.environ.get('DATASET_NAME', 'email-data'),
+        tracer_type="agentic/openai_agents",
+    )
+
+    init_tracing(catalyst=catalyst, tracer=tracer)
+
+class Person(BaseModel):
+    """Person data model for email sender and recipients."""
+    name: str
+    role: Optional[str] = None
+    contact: Optional[str] = None
+
+class Meeting(BaseModel):
+    """Meeting data model for scheduled meetings in emails."""
+    date: str
+    time: str
+    location: Optional[str] = None
+    duration: Optional[str] = None
+
+class Task(BaseModel):
+    """Task data model for action items in emails."""
+    description: str
+    assignee: Optional[str] = None
+    deadline: Optional[str] = None
+    priority: Optional[str] = None
+
+class EmailData(BaseModel):
+    """Complete email data model with structured information."""
+    subject: str
+    sender: Person
+    recipients: List[Person]
+    main_points: List[str]
+    meetings: List[Meeting]
+    tasks: List[Task]
+    next_steps: Optional[str] = None
+
+def initialize_agent(agent_name: str, agent_instructions: str|Callable, handoff_description: Optional[str]=None, handoffs: List[Agent]=list(), model_name: str='gpt-4o', temperature: float=0.3, max_tokens: int=1000, output_type: Optional[type[Any]]=None):
+    """Initialize the OpenAI agent for email extraction."""
+    # Initialize the agent with appropriate configuration
+    # This could include model selection, temperature settings, etc.
+    model_settings = ModelSettings(
+        temperature=temperature,
+        max_tokens=max_tokens
+    )
+    agent = Agent(
+        name=agent_name,
+        instructions=agent_instructions,
+        handoff_description=handoff_description,
+        handoffs=handoffs,
+        model=model_name,
+        model_settings=model_settings,
+        output_type=output_type
+    )
+    return agent
+
+email_extractor = initialize_agent(
+    agent_name="Email Extractor",
+    agent_instructions="You are an expert at extracting structured information from emails.",
+    model_name="gpt-4o",
+    temperature=0.2,
+    output_type=EmailData
+)
+
+async def extract_email_data(email_text: str) -> EmailData:
+    """
+    Extract structured data from an email using an OpenAI agent.
+
+    Args:
+        email_text: The raw email text to process
+
+    Returns:
+        EmailData object containing structured information from the email
+    """
+    runner = Runner()
+    extraction_prompt = f"Please extract information from this email:\n\n{email_text}"
+    result = await runner.run(
+        email_extractor,
+        extraction_prompt
+    )
+    return result.final_output
+
+sample_email = """
+From: Alex Johnson <alex.j@techcorp.com>
+To: Team Development <team-dev@techcorp.com>
+CC: Sarah Wong <sarah.w@techcorp.com>, Miguel Fernandez <miguel.f@techcorp.com>
+Subject: Project Phoenix Update and Next Steps
+
+Hi team,
+
+I wanted to follow up on yesterday's discussion about Project Phoenix and outline our next steps.
+
+Key points from our discussion:
+- The beta testing phase has shown promising results with 85% positive feedback
+- We're still facing some performance issues on mobile devices
+- The client has requested additional features for the dashboard
+
+Let's schedule a follow-up meeting this Friday, June 15th at 2:00 PM in Conference Room B. The meeting should last about 1.5 hours, and we'll need to prepare the updated project timeline.
+
+Action items:
+1. Sarah to address the mobile performance issues by June 20th (High priority)
+2. Miguel to create mock-ups for the new dashboard features by next Monday
+3. Everyone to review the beta testing feedback document and add comments by EOD tomorrow
+
+If you have any questions before Friday's meeting, feel free to reach out.
+
+Best regards,
+Alex Johnson
+Senior Project Manager
+(555) 123-4567
+"""
+
+def display_email_data(email_data: EmailData):
+    """
+    Display the extracted email data in a formatted way.
+
+    Args:
+        email_data: The structured EmailData object to display
+    """
+    print(f"Subject: {email_data.subject}")
+    print(f"From: {email_data.sender.name} ({email_data.sender.role})")
+
+    print("\nMain points:")
+    for point in email_data.main_points:
+        print(f"- {point}")
+
+    print("\nMeetings:")
+    for meeting in email_data.meetings:
+        print(f"- {meeting.date} at {meeting.time}, Location: {meeting.location}")
+
+    print("\nTasks:")
+    for task in email_data.tasks:
+        print(f"- {task.description}")
+        print(
+            f" Assignee: {task.assignee}, Deadline: {task.deadline}, Priority: {task.priority}"
+        )
+
+    if email_data.next_steps:
+        print(f"\nNext Steps: {email_data.next_steps}")
+
+async def process_email(email_text: str):
+    """
+    Process an email to extract structured data and display the results.
+
+    Args:
+        email_text: The raw email text to process
+
+    Returns:
+        The structured EmailData object
+    """
+    if os.getenv('CATALYST_ACCESS_KEY'):
+        initialize_catalyst()
+
+    start_time = time.time()
+    email_data = await extract_email_data(email_text)
+    duration = time.time() - start_time
+
+    print(f"Email processing completed in {duration:.2f} seconds")
+    display_email_data(email_data)
+
+    return email_data
+
+if __name__ == "__main__":
+    import asyncio
+
+    asyncio.run(process_email(sample_email))
{ragaai_catalyst-2.1.6b2 → ragaai_catalyst-2.1.6.1}/pyproject.toml
@@ -6,9 +6,9 @@ build-backend = "setuptools.build_meta"
 name = "ragaai_catalyst"
 description = "RAGA AI CATALYST"
 readme = "README.md"
-requires-python = ">=3.
+requires-python = ">=3.10,<3.13"
 # license = {file = "LICENSE"}
-version = "2.1.6.
+version = "2.1.6.1"
 authors = [
     {name = "Kiran Scaria", email = "kiran.scaria@raga.ai"},
     {name = "Kedar Gaikwad", email = "kedar.gaikwad@raga.ai"},
@@ -28,8 +28,8 @@ dependencies = [
     "groq>=0.11.0",
     "pypdf>=5.3.1",
     "google-genai>=1.3.0",
-    "Markdown>=3.7",
-    "litellm>=1.51.1",
+    "Markdown>=3.7",
+    "litellm>=1.51.1",
     "tenacity==8.3.0",
     "tqdm>=4.66.5",
     "llama-index>=0.10.0",
@@ -53,11 +53,11 @@ dependencies = [
     "openinference-instrumentation-bedrock",
     "openinference-instrumentation-crewai",
     "openinference-instrumentation-haystack",
-    "openinference-instrumentation-
+    "openinference-instrumentation-openai-agents",
     "openinference-instrumentation-smolagents",
     "opentelemetry-sdk",
     "opentelemetry-exporter-otlp",
-    "opentelemetry-proto>=1.12.0"
+    "opentelemetry-proto>=1.12.0",
 ]
 
 [project.optional-dependencies]
{ragaai_catalyst-2.1.6b2 → ragaai_catalyst-2.1.6.1}/ragaai_catalyst/guard_executor.py
@@ -164,6 +164,7 @@ class GuardExecutor:
         return doc
 
     def execute_input_guardrails(self, prompt, prompt_params):
+        self.current_trace_id =None
         doc = self.set_variables(prompt,prompt_params)
         deployment_response = self.execute_deployment(self.input_deployment_id,doc)
         self.current_trace_id = deployment_response['data']['results'][0]['executionId']
{ragaai_catalyst-2.1.6b2 → ragaai_catalyst-2.1.6.1}/ragaai_catalyst/tracers/agentic_tracing/tracers/base.py
@@ -92,6 +92,7 @@ class BaseTracer:
         self._upload_tasks = []
         self._is_uploading = False
         self._upload_completed_callback = None
+        self.timeout = self.user_details.get("timeout", 120)
 
         ensure_uploader_running()
 
@@ -314,7 +315,8 @@ class BaseTracer:
             project_id=self.project_id,
             dataset_name=self.dataset_name,
             user_details=self.user_details,
-            base_url=self.base_url
+            base_url=self.base_url,
+            timeout=self.timeout
         )
 
         # For backward compatibility
{ragaai_catalyst-2.1.6b2 → ragaai_catalyst-2.1.6.1}/ragaai_catalyst/tracers/agentic_tracing/tracers/main_tracer.py
@@ -48,7 +48,7 @@ from ragaai_catalyst.tracers.upload_traces import UploadTraces
 class AgenticTracing(
     BaseTracer, LLMTracerMixin, ToolTracerMixin, AgentTracerMixin, CustomTracerMixin
 ):
-    def __init__(self, user_detail, auto_instrumentation=None):
+    def __init__(self, user_detail, auto_instrumentation=None, timeout=120):
         # Initialize all parent classes
         self.user_interaction_tracer = UserInteractionTracer()
         LLMTracerMixin.__init__(self)
@@ -60,7 +60,7 @@ class AgenticTracing(
         self.project_id = user_detail["project_id"]
         self.trace_user_detail = user_detail["trace_user_detail"]
         self.base_url = f"{RagaAICatalyst.BASE_URL}"
-        self.timeout =
+        self.timeout = timeout
 
         # Add warning flag
         self._warning_shown = False
{ragaai_catalyst-2.1.6b2 → ragaai_catalyst-2.1.6.1}/ragaai_catalyst/tracers/agentic_tracing/upload/trace_uploader.py
@@ -77,7 +77,7 @@ def get_executor():
 
 def process_upload(task_id: str, filepath: str, hash_id: str, zip_path: str,
                    project_name: str, project_id: str, dataset_name: str,
-                   user_details: Dict[str, Any], base_url: str) -> Dict[str, Any]:
+                   user_details: Dict[str, Any], base_url: str, timeout=120) -> Dict[str, Any]:
     """
     Process a single upload task
 
@@ -147,7 +147,8 @@ def process_upload(task_id: str, filepath: str, hash_id: str, zip_path: str,
                 json_file_path=filepath,
                 dataset_name=dataset_name,
                 project_name=project_name,
-                base_url=base_url
+                base_url=base_url,
+                timeout=timeout
             )
             logger.info(f"Trace metrics uploaded: {response}")
         except Exception as e:
@@ -167,6 +168,7 @@ def process_upload(task_id: str, filepath: str, hash_id: str, zip_path: str,
                 dataset_name=dataset_name,
                 user_detail=user_details,
                 base_url=base_url,
+                timeout=timeout
             )
             upload_traces.upload_agentic_traces()
             logger.info("Agentic traces uploaded successfully")
@@ -185,7 +187,8 @@ def process_upload(task_id: str, filepath: str, hash_id: str, zip_path: str,
                 zip_path=zip_path,
                 project_name=project_name,
                 dataset_name=dataset_name,
-                base_url=base_url
+                base_url=base_url,
+                timeout=timeout
             )
             logger.info(f"Code hash uploaded: {response}")
         except Exception as e:
@@ -215,7 +218,7 @@ def save_task_status(task_status: Dict[str, Any]):
     with open(status_path, "w") as f:
         json.dump(task_status, f, indent=2)
 
-def submit_upload_task(filepath, hash_id, zip_path, project_name, project_id, dataset_name, user_details, base_url):
+def submit_upload_task(filepath, hash_id, zip_path, project_name, project_id, dataset_name, user_details, base_url, timeout=120):
     """
     Submit a new upload task using futures.
 
@@ -259,7 +262,8 @@ def submit_upload_task(filepath, hash_id, zip_path, project_name, project_id, dataset_name, user_details, base_url):
             project_id=project_id,
             dataset_name=dataset_name,
             user_details=user_details,
-            base_url=base_url
+            base_url=base_url,
+            timeout=timeout
         )
 
         # Store the future for later status checks
{ragaai_catalyst-2.1.6b2 → ragaai_catalyst-2.1.6.1}/ragaai_catalyst/tracers/agentic_tracing/upload/upload_code.py
@@ -9,19 +9,19 @@ logger = logging.getLogger(__name__)
 from urllib.parse import urlparse, urlunparse
 import re
 
-def upload_code(hash_id, zip_path, project_name, dataset_name, base_url=None):
-    code_hashes_list = _fetch_dataset_code_hashes(project_name, dataset_name, base_url)
+def upload_code(hash_id, zip_path, project_name, dataset_name, base_url=None, timeout=120):
+    code_hashes_list = _fetch_dataset_code_hashes(project_name, dataset_name, base_url, timeout=timeout)
 
     if hash_id not in code_hashes_list:
-        presigned_url = _fetch_presigned_url(project_name, dataset_name, base_url)
-        _put_zip_presigned_url(project_name, presigned_url, zip_path)
+        presigned_url = _fetch_presigned_url(project_name, dataset_name, base_url, timeout=timeout)
+        _put_zip_presigned_url(project_name, presigned_url, zip_path, timeout=timeout)
 
-        response = _insert_code(dataset_name, hash_id, presigned_url, project_name, base_url)
+        response = _insert_code(dataset_name, hash_id, presigned_url, project_name, base_url, timeout=timeout)
         return response
     else:
         return "Code already exists"
 
-def _fetch_dataset_code_hashes(project_name, dataset_name, base_url=None):
+def _fetch_dataset_code_hashes(project_name, dataset_name, base_url=None, timeout=120):
     payload = {}
     headers = {
         "Authorization": f"Bearer {os.getenv('RAGAAI_CATALYST_TOKEN')}",
@@ -36,7 +36,7 @@ def _fetch_dataset_code_hashes(project_name, dataset_name, base_url=None):
             endpoint,
             headers=headers,
             data=payload,
-            timeout=
+            timeout=timeout)
         elapsed_ms = (time.time() - start_time) * 1000
         logger.debug(
             f"API Call: [GET] {endpoint} | Status: {response.status_code} | Time: {elapsed_ms:.2f}ms")
@@ -66,7 +66,7 @@ def update_presigned_url(presigned_url, base_url):
     return presigned_url
 
 
-def _fetch_presigned_url(project_name, dataset_name, base_url=None):
+def _fetch_presigned_url(project_name, dataset_name, base_url=None, timeout=120):
     payload = json.dumps({
         "datasetName": dataset_name,
         "numFiles": 1,
@@ -87,7 +87,7 @@ def _fetch_presigned_url(project_name, dataset_name, base_url=None):
             endpoint,
             headers=headers,
             data=payload,
-            timeout=
+            timeout=timeout)
         elapsed_ms = (time.time() - start_time) * 1000
         logger.debug(
             f"API Call: [GET] {endpoint} | Status: {response.status_code} | Time: {elapsed_ms:.2f}ms")
@@ -102,7 +102,7 @@ def _fetch_presigned_url(project_name, dataset_name, base_url=None):
         logger.error(f"Failed to list datasets: {e}")
         raise
 
-def _put_zip_presigned_url(project_name, presignedUrl, filename):
+def _put_zip_presigned_url(project_name, presignedUrl, filename, timeout=120):
     headers = {
         "X-Project-Name": project_name,
         "Content-Type": "application/zip",
@@ -119,14 +119,14 @@ def _put_zip_presigned_url(project_name, presignedUrl, filename):
             presignedUrl,
             headers=headers,
             data=payload,
-            timeout=
+            timeout=timeout)
         elapsed_ms = (time.time() - start_time) * 1000
         logger.debug(
             f"API Call: [PUT] {presignedUrl} | Status: {response.status_code} | Time: {elapsed_ms:.2f}ms")
         if response.status_code != 200 or response.status_code != 201:
             return response, response.status_code
 
-def _insert_code(dataset_name, hash_id, presigned_url, project_name, base_url=None):
+def _insert_code(dataset_name, hash_id, presigned_url, project_name, base_url=None, timeout=120):
     payload = json.dumps({
         "datasetName": dataset_name,
         "codeHash": hash_id,
@@ -147,7 +147,7 @@ def _insert_code(dataset_name, hash_id, presigned_url, project_name, base_url=None):
             endpoint,
             headers=headers,
             data=payload,
-            timeout=
+            timeout=timeout)
         elapsed_ms = (time.time() - start_time) * 1000
         logger.debug(
             f"API Call: [POST] {endpoint} | Status: {response.status_code} | Time: {elapsed_ms:.2f}ms")
{ragaai_catalyst-2.1.6b2 → ragaai_catalyst-2.1.6.1}/ragaai_catalyst/tracers/agentic_tracing/upload/upload_trace_metric.py
@@ -15,7 +15,7 @@ logging_level = (
 )
 
 
-def upload_trace_metric(json_file_path, dataset_name, project_name, base_url=None):
+def upload_trace_metric(json_file_path, dataset_name, project_name, base_url=None, timeout=120):
     try:
         with open(json_file_path, "r") as f:
             traces = json.load(f)
@@ -51,7 +51,7 @@ def upload_trace_metric(json_file_path, dataset_name, project_name, base_url=None):
             endpoint,
             headers=headers,
             data=payload,
-            timeout=
+            timeout=timeout)
         elapsed_ms = (time.time() - start_time) * 1000
         logger.debug(
             f"API Call: [POST] {endpoint} | Status: {response.status_code} | Time: {elapsed_ms:.2f}ms")
{ragaai_catalyst-2.1.6b2 → ragaai_catalyst-2.1.6.1}/ragaai_catalyst/tracers/exporters/dynamic_trace_exporter.py
@@ -14,7 +14,7 @@ class DynamicTraceExporter(SpanExporter):
     certain properties to be updated dynamically during execution.
     """
 
-    def __init__(self, files_to_zip, project_name, project_id, dataset_name, user_details, base_url, custom_model_cost):
+    def __init__(self, files_to_zip, project_name, project_id, dataset_name, user_details, base_url, custom_model_cost, timeout=120):
         """
         Initialize the DynamicTraceExporter.
 
@@ -33,7 +33,8 @@ class DynamicTraceExporter(SpanExporter):
             dataset_name=dataset_name,
             user_details=user_details,
             base_url=base_url,
-            custom_model_cost=custom_model_cost
+            custom_model_cost=custom_model_cost,
+            timeout=timeout
         )
 
         # Store the initial values
{ragaai_catalyst-2.1.6b2 → ragaai_catalyst-2.1.6.1}/ragaai_catalyst/tracers/exporters/ragaai_trace_exporter.py
@@ -19,7 +19,7 @@ logging_level = (
 
 
 class RAGATraceExporter(SpanExporter):
-    def __init__(self, files_to_zip, project_name, project_id, dataset_name, user_details, base_url, custom_model_cost):
+    def __init__(self, files_to_zip, project_name, project_id, dataset_name, user_details, base_url, custom_model_cost, timeout=120):
         self.trace_spans = dict()
         self.tmp_dir = tempfile.gettempdir()
         self.files_to_zip = files_to_zip
@@ -30,6 +30,7 @@ class RAGATraceExporter(SpanExporter):
         self.base_url = base_url
         self.custom_model_cost = custom_model_cost
         self.system_monitor = SystemMonitor(dataset_name)
+        self.timeout = timeout
 
     def export(self, spans):
         for span in spans:
@@ -122,7 +123,8 @@ class RAGATraceExporter(SpanExporter):
             project_id=self.project_id,
             dataset_name=self.dataset_name,
             user_details=self.user_details,
-            base_url=self.base_url
+            base_url=self.base_url,
+            timeout=self.timeout
        )
 
         logger.info(f"Submitted upload task with ID: {self.upload_task_id}")