nvidia-nat 1.2.0rc5__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- aiq/agent/__init__.py +0 -0
- aiq/agent/base.py +239 -0
- aiq/agent/dual_node.py +67 -0
- aiq/agent/react_agent/__init__.py +0 -0
- aiq/agent/react_agent/agent.py +355 -0
- aiq/agent/react_agent/output_parser.py +104 -0
- aiq/agent/react_agent/prompt.py +41 -0
- aiq/agent/react_agent/register.py +149 -0
- aiq/agent/reasoning_agent/__init__.py +0 -0
- aiq/agent/reasoning_agent/reasoning_agent.py +225 -0
- aiq/agent/register.py +23 -0
- aiq/agent/rewoo_agent/__init__.py +0 -0
- aiq/agent/rewoo_agent/agent.py +411 -0
- aiq/agent/rewoo_agent/prompt.py +108 -0
- aiq/agent/rewoo_agent/register.py +158 -0
- aiq/agent/tool_calling_agent/__init__.py +0 -0
- aiq/agent/tool_calling_agent/agent.py +119 -0
- aiq/agent/tool_calling_agent/register.py +106 -0
- aiq/authentication/__init__.py +14 -0
- aiq/authentication/api_key/__init__.py +14 -0
- aiq/authentication/api_key/api_key_auth_provider.py +96 -0
- aiq/authentication/api_key/api_key_auth_provider_config.py +124 -0
- aiq/authentication/api_key/register.py +26 -0
- aiq/authentication/exceptions/__init__.py +14 -0
- aiq/authentication/exceptions/api_key_exceptions.py +38 -0
- aiq/authentication/http_basic_auth/__init__.py +0 -0
- aiq/authentication/http_basic_auth/http_basic_auth_provider.py +81 -0
- aiq/authentication/http_basic_auth/register.py +30 -0
- aiq/authentication/interfaces.py +93 -0
- aiq/authentication/oauth2/__init__.py +14 -0
- aiq/authentication/oauth2/oauth2_auth_code_flow_provider.py +107 -0
- aiq/authentication/oauth2/oauth2_auth_code_flow_provider_config.py +39 -0
- aiq/authentication/oauth2/register.py +25 -0
- aiq/authentication/register.py +21 -0
- aiq/builder/__init__.py +0 -0
- aiq/builder/builder.py +285 -0
- aiq/builder/component_utils.py +316 -0
- aiq/builder/context.py +264 -0
- aiq/builder/embedder.py +24 -0
- aiq/builder/eval_builder.py +161 -0
- aiq/builder/evaluator.py +29 -0
- aiq/builder/framework_enum.py +24 -0
- aiq/builder/front_end.py +73 -0
- aiq/builder/function.py +344 -0
- aiq/builder/function_base.py +380 -0
- aiq/builder/function_info.py +627 -0
- aiq/builder/intermediate_step_manager.py +174 -0
- aiq/builder/llm.py +25 -0
- aiq/builder/retriever.py +25 -0
- aiq/builder/user_interaction_manager.py +74 -0
- aiq/builder/workflow.py +148 -0
- aiq/builder/workflow_builder.py +1117 -0
- aiq/cli/__init__.py +14 -0
- aiq/cli/cli_utils/__init__.py +0 -0
- aiq/cli/cli_utils/config_override.py +231 -0
- aiq/cli/cli_utils/validation.py +37 -0
- aiq/cli/commands/__init__.py +0 -0
- aiq/cli/commands/configure/__init__.py +0 -0
- aiq/cli/commands/configure/channel/__init__.py +0 -0
- aiq/cli/commands/configure/channel/add.py +28 -0
- aiq/cli/commands/configure/channel/channel.py +36 -0
- aiq/cli/commands/configure/channel/remove.py +30 -0
- aiq/cli/commands/configure/channel/update.py +30 -0
- aiq/cli/commands/configure/configure.py +33 -0
- aiq/cli/commands/evaluate.py +139 -0
- aiq/cli/commands/info/__init__.py +14 -0
- aiq/cli/commands/info/info.py +39 -0
- aiq/cli/commands/info/list_channels.py +32 -0
- aiq/cli/commands/info/list_components.py +129 -0
- aiq/cli/commands/info/list_mcp.py +213 -0
- aiq/cli/commands/registry/__init__.py +14 -0
- aiq/cli/commands/registry/publish.py +88 -0
- aiq/cli/commands/registry/pull.py +118 -0
- aiq/cli/commands/registry/registry.py +38 -0
- aiq/cli/commands/registry/remove.py +108 -0
- aiq/cli/commands/registry/search.py +155 -0
- aiq/cli/commands/sizing/__init__.py +14 -0
- aiq/cli/commands/sizing/calc.py +297 -0
- aiq/cli/commands/sizing/sizing.py +27 -0
- aiq/cli/commands/start.py +246 -0
- aiq/cli/commands/uninstall.py +81 -0
- aiq/cli/commands/validate.py +47 -0
- aiq/cli/commands/workflow/__init__.py +14 -0
- aiq/cli/commands/workflow/templates/__init__.py.j2 +0 -0
- aiq/cli/commands/workflow/templates/config.yml.j2 +16 -0
- aiq/cli/commands/workflow/templates/pyproject.toml.j2 +22 -0
- aiq/cli/commands/workflow/templates/register.py.j2 +5 -0
- aiq/cli/commands/workflow/templates/workflow.py.j2 +36 -0
- aiq/cli/commands/workflow/workflow.py +37 -0
- aiq/cli/commands/workflow/workflow_commands.py +313 -0
- aiq/cli/entrypoint.py +135 -0
- aiq/cli/main.py +44 -0
- aiq/cli/register_workflow.py +488 -0
- aiq/cli/type_registry.py +1000 -0
- aiq/data_models/__init__.py +14 -0
- aiq/data_models/api_server.py +694 -0
- aiq/data_models/authentication.py +231 -0
- aiq/data_models/common.py +171 -0
- aiq/data_models/component.py +54 -0
- aiq/data_models/component_ref.py +168 -0
- aiq/data_models/config.py +406 -0
- aiq/data_models/dataset_handler.py +123 -0
- aiq/data_models/discovery_metadata.py +335 -0
- aiq/data_models/embedder.py +27 -0
- aiq/data_models/evaluate.py +127 -0
- aiq/data_models/evaluator.py +26 -0
- aiq/data_models/front_end.py +26 -0
- aiq/data_models/function.py +30 -0
- aiq/data_models/function_dependencies.py +72 -0
- aiq/data_models/interactive.py +246 -0
- aiq/data_models/intermediate_step.py +302 -0
- aiq/data_models/invocation_node.py +38 -0
- aiq/data_models/llm.py +27 -0
- aiq/data_models/logging.py +26 -0
- aiq/data_models/memory.py +27 -0
- aiq/data_models/object_store.py +44 -0
- aiq/data_models/profiler.py +54 -0
- aiq/data_models/registry_handler.py +26 -0
- aiq/data_models/retriever.py +30 -0
- aiq/data_models/retry_mixin.py +35 -0
- aiq/data_models/span.py +187 -0
- aiq/data_models/step_adaptor.py +64 -0
- aiq/data_models/streaming.py +33 -0
- aiq/data_models/swe_bench_model.py +54 -0
- aiq/data_models/telemetry_exporter.py +26 -0
- aiq/data_models/ttc_strategy.py +30 -0
- aiq/embedder/__init__.py +0 -0
- aiq/embedder/langchain_client.py +41 -0
- aiq/embedder/nim_embedder.py +59 -0
- aiq/embedder/openai_embedder.py +43 -0
- aiq/embedder/register.py +24 -0
- aiq/eval/__init__.py +14 -0
- aiq/eval/config.py +60 -0
- aiq/eval/dataset_handler/__init__.py +0 -0
- aiq/eval/dataset_handler/dataset_downloader.py +106 -0
- aiq/eval/dataset_handler/dataset_filter.py +52 -0
- aiq/eval/dataset_handler/dataset_handler.py +254 -0
- aiq/eval/evaluate.py +506 -0
- aiq/eval/evaluator/__init__.py +14 -0
- aiq/eval/evaluator/base_evaluator.py +73 -0
- aiq/eval/evaluator/evaluator_model.py +45 -0
- aiq/eval/intermediate_step_adapter.py +99 -0
- aiq/eval/rag_evaluator/__init__.py +0 -0
- aiq/eval/rag_evaluator/evaluate.py +178 -0
- aiq/eval/rag_evaluator/register.py +143 -0
- aiq/eval/register.py +23 -0
- aiq/eval/remote_workflow.py +133 -0
- aiq/eval/runners/__init__.py +14 -0
- aiq/eval/runners/config.py +39 -0
- aiq/eval/runners/multi_eval_runner.py +54 -0
- aiq/eval/runtime_event_subscriber.py +52 -0
- aiq/eval/swe_bench_evaluator/__init__.py +0 -0
- aiq/eval/swe_bench_evaluator/evaluate.py +215 -0
- aiq/eval/swe_bench_evaluator/register.py +36 -0
- aiq/eval/trajectory_evaluator/__init__.py +0 -0
- aiq/eval/trajectory_evaluator/evaluate.py +75 -0
- aiq/eval/trajectory_evaluator/register.py +40 -0
- aiq/eval/tunable_rag_evaluator/__init__.py +0 -0
- aiq/eval/tunable_rag_evaluator/evaluate.py +245 -0
- aiq/eval/tunable_rag_evaluator/register.py +52 -0
- aiq/eval/usage_stats.py +41 -0
- aiq/eval/utils/__init__.py +0 -0
- aiq/eval/utils/output_uploader.py +140 -0
- aiq/eval/utils/tqdm_position_registry.py +40 -0
- aiq/eval/utils/weave_eval.py +184 -0
- aiq/experimental/__init__.py +0 -0
- aiq/experimental/decorators/__init__.py +0 -0
- aiq/experimental/decorators/experimental_warning_decorator.py +130 -0
- aiq/experimental/test_time_compute/__init__.py +0 -0
- aiq/experimental/test_time_compute/editing/__init__.py +0 -0
- aiq/experimental/test_time_compute/editing/iterative_plan_refinement_editor.py +147 -0
- aiq/experimental/test_time_compute/editing/llm_as_a_judge_editor.py +204 -0
- aiq/experimental/test_time_compute/editing/motivation_aware_summarization.py +107 -0
- aiq/experimental/test_time_compute/functions/__init__.py +0 -0
- aiq/experimental/test_time_compute/functions/execute_score_select_function.py +105 -0
- aiq/experimental/test_time_compute/functions/its_tool_orchestration_function.py +205 -0
- aiq/experimental/test_time_compute/functions/its_tool_wrapper_function.py +146 -0
- aiq/experimental/test_time_compute/functions/plan_select_execute_function.py +224 -0
- aiq/experimental/test_time_compute/models/__init__.py +0 -0
- aiq/experimental/test_time_compute/models/editor_config.py +132 -0
- aiq/experimental/test_time_compute/models/scoring_config.py +112 -0
- aiq/experimental/test_time_compute/models/search_config.py +120 -0
- aiq/experimental/test_time_compute/models/selection_config.py +154 -0
- aiq/experimental/test_time_compute/models/stage_enums.py +43 -0
- aiq/experimental/test_time_compute/models/strategy_base.py +66 -0
- aiq/experimental/test_time_compute/models/tool_use_config.py +41 -0
- aiq/experimental/test_time_compute/models/ttc_item.py +48 -0
- aiq/experimental/test_time_compute/register.py +36 -0
- aiq/experimental/test_time_compute/scoring/__init__.py +0 -0
- aiq/experimental/test_time_compute/scoring/llm_based_agent_scorer.py +168 -0
- aiq/experimental/test_time_compute/scoring/llm_based_plan_scorer.py +168 -0
- aiq/experimental/test_time_compute/scoring/motivation_aware_scorer.py +111 -0
- aiq/experimental/test_time_compute/search/__init__.py +0 -0
- aiq/experimental/test_time_compute/search/multi_llm_planner.py +128 -0
- aiq/experimental/test_time_compute/search/multi_query_retrieval_search.py +122 -0
- aiq/experimental/test_time_compute/search/single_shot_multi_plan_planner.py +128 -0
- aiq/experimental/test_time_compute/selection/__init__.py +0 -0
- aiq/experimental/test_time_compute/selection/best_of_n_selector.py +63 -0
- aiq/experimental/test_time_compute/selection/llm_based_agent_output_selector.py +131 -0
- aiq/experimental/test_time_compute/selection/llm_based_output_merging_selector.py +159 -0
- aiq/experimental/test_time_compute/selection/llm_based_plan_selector.py +128 -0
- aiq/experimental/test_time_compute/selection/threshold_selector.py +58 -0
- aiq/front_ends/__init__.py +14 -0
- aiq/front_ends/console/__init__.py +14 -0
- aiq/front_ends/console/authentication_flow_handler.py +233 -0
- aiq/front_ends/console/console_front_end_config.py +32 -0
- aiq/front_ends/console/console_front_end_plugin.py +96 -0
- aiq/front_ends/console/register.py +25 -0
- aiq/front_ends/cron/__init__.py +14 -0
- aiq/front_ends/fastapi/__init__.py +14 -0
- aiq/front_ends/fastapi/auth_flow_handlers/__init__.py +0 -0
- aiq/front_ends/fastapi/auth_flow_handlers/http_flow_handler.py +27 -0
- aiq/front_ends/fastapi/auth_flow_handlers/websocket_flow_handler.py +107 -0
- aiq/front_ends/fastapi/fastapi_front_end_config.py +234 -0
- aiq/front_ends/fastapi/fastapi_front_end_controller.py +68 -0
- aiq/front_ends/fastapi/fastapi_front_end_plugin.py +116 -0
- aiq/front_ends/fastapi/fastapi_front_end_plugin_worker.py +1092 -0
- aiq/front_ends/fastapi/html_snippets/__init__.py +14 -0
- aiq/front_ends/fastapi/html_snippets/auth_code_grant_success.py +35 -0
- aiq/front_ends/fastapi/intermediate_steps_subscriber.py +80 -0
- aiq/front_ends/fastapi/job_store.py +183 -0
- aiq/front_ends/fastapi/main.py +72 -0
- aiq/front_ends/fastapi/message_handler.py +298 -0
- aiq/front_ends/fastapi/message_validator.py +345 -0
- aiq/front_ends/fastapi/register.py +25 -0
- aiq/front_ends/fastapi/response_helpers.py +195 -0
- aiq/front_ends/fastapi/step_adaptor.py +321 -0
- aiq/front_ends/mcp/__init__.py +14 -0
- aiq/front_ends/mcp/mcp_front_end_config.py +32 -0
- aiq/front_ends/mcp/mcp_front_end_plugin.py +93 -0
- aiq/front_ends/mcp/register.py +27 -0
- aiq/front_ends/mcp/tool_converter.py +242 -0
- aiq/front_ends/register.py +22 -0
- aiq/front_ends/simple_base/__init__.py +14 -0
- aiq/front_ends/simple_base/simple_front_end_plugin_base.py +54 -0
- aiq/llm/__init__.py +0 -0
- aiq/llm/aws_bedrock_llm.py +57 -0
- aiq/llm/nim_llm.py +46 -0
- aiq/llm/openai_llm.py +46 -0
- aiq/llm/register.py +23 -0
- aiq/llm/utils/__init__.py +14 -0
- aiq/llm/utils/env_config_value.py +94 -0
- aiq/llm/utils/error.py +17 -0
- aiq/memory/__init__.py +20 -0
- aiq/memory/interfaces.py +183 -0
- aiq/memory/models.py +112 -0
- aiq/meta/module_to_distro.json +3 -0
- aiq/meta/pypi.md +58 -0
- aiq/object_store/__init__.py +20 -0
- aiq/object_store/in_memory_object_store.py +76 -0
- aiq/object_store/interfaces.py +84 -0
- aiq/object_store/models.py +36 -0
- aiq/object_store/register.py +20 -0
- aiq/observability/__init__.py +14 -0
- aiq/observability/exporter/__init__.py +14 -0
- aiq/observability/exporter/base_exporter.py +449 -0
- aiq/observability/exporter/exporter.py +78 -0
- aiq/observability/exporter/file_exporter.py +33 -0
- aiq/observability/exporter/processing_exporter.py +322 -0
- aiq/observability/exporter/raw_exporter.py +52 -0
- aiq/observability/exporter/span_exporter.py +265 -0
- aiq/observability/exporter_manager.py +335 -0
- aiq/observability/mixin/__init__.py +14 -0
- aiq/observability/mixin/batch_config_mixin.py +26 -0
- aiq/observability/mixin/collector_config_mixin.py +23 -0
- aiq/observability/mixin/file_mixin.py +288 -0
- aiq/observability/mixin/file_mode.py +23 -0
- aiq/observability/mixin/resource_conflict_mixin.py +134 -0
- aiq/observability/mixin/serialize_mixin.py +61 -0
- aiq/observability/mixin/type_introspection_mixin.py +183 -0
- aiq/observability/processor/__init__.py +14 -0
- aiq/observability/processor/batching_processor.py +310 -0
- aiq/observability/processor/callback_processor.py +42 -0
- aiq/observability/processor/intermediate_step_serializer.py +28 -0
- aiq/observability/processor/processor.py +71 -0
- aiq/observability/register.py +96 -0
- aiq/observability/utils/__init__.py +14 -0
- aiq/observability/utils/dict_utils.py +236 -0
- aiq/observability/utils/time_utils.py +31 -0
- aiq/plugins/.namespace +1 -0
- aiq/profiler/__init__.py +0 -0
- aiq/profiler/calc/__init__.py +14 -0
- aiq/profiler/calc/calc_runner.py +627 -0
- aiq/profiler/calc/calculations.py +288 -0
- aiq/profiler/calc/data_models.py +188 -0
- aiq/profiler/calc/plot.py +345 -0
- aiq/profiler/callbacks/__init__.py +0 -0
- aiq/profiler/callbacks/agno_callback_handler.py +295 -0
- aiq/profiler/callbacks/base_callback_class.py +20 -0
- aiq/profiler/callbacks/langchain_callback_handler.py +290 -0
- aiq/profiler/callbacks/llama_index_callback_handler.py +205 -0
- aiq/profiler/callbacks/semantic_kernel_callback_handler.py +238 -0
- aiq/profiler/callbacks/token_usage_base_model.py +27 -0
- aiq/profiler/data_frame_row.py +51 -0
- aiq/profiler/data_models.py +24 -0
- aiq/profiler/decorators/__init__.py +0 -0
- aiq/profiler/decorators/framework_wrapper.py +131 -0
- aiq/profiler/decorators/function_tracking.py +254 -0
- aiq/profiler/forecasting/__init__.py +0 -0
- aiq/profiler/forecasting/config.py +18 -0
- aiq/profiler/forecasting/model_trainer.py +75 -0
- aiq/profiler/forecasting/models/__init__.py +22 -0
- aiq/profiler/forecasting/models/forecasting_base_model.py +40 -0
- aiq/profiler/forecasting/models/linear_model.py +196 -0
- aiq/profiler/forecasting/models/random_forest_regressor.py +268 -0
- aiq/profiler/inference_metrics_model.py +28 -0
- aiq/profiler/inference_optimization/__init__.py +0 -0
- aiq/profiler/inference_optimization/bottleneck_analysis/__init__.py +0 -0
- aiq/profiler/inference_optimization/bottleneck_analysis/nested_stack_analysis.py +460 -0
- aiq/profiler/inference_optimization/bottleneck_analysis/simple_stack_analysis.py +258 -0
- aiq/profiler/inference_optimization/data_models.py +386 -0
- aiq/profiler/inference_optimization/experimental/__init__.py +0 -0
- aiq/profiler/inference_optimization/experimental/concurrency_spike_analysis.py +468 -0
- aiq/profiler/inference_optimization/experimental/prefix_span_analysis.py +405 -0
- aiq/profiler/inference_optimization/llm_metrics.py +212 -0
- aiq/profiler/inference_optimization/prompt_caching.py +163 -0
- aiq/profiler/inference_optimization/token_uniqueness.py +107 -0
- aiq/profiler/inference_optimization/workflow_runtimes.py +72 -0
- aiq/profiler/intermediate_property_adapter.py +102 -0
- aiq/profiler/profile_runner.py +473 -0
- aiq/profiler/utils.py +184 -0
- aiq/registry_handlers/__init__.py +0 -0
- aiq/registry_handlers/local/__init__.py +0 -0
- aiq/registry_handlers/local/local_handler.py +176 -0
- aiq/registry_handlers/local/register_local.py +37 -0
- aiq/registry_handlers/metadata_factory.py +60 -0
- aiq/registry_handlers/package_utils.py +567 -0
- aiq/registry_handlers/pypi/__init__.py +0 -0
- aiq/registry_handlers/pypi/pypi_handler.py +251 -0
- aiq/registry_handlers/pypi/register_pypi.py +40 -0
- aiq/registry_handlers/register.py +21 -0
- aiq/registry_handlers/registry_handler_base.py +157 -0
- aiq/registry_handlers/rest/__init__.py +0 -0
- aiq/registry_handlers/rest/register_rest.py +56 -0
- aiq/registry_handlers/rest/rest_handler.py +237 -0
- aiq/registry_handlers/schemas/__init__.py +0 -0
- aiq/registry_handlers/schemas/headers.py +42 -0
- aiq/registry_handlers/schemas/package.py +68 -0
- aiq/registry_handlers/schemas/publish.py +63 -0
- aiq/registry_handlers/schemas/pull.py +82 -0
- aiq/registry_handlers/schemas/remove.py +36 -0
- aiq/registry_handlers/schemas/search.py +91 -0
- aiq/registry_handlers/schemas/status.py +47 -0
- aiq/retriever/__init__.py +0 -0
- aiq/retriever/interface.py +37 -0
- aiq/retriever/milvus/__init__.py +14 -0
- aiq/retriever/milvus/register.py +81 -0
- aiq/retriever/milvus/retriever.py +228 -0
- aiq/retriever/models.py +74 -0
- aiq/retriever/nemo_retriever/__init__.py +14 -0
- aiq/retriever/nemo_retriever/register.py +60 -0
- aiq/retriever/nemo_retriever/retriever.py +190 -0
- aiq/retriever/register.py +22 -0
- aiq/runtime/__init__.py +14 -0
- aiq/runtime/loader.py +215 -0
- aiq/runtime/runner.py +190 -0
- aiq/runtime/session.py +158 -0
- aiq/runtime/user_metadata.py +130 -0
- aiq/settings/__init__.py +0 -0
- aiq/settings/global_settings.py +318 -0
- aiq/test/.namespace +1 -0
- aiq/tool/__init__.py +0 -0
- aiq/tool/chat_completion.py +74 -0
- aiq/tool/code_execution/README.md +151 -0
- aiq/tool/code_execution/__init__.py +0 -0
- aiq/tool/code_execution/code_sandbox.py +267 -0
- aiq/tool/code_execution/local_sandbox/.gitignore +1 -0
- aiq/tool/code_execution/local_sandbox/Dockerfile.sandbox +60 -0
- aiq/tool/code_execution/local_sandbox/__init__.py +13 -0
- aiq/tool/code_execution/local_sandbox/local_sandbox_server.py +198 -0
- aiq/tool/code_execution/local_sandbox/sandbox.requirements.txt +6 -0
- aiq/tool/code_execution/local_sandbox/start_local_sandbox.sh +50 -0
- aiq/tool/code_execution/register.py +74 -0
- aiq/tool/code_execution/test_code_execution_sandbox.py +414 -0
- aiq/tool/code_execution/utils.py +100 -0
- aiq/tool/datetime_tools.py +42 -0
- aiq/tool/document_search.py +141 -0
- aiq/tool/github_tools/__init__.py +0 -0
- aiq/tool/github_tools/create_github_commit.py +133 -0
- aiq/tool/github_tools/create_github_issue.py +87 -0
- aiq/tool/github_tools/create_github_pr.py +106 -0
- aiq/tool/github_tools/get_github_file.py +106 -0
- aiq/tool/github_tools/get_github_issue.py +166 -0
- aiq/tool/github_tools/get_github_pr.py +256 -0
- aiq/tool/github_tools/update_github_issue.py +100 -0
- aiq/tool/mcp/__init__.py +14 -0
- aiq/tool/mcp/exceptions.py +142 -0
- aiq/tool/mcp/mcp_client.py +255 -0
- aiq/tool/mcp/mcp_tool.py +96 -0
- aiq/tool/memory_tools/__init__.py +0 -0
- aiq/tool/memory_tools/add_memory_tool.py +79 -0
- aiq/tool/memory_tools/delete_memory_tool.py +67 -0
- aiq/tool/memory_tools/get_memory_tool.py +72 -0
- aiq/tool/nvidia_rag.py +95 -0
- aiq/tool/register.py +38 -0
- aiq/tool/retriever.py +89 -0
- aiq/tool/server_tools.py +66 -0
- aiq/utils/__init__.py +0 -0
- aiq/utils/data_models/__init__.py +0 -0
- aiq/utils/data_models/schema_validator.py +58 -0
- aiq/utils/debugging_utils.py +43 -0
- aiq/utils/dump_distro_mapping.py +32 -0
- aiq/utils/exception_handlers/__init__.py +0 -0
- aiq/utils/exception_handlers/automatic_retries.py +289 -0
- aiq/utils/exception_handlers/mcp.py +211 -0
- aiq/utils/exception_handlers/schemas.py +114 -0
- aiq/utils/io/__init__.py +0 -0
- aiq/utils/io/model_processing.py +28 -0
- aiq/utils/io/yaml_tools.py +119 -0
- aiq/utils/log_utils.py +37 -0
- aiq/utils/metadata_utils.py +74 -0
- aiq/utils/optional_imports.py +142 -0
- aiq/utils/producer_consumer_queue.py +178 -0
- aiq/utils/reactive/__init__.py +0 -0
- aiq/utils/reactive/base/__init__.py +0 -0
- aiq/utils/reactive/base/observable_base.py +65 -0
- aiq/utils/reactive/base/observer_base.py +55 -0
- aiq/utils/reactive/base/subject_base.py +79 -0
- aiq/utils/reactive/observable.py +59 -0
- aiq/utils/reactive/observer.py +76 -0
- aiq/utils/reactive/subject.py +131 -0
- aiq/utils/reactive/subscription.py +49 -0
- aiq/utils/settings/__init__.py +0 -0
- aiq/utils/settings/global_settings.py +197 -0
- aiq/utils/string_utils.py +38 -0
- aiq/utils/type_converter.py +290 -0
- aiq/utils/type_utils.py +484 -0
- aiq/utils/url_utils.py +27 -0
- nvidia_nat-1.2.0rc5.dist-info/METADATA +363 -0
- nvidia_nat-1.2.0rc5.dist-info/RECORD +435 -0
- nvidia_nat-1.2.0rc5.dist-info/WHEEL +5 -0
- nvidia_nat-1.2.0rc5.dist-info/entry_points.txt +20 -0
- nvidia_nat-1.2.0rc5.dist-info/licenses/LICENSE-3rd-party.txt +3686 -0
- nvidia_nat-1.2.0rc5.dist-info/licenses/LICENSE.md +201 -0
- nvidia_nat-1.2.0rc5.dist-info/top_level.txt +1 -0
|
@@ -0,0 +1,310 @@
|
|
|
1
|
+
# SPDX-FileCopyrightText: Copyright (c) 2025, NVIDIA CORPORATION & AFFILIATES. All rights reserved.
|
|
2
|
+
# SPDX-License-Identifier: Apache-2.0
|
|
3
|
+
#
|
|
4
|
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
|
5
|
+
# you may not use this file except in compliance with the License.
|
|
6
|
+
# You may obtain a copy of the License at
|
|
7
|
+
#
|
|
8
|
+
# http://www.apache.org/licenses/LICENSE-2.0
|
|
9
|
+
#
|
|
10
|
+
# Unless required by applicable law or agreed to in writing, software
|
|
11
|
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
|
12
|
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
13
|
+
# See the License for the specific language governing permissions and
|
|
14
|
+
# limitations under the License.
|
|
15
|
+
|
|
16
|
+
import asyncio
|
|
17
|
+
import logging
|
|
18
|
+
import time
|
|
19
|
+
from collections import deque
|
|
20
|
+
from collections.abc import Awaitable
|
|
21
|
+
from collections.abc import Callable
|
|
22
|
+
from typing import Any
|
|
23
|
+
from typing import Generic
|
|
24
|
+
from typing import TypeVar
|
|
25
|
+
|
|
26
|
+
from aiq.observability.processor.callback_processor import CallbackProcessor
|
|
27
|
+
|
|
28
|
+
logger = logging.getLogger(__name__)
|
|
29
|
+
|
|
30
|
+
T = TypeVar('T')
|
|
31
|
+
|
|
32
|
+
|
|
33
|
+
class BatchingProcessor(CallbackProcessor[T, list[T]], Generic[T]):
|
|
34
|
+
"""Pass-through batching processor that accumulates items and outputs batched lists.
|
|
35
|
+
|
|
36
|
+
This processor extends CallbackProcessor[T, List[T]] to provide batching functionality.
|
|
37
|
+
It accumulates individual items and outputs them as batches when size or time thresholds
|
|
38
|
+
are met. The batched output continues through the processing pipeline.
|
|
39
|
+
|
|
40
|
+
CRITICAL: Implements proper cleanup to ensure NO ITEMS ARE LOST during shutdown.
|
|
41
|
+
The ProcessingExporter._cleanup() method calls shutdown() on all processors.
|
|
42
|
+
|
|
43
|
+
Key Features:
|
|
44
|
+
- Pass-through design: Processor[T, List[T]]
|
|
45
|
+
- Size-based and time-based batching
|
|
46
|
+
- Pipeline flow: batches continue through downstream processors
|
|
47
|
+
- GUARANTEED: No items lost during cleanup
|
|
48
|
+
- Comprehensive statistics and monitoring
|
|
49
|
+
- Proper cleanup and shutdown handling
|
|
50
|
+
- High-performance async implementation
|
|
51
|
+
- Back-pressure handling with queue limits
|
|
52
|
+
|
|
53
|
+
Pipeline Flow:
|
|
54
|
+
Normal processing: Individual items → BatchingProcessor → List[items] → downstream processors → export
|
|
55
|
+
Time-based flush: Scheduled batches automatically continue through remaining pipeline
|
|
56
|
+
Shutdown: Final batch immediately routed through remaining pipeline
|
|
57
|
+
|
|
58
|
+
Cleanup Guarantee:
|
|
59
|
+
When shutdown() is called, this processor:
|
|
60
|
+
1. Stops accepting new items
|
|
61
|
+
2. Creates final batch from all queued items
|
|
62
|
+
3. Immediately routes final batch through remaining pipeline via callback
|
|
63
|
+
4. Ensures zero data loss with no external coordination needed
|
|
64
|
+
|
|
65
|
+
Usage in Pipeline:
|
|
66
|
+
```python
|
|
67
|
+
# Individual spans → Batched spans → Continue through downstream processors
|
|
68
|
+
exporter.add_processor(BatchingProcessor[Span](batch_size=100)) # Auto-wired with pipeline callback
|
|
69
|
+
exporter.add_processor(FilterProcessor()) # Processes List[Span] from batching
|
|
70
|
+
exporter.add_processor(TransformProcessor()) # Further processing
|
|
71
|
+
```
|
|
72
|
+
|
|
73
|
+
Args:
|
|
74
|
+
batch_size: Maximum items per batch (default: 100)
|
|
75
|
+
flush_interval: Max seconds to wait before flushing (default: 5.0)
|
|
76
|
+
max_queue_size: Maximum items to queue before blocking (default: 1000)
|
|
77
|
+
drop_on_overflow: If True, drop items when queue is full (default: False)
|
|
78
|
+
shutdown_timeout: Max seconds to wait for final batch processing (default: 10.0)
|
|
79
|
+
|
|
80
|
+
Note:
|
|
81
|
+
The done_callback for pipeline integration is automatically set by ProcessingExporter
|
|
82
|
+
when the processor is added to a pipeline. For standalone usage, call set_done_callback().
|
|
83
|
+
"""
|
|
84
|
+
|
|
85
|
+
    def __init__(self,
                 batch_size: int = 100,
                 flush_interval: float = 5.0,
                 max_queue_size: int = 1000,
                 drop_on_overflow: bool = False,
                 shutdown_timeout: float = 10.0):
        """Initialize the batching processor.

        Args:
            batch_size: Maximum items per batch before a size-based flush.
            flush_interval: Maximum seconds to wait before a time-based flush.
            max_queue_size: Maximum queued items before overflow handling kicks in.
            drop_on_overflow: If True, drop new items on overflow; otherwise force a flush.
            shutdown_timeout: Maximum seconds to wait for a concurrent shutdown to finish.
        """
        self._batch_size = batch_size
        self._flush_interval = flush_interval
        self._max_queue_size = max_queue_size
        self._drop_on_overflow = drop_on_overflow
        self._shutdown_timeout = shutdown_timeout
        # Pipeline continuation: set by ProcessingExporter.add_processor() via
        # set_done_callback(); used to route time-based and shutdown batches downstream.
        self._done_callback: Callable[[list[T]], Awaitable[None]] | None = None

        # Batching state
        self._batch_queue: deque[T] = deque()  # items awaiting the next batch
        self._last_flush_time = time.time()  # timestamp of the most recent flush
        self._flush_task: asyncio.Task | None = None  # pending time-based flush, if any
        self._batch_lock = asyncio.Lock()  # guards queue and flush bookkeeping
        self._shutdown_requested = False  # once True, process() bypasses the queue
        self._shutdown_complete = False
        # Signals waiters in shutdown() once cleanup has finished (avoids polling).
        self._shutdown_complete_event = asyncio.Event()

        # Callback for immediate export of scheduled batches
        # NOTE(review): `_done` is never read in the visible code (the pipeline
        # callback is `_done_callback` above) — looks vestigial; confirm before removing.
        self._done = None

        # Statistics
        self._batches_created = 0
        self._items_processed = 0
        self._items_dropped = 0
        self._queue_overflows = 0
        self._shutdown_batches = 0
+
|
|
117
|
+
async def process(self, item: T) -> list[T]:
|
|
118
|
+
"""Process an item by adding it to the batch queue.
|
|
119
|
+
|
|
120
|
+
Returns a batch when batching conditions are met, otherwise returns empty list.
|
|
121
|
+
This maintains the Processor[T, List[T]] contract while handling batching logic.
|
|
122
|
+
|
|
123
|
+
During shutdown, immediately returns items as single-item batches to ensure
|
|
124
|
+
no data loss.
|
|
125
|
+
|
|
126
|
+
Args:
|
|
127
|
+
item: The item to add to the current batch
|
|
128
|
+
|
|
129
|
+
Returns:
|
|
130
|
+
List[T]: A batch of items when ready, empty list otherwise
|
|
131
|
+
"""
|
|
132
|
+
if self._shutdown_requested:
|
|
133
|
+
# During shutdown, return item immediately as single-item batch
|
|
134
|
+
# This ensures no items are lost even if shutdown is in progress
|
|
135
|
+
self._items_processed += 1
|
|
136
|
+
self._shutdown_batches += 1
|
|
137
|
+
logger.debug("Shutdown mode: returning single-item batch for item %s", item)
|
|
138
|
+
return [item]
|
|
139
|
+
|
|
140
|
+
async with self._batch_lock:
|
|
141
|
+
# Handle queue overflow
|
|
142
|
+
if len(self._batch_queue) >= self._max_queue_size:
|
|
143
|
+
self._queue_overflows += 1
|
|
144
|
+
|
|
145
|
+
if self._drop_on_overflow:
|
|
146
|
+
# Drop the item and return empty
|
|
147
|
+
self._items_dropped += 1
|
|
148
|
+
logger.warning("Dropping item due to queue overflow (dropped: %d)", self._items_dropped)
|
|
149
|
+
return []
|
|
150
|
+
# Force flush to make space, then add item
|
|
151
|
+
logger.warning("Queue overflow, forcing flush of %d items", len(self._batch_queue))
|
|
152
|
+
forced_batch = await self._create_batch()
|
|
153
|
+
if forced_batch:
|
|
154
|
+
# Add current item to queue and return the forced batch
|
|
155
|
+
self._batch_queue.append(item)
|
|
156
|
+
self._items_processed += 1
|
|
157
|
+
return forced_batch
|
|
158
|
+
|
|
159
|
+
# Add item to batch queue
|
|
160
|
+
self._batch_queue.append(item)
|
|
161
|
+
self._items_processed += 1
|
|
162
|
+
|
|
163
|
+
# Check flush conditions
|
|
164
|
+
should_flush = (len(self._batch_queue) >= self._batch_size
|
|
165
|
+
or (time.time() - self._last_flush_time) >= self._flush_interval)
|
|
166
|
+
|
|
167
|
+
if should_flush:
|
|
168
|
+
return await self._create_batch()
|
|
169
|
+
# Schedule a time-based flush if not already scheduled
|
|
170
|
+
if self._flush_task is None or self._flush_task.done():
|
|
171
|
+
self._flush_task = asyncio.create_task(self._schedule_flush())
|
|
172
|
+
return []
|
|
173
|
+
|
|
174
|
+
def set_done_callback(self, callback: Callable[[list[T]], Awaitable[None]]):
|
|
175
|
+
"""Set callback function for routing batches through the remaining pipeline.
|
|
176
|
+
|
|
177
|
+
This is automatically set by ProcessingExporter.add_processor() to continue
|
|
178
|
+
batches through downstream processors before final export.
|
|
179
|
+
"""
|
|
180
|
+
self._done_callback = callback
|
|
181
|
+
|
|
182
|
+
async def _schedule_flush(self):
|
|
183
|
+
"""Schedule a flush after the flush interval."""
|
|
184
|
+
try:
|
|
185
|
+
await asyncio.sleep(self._flush_interval)
|
|
186
|
+
async with self._batch_lock:
|
|
187
|
+
if not self._shutdown_requested and self._batch_queue:
|
|
188
|
+
batch = await self._create_batch()
|
|
189
|
+
if batch:
|
|
190
|
+
# Route scheduled batches through pipeline via callback
|
|
191
|
+
if self._done_callback is not None:
|
|
192
|
+
try:
|
|
193
|
+
await self._done_callback(batch)
|
|
194
|
+
logger.debug("Scheduled flush routed batch of %d items through pipeline", len(batch))
|
|
195
|
+
except Exception as e:
|
|
196
|
+
logger.error("Error routing scheduled batch through pipeline: %s", e, exc_info=True)
|
|
197
|
+
else:
|
|
198
|
+
logger.warning("Scheduled flush created batch of %d items but no pipeline callback set",
|
|
199
|
+
len(batch))
|
|
200
|
+
except asyncio.CancelledError:
|
|
201
|
+
pass
|
|
202
|
+
except Exception as e:
|
|
203
|
+
logger.error("Error in scheduled flush: %s", e, exc_info=True)
|
|
204
|
+
|
|
205
|
+
async def _create_batch(self) -> list[T]:
|
|
206
|
+
"""Create a batch from the current queue."""
|
|
207
|
+
if not self._batch_queue:
|
|
208
|
+
return []
|
|
209
|
+
|
|
210
|
+
batch = list(self._batch_queue)
|
|
211
|
+
self._batch_queue.clear()
|
|
212
|
+
self._last_flush_time = time.time()
|
|
213
|
+
self._batches_created += 1
|
|
214
|
+
|
|
215
|
+
logger.debug("Created batch of %d items (total: %d items in %d batches)",
|
|
216
|
+
len(batch),
|
|
217
|
+
self._items_processed,
|
|
218
|
+
self._batches_created)
|
|
219
|
+
|
|
220
|
+
return batch
|
|
221
|
+
|
|
222
|
+
async def force_flush(self) -> list[T]:
    """Immediately drain every queued item into a batch, bypassing the timer.

    Returns:
        List[T]: The current batch, empty list if no items queued
    """
    # Take the batch lock so the drain cannot race producers or shutdown.
    async with self._batch_lock:
        flushed = await self._create_batch()
    return flushed
|
|
230
|
+
|
|
231
|
+
async def shutdown(self) -> None:
    """Shutdown the processor and ensure all items are processed.

    CRITICAL: This method is called by ProcessingExporter._cleanup() to ensure
    no items are lost during shutdown. It immediately routes any remaining
    items as a final batch through the rest of the processing pipeline.

    Safe to call more than once: a second caller waits (bounded by
    ``self._shutdown_timeout``) for the first shutdown to finish.
    """
    if self._shutdown_requested:
        logger.debug("Shutdown already requested, waiting for completion")
        # Wait for shutdown to complete using event instead of polling
        try:
            await asyncio.wait_for(self._shutdown_complete_event.wait(), timeout=self._shutdown_timeout)
            logger.debug("Shutdown completion detected via event")
        except asyncio.TimeoutError:
            logger.warning("Shutdown completion timeout exceeded (%s seconds)", self._shutdown_timeout)
        return

    logger.debug("Starting shutdown of BatchingProcessor (queue size: %d)", len(self._batch_queue))
    # Set the flag first so _schedule_flush() stops draining the queue.
    self._shutdown_requested = True

    try:
        # Cancel scheduled flush task
        if self._flush_task and not self._flush_task.done():
            self._flush_task.cancel()
            try:
                # Await the cancelled task so it is fully torn down before we
                # touch the queue.
                await self._flush_task
            except asyncio.CancelledError:
                pass

        # Create and route final batch through pipeline
        async with self._batch_lock:
            if self._batch_queue:
                final_batch = await self._create_batch()
                logger.debug("Created final batch of %d items during shutdown", len(final_batch))

                # Route final batch through pipeline via callback
                if self._done_callback is not None:
                    try:
                        await self._done_callback(final_batch)
                        logger.debug(
                            "Successfully flushed final batch of %d items through pipeline during shutdown",
                            len(final_batch))
                    except Exception as e:
                        # Log and continue: shutdown must still complete even
                        # if the downstream pipeline rejects the final batch.
                        logger.error("Error routing final batch through pipeline during shutdown: %s",
                                     e,
                                     exc_info=True)
                else:
                    # NOTE(review): without a callback the final batch is lost;
                    # only a warning is emitted.
                    logger.warning("Final batch of %d items created during shutdown but no pipeline callback set",
                                   len(final_batch))
            else:
                logger.debug("No items remaining during shutdown")

        # Signal completion so concurrent shutdown() callers stop waiting.
        self._shutdown_complete = True
        self._shutdown_complete_event.set()
        logger.debug("BatchingProcessor shutdown completed successfully")

    except Exception as e:
        logger.error("Error during BatchingProcessor shutdown: %s", e, exc_info=True)
        # Mark complete even on failure so waiters are never stuck forever.
        self._shutdown_complete = True
        self._shutdown_complete_event.set()
|
|
291
|
+
|
|
292
|
+
def get_stats(self) -> dict[str, Any]:
    """Return a snapshot of batching configuration, counters, and derived metrics."""
    processed = self._items_processed
    # Derived metrics; max(1, ...) guards against division by zero before any
    # batches have been produced.
    avg_items_per_batch = processed / max(1, self._batches_created)
    drop_rate = self._items_dropped / max(1, processed) * 100 if processed > 0 else 0

    return {
        "current_queue_size": len(self._batch_queue),
        "batch_size_limit": self._batch_size,
        "flush_interval": self._flush_interval,
        "max_queue_size": self._max_queue_size,
        "drop_on_overflow": self._drop_on_overflow,
        "shutdown_timeout": self._shutdown_timeout,
        "batches_created": self._batches_created,
        "items_processed": processed,
        "items_dropped": self._items_dropped,
        "queue_overflows": self._queue_overflows,
        "shutdown_batches": self._shutdown_batches,
        "shutdown_requested": self._shutdown_requested,
        "shutdown_complete": self._shutdown_complete,
        "avg_items_per_batch": avg_items_per_batch,
        "drop_rate": drop_rate,
    }
|
|
@@ -0,0 +1,42 @@
|
|
|
1
|
+
# SPDX-FileCopyrightText: Copyright (c) 2025, NVIDIA CORPORATION & AFFILIATES. All rights reserved.
|
|
2
|
+
# SPDX-License-Identifier: Apache-2.0
|
|
3
|
+
#
|
|
4
|
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
|
5
|
+
# you may not use this file except in compliance with the License.
|
|
6
|
+
# You may obtain a copy of the License at
|
|
7
|
+
#
|
|
8
|
+
# http://www.apache.org/licenses/LICENSE-2.0
|
|
9
|
+
#
|
|
10
|
+
# Unless required by applicable law or agreed to in writing, software
|
|
11
|
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
|
12
|
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
13
|
+
# See the License for the specific language governing permissions and
|
|
14
|
+
# limitations under the License.
|
|
15
|
+
|
|
16
|
+
from abc import abstractmethod
|
|
17
|
+
from collections.abc import Awaitable
|
|
18
|
+
from collections.abc import Callable
|
|
19
|
+
from typing import Any
|
|
20
|
+
from typing import TypeVar
|
|
21
|
+
|
|
22
|
+
from aiq.observability.processor.processor import Processor
|
|
23
|
+
|
|
24
|
+
InputT = TypeVar('InputT')
|
|
25
|
+
OutputT = TypeVar('OutputT')
|
|
26
|
+
|
|
27
|
+
|
|
28
|
+
class CallbackProcessor(Processor[InputT, OutputT]):
    """Base class for processors that hand completed items to a done callback.

    Subclasses accept an async "done" callback and invoke it whenever a
    processed item is ready to continue down the pipeline or be exported.
    """

    @abstractmethod
    def set_done_callback(self, callback: Callable[[Any], Awaitable[None]]) -> None:
        """Register the coroutine function to invoke with processed items.

        Args:
            callback (Callable[[Any], Awaitable[None]]): Function to call with processed items
        """
|
|
@@ -0,0 +1,28 @@
|
|
|
1
|
+
# SPDX-FileCopyrightText: Copyright (c) 2024-2025, NVIDIA CORPORATION & AFFILIATES. All rights reserved.
|
|
2
|
+
# SPDX-License-Identifier: Apache-2.0
|
|
3
|
+
#
|
|
4
|
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
|
5
|
+
# you may not use this file except in compliance with the License.
|
|
6
|
+
# You may obtain a copy of the License at
|
|
7
|
+
#
|
|
8
|
+
# http://www.apache.org/licenses/LICENSE-2.0
|
|
9
|
+
#
|
|
10
|
+
# Unless required by applicable law or agreed to in writing, software
|
|
11
|
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
|
12
|
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
13
|
+
# See the License for the specific language governing permissions and
|
|
14
|
+
# limitations under the License.
|
|
15
|
+
|
|
16
|
+
from aiq.data_models.intermediate_step import IntermediateStep
|
|
17
|
+
from aiq.observability.mixin.serialize_mixin import SerializeMixin
|
|
18
|
+
from aiq.observability.processor.processor import Processor
|
|
19
|
+
from aiq.utils.type_utils import override
|
|
20
|
+
|
|
21
|
+
|
|
22
|
+
class IntermediateStepSerializer(SerializeMixin, Processor[IntermediateStep, str]):
    """Processor that converts an IntermediateStep into its serialized string form.

    Uses ``SerializeMixin._serialize_payload`` to perform the serialization.
    (The prior docstring described a file exporter — that was inaccurate; this
    class performs no file I/O.)
    """

    @override
    async def process(self, item: IntermediateStep) -> str:
        # _serialize_payload returns a (payload, extra) pair; only the payload
        # string is needed here — the second element is intentionally discarded.
        serialized_payload, _ = self._serialize_payload(item)
        return serialized_payload
|
|
@@ -0,0 +1,71 @@
|
|
|
1
|
+
# SPDX-FileCopyrightText: Copyright (c) 2025, NVIDIA CORPORATION & AFFILIATES. All rights reserved.
|
|
2
|
+
# SPDX-License-Identifier: Apache-2.0
|
|
3
|
+
#
|
|
4
|
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
|
5
|
+
# you may not use this file except in compliance with the License.
|
|
6
|
+
# You may obtain a copy of the License at
|
|
7
|
+
#
|
|
8
|
+
# http://www.apache.org/licenses/LICENSE-2.0
|
|
9
|
+
#
|
|
10
|
+
# Unless required by applicable law or agreed to in writing, software
|
|
11
|
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
|
12
|
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
13
|
+
# See the License for the specific language governing permissions and
|
|
14
|
+
# limitations under the License.
|
|
15
|
+
|
|
16
|
+
from abc import ABC
|
|
17
|
+
from abc import abstractmethod
|
|
18
|
+
from typing import Generic
|
|
19
|
+
from typing import TypeVar
|
|
20
|
+
|
|
21
|
+
from aiq.observability.mixin.type_introspection_mixin import TypeIntrospectionMixin
|
|
22
|
+
|
|
23
|
+
InputT = TypeVar('InputT')
|
|
24
|
+
OutputT = TypeVar('OutputT')
|
|
25
|
+
|
|
26
|
+
|
|
27
|
+
class Processor(Generic[InputT, OutputT], TypeIntrospectionMixin, ABC):
    """Generic abstract base class for processors that convert between types in export pipelines.

    Processors are the building blocks of processing pipelines in exporters. They can
    transform data from one type to another, enabling flexible data processing chains.

    The generic types work as follows:
    - InputT: The type of items that this processor accepts
    - OutputT: The type of items that this processor produces

    Key Features:
    - Type-safe transformations through generics
    - Type introspection capabilities via TypeIntrospectionMixin
    - Async processing support
    - Chainable in processing pipelines

    Inheritance Structure:
    - Inherits from TypeIntrospectionMixin for type introspection capabilities
    - Implements Generic[InputT, OutputT] for type safety
    - Abstract base class requiring implementation of process()

    Example:
        .. code-block:: python

            class SpanToOtelProcessor(Processor[Span, OtelSpan]):
                async def process(self, item: Span) -> OtelSpan:
                    return convert_span_to_otel(item)

    Note:
        Processors are typically added to ProcessingExporter instances to create
        transformation pipelines. The exporter validates type compatibility between
        chained processors.
    """

    @abstractmethod
    async def process(self, item: InputT) -> OutputT:
        """Process an item and return a potentially different type.

        Args:
            item (InputT): The item to process

        Returns:
            OutputT: The processed item
        """
        pass
|
|
@@ -0,0 +1,96 @@
|
|
|
1
|
+
# SPDX-FileCopyrightText: Copyright (c) 2025, NVIDIA CORPORATION & AFFILIATES. All rights reserved.
|
|
2
|
+
# SPDX-License-Identifier: Apache-2.0
|
|
3
|
+
#
|
|
4
|
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
|
5
|
+
# you may not use this file except in compliance with the License.
|
|
6
|
+
# You may obtain a copy of the License at
|
|
7
|
+
#
|
|
8
|
+
# http://www.apache.org/licenses/LICENSE-2.0
|
|
9
|
+
#
|
|
10
|
+
# Unless required by applicable law or agreed to in writing, software
|
|
11
|
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
|
12
|
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
13
|
+
# See the License for the specific language governing permissions and
|
|
14
|
+
# limitations under the License.
|
|
15
|
+
|
|
16
|
+
import logging
|
|
17
|
+
|
|
18
|
+
from pydantic import Field
|
|
19
|
+
|
|
20
|
+
from aiq.builder.builder import Builder
|
|
21
|
+
from aiq.cli.register_workflow import register_logging_method
|
|
22
|
+
from aiq.cli.register_workflow import register_telemetry_exporter
|
|
23
|
+
from aiq.data_models.logging import LoggingBaseConfig
|
|
24
|
+
from aiq.data_models.telemetry_exporter import TelemetryExporterBaseConfig
|
|
25
|
+
from aiq.observability.mixin.file_mode import FileMode
|
|
26
|
+
|
|
27
|
+
logger = logging.getLogger(__name__)
|
|
28
|
+
|
|
29
|
+
|
|
30
|
+
class FileTelemetryExporterConfig(TelemetryExporterBaseConfig, name="file"):
    """A telemetry exporter that writes runtime traces to local files with optional rolling."""

    # Exact file path when rolling is disabled; directory (or directory + base
    # name) when rolling is enabled.
    output_path: str = Field(description="Output path for logs. When rolling is disabled: exact file path. "
                             "When rolling is enabled: directory path or file path (directory + base name).")
    # Application/project label attached to exported records.
    project: str = Field(description="Name to affiliate with this application.")
    mode: FileMode = Field(
        default=FileMode.APPEND,
        description="File write mode: 'append' to add to existing file or 'overwrite' to start fresh.")
    # Rolling (size-based rotation) settings; only meaningful when
    # enable_rolling is True.
    enable_rolling: bool = Field(default=False, description="Enable rolling log files based on size limits.")
    max_file_size: int = Field(
        default=10 * 1024 * 1024,  # 10MB
        description="Maximum file size in bytes before rolling to a new file.")
    max_files: int = Field(default=5, description="Maximum number of rolled files to keep.")
    cleanup_on_init: bool = Field(default=False, description="Clean up old files during initialization.")
|
|
45
|
+
|
|
46
|
+
|
|
47
|
+
@register_telemetry_exporter(config_type=FileTelemetryExporterConfig)
async def file_telemetry_exporter(config: FileTelemetryExporterConfig, builder: Builder):  # pylint: disable=W0613
    """Yield a FileExporter configured for file-based telemetry export with optional rolling."""

    # Imported lazily so the exporter implementation is only loaded when this
    # exporter is actually built.
    from aiq.observability.exporter.file_exporter import FileExporter

    exporter_kwargs = {
        "output_path": config.output_path,
        "project": config.project,
        "mode": config.mode,
        "enable_rolling": config.enable_rolling,
        "max_file_size": config.max_file_size,
        "max_files": config.max_files,
        "cleanup_on_init": config.cleanup_on_init,
    }
    yield FileExporter(**exporter_kwargs)
|
|
62
|
+
|
|
63
|
+
|
|
64
|
+
class ConsoleLoggingMethodConfig(LoggingBaseConfig, name="console"):
    """A logger to write runtime logs to the console."""

    # Logging level name (e.g. "INFO", "DEBUG"); resolved case-insensitively,
    # with unknown names falling back to INFO (see console_logging_method).
    level: str = Field(description="The logging level of console logger.")
|
|
68
|
+
|
|
69
|
+
|
|
70
|
+
@register_logging_method(config_type=ConsoleLoggingMethodConfig)
async def console_logging_method(config: ConsoleLoggingMethodConfig, builder: Builder):  # pylint: disable=W0613
    """Yield a StreamHandler for console-based logging at the configured level."""
    handler = logging.StreamHandler()
    # Unknown level names fall back to INFO.
    handler.setLevel(getattr(logging, config.level.upper(), logging.INFO))
    yield handler
|
|
79
|
+
|
|
80
|
+
|
|
81
|
+
class FileLoggingMethod(LoggingBaseConfig, name="file"):
    """A logger to write runtime logs to a file."""

    # Destination file; opened in append mode with UTF-8 encoding
    # (see file_logging_method).
    path: str = Field(description="The file path to save the logging output.")
    # Logging level name; resolved case-insensitively, unknown names fall
    # back to INFO.
    level: str = Field(description="The logging level of file logger.")
|
|
86
|
+
|
|
87
|
+
|
|
88
|
+
@register_logging_method(config_type=FileLoggingMethod)
async def file_logging_method(config: FileLoggingMethod, builder: Builder):  # pylint: disable=W0613
    """Yield a FileHandler for file-based logging at the configured level."""
    handler = logging.FileHandler(filename=config.path, mode="a", encoding="utf-8")
    # Unknown level names fall back to INFO.
    handler.setLevel(getattr(logging, config.level.upper(), logging.INFO))
    yield handler
|
|
@@ -0,0 +1,14 @@
|
|
|
1
|
+
# SPDX-FileCopyrightText: Copyright (c) 2024-2025, NVIDIA CORPORATION & AFFILIATES. All rights reserved.
|
|
2
|
+
# SPDX-License-Identifier: Apache-2.0
|
|
3
|
+
#
|
|
4
|
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
|
5
|
+
# you may not use this file except in compliance with the License.
|
|
6
|
+
# You may obtain a copy of the License at
|
|
7
|
+
#
|
|
8
|
+
# http://www.apache.org/licenses/LICENSE-2.0
|
|
9
|
+
#
|
|
10
|
+
# Unless required by applicable law or agreed to in writing, software
|
|
11
|
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
|
12
|
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
13
|
+
# See the License for the specific language governing permissions and
|
|
14
|
+
# limitations under the License.
|