nvidia-nat 1.1.0a20251020__py3-none-any.whl
This diff shows the content of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- aiq/__init__.py +66 -0
- nat/agent/__init__.py +0 -0
- nat/agent/base.py +265 -0
- nat/agent/dual_node.py +72 -0
- nat/agent/prompt_optimizer/__init__.py +0 -0
- nat/agent/prompt_optimizer/prompt.py +68 -0
- nat/agent/prompt_optimizer/register.py +149 -0
- nat/agent/react_agent/__init__.py +0 -0
- nat/agent/react_agent/agent.py +394 -0
- nat/agent/react_agent/output_parser.py +104 -0
- nat/agent/react_agent/prompt.py +44 -0
- nat/agent/react_agent/register.py +168 -0
- nat/agent/reasoning_agent/__init__.py +0 -0
- nat/agent/reasoning_agent/reasoning_agent.py +227 -0
- nat/agent/register.py +23 -0
- nat/agent/rewoo_agent/__init__.py +0 -0
- nat/agent/rewoo_agent/agent.py +593 -0
- nat/agent/rewoo_agent/prompt.py +107 -0
- nat/agent/rewoo_agent/register.py +175 -0
- nat/agent/tool_calling_agent/__init__.py +0 -0
- nat/agent/tool_calling_agent/agent.py +246 -0
- nat/agent/tool_calling_agent/register.py +129 -0
- nat/authentication/__init__.py +14 -0
- nat/authentication/api_key/__init__.py +14 -0
- nat/authentication/api_key/api_key_auth_provider.py +96 -0
- nat/authentication/api_key/api_key_auth_provider_config.py +124 -0
- nat/authentication/api_key/register.py +26 -0
- nat/authentication/credential_validator/__init__.py +14 -0
- nat/authentication/credential_validator/bearer_token_validator.py +557 -0
- nat/authentication/exceptions/__init__.py +14 -0
- nat/authentication/exceptions/api_key_exceptions.py +38 -0
- nat/authentication/http_basic_auth/__init__.py +0 -0
- nat/authentication/http_basic_auth/http_basic_auth_provider.py +81 -0
- nat/authentication/http_basic_auth/register.py +30 -0
- nat/authentication/interfaces.py +96 -0
- nat/authentication/oauth2/__init__.py +14 -0
- nat/authentication/oauth2/oauth2_auth_code_flow_provider.py +140 -0
- nat/authentication/oauth2/oauth2_auth_code_flow_provider_config.py +39 -0
- nat/authentication/oauth2/oauth2_resource_server_config.py +124 -0
- nat/authentication/oauth2/register.py +25 -0
- nat/authentication/register.py +20 -0
- nat/builder/__init__.py +0 -0
- nat/builder/builder.py +317 -0
- nat/builder/component_utils.py +320 -0
- nat/builder/context.py +321 -0
- nat/builder/embedder.py +24 -0
- nat/builder/eval_builder.py +166 -0
- nat/builder/evaluator.py +29 -0
- nat/builder/framework_enum.py +25 -0
- nat/builder/front_end.py +73 -0
- nat/builder/function.py +714 -0
- nat/builder/function_base.py +380 -0
- nat/builder/function_info.py +625 -0
- nat/builder/intermediate_step_manager.py +206 -0
- nat/builder/llm.py +25 -0
- nat/builder/retriever.py +25 -0
- nat/builder/user_interaction_manager.py +78 -0
- nat/builder/workflow.py +160 -0
- nat/builder/workflow_builder.py +1365 -0
- nat/cli/__init__.py +14 -0
- nat/cli/cli_utils/__init__.py +0 -0
- nat/cli/cli_utils/config_override.py +231 -0
- nat/cli/cli_utils/validation.py +37 -0
- nat/cli/commands/__init__.py +0 -0
- nat/cli/commands/configure/__init__.py +0 -0
- nat/cli/commands/configure/channel/__init__.py +0 -0
- nat/cli/commands/configure/channel/add.py +28 -0
- nat/cli/commands/configure/channel/channel.py +34 -0
- nat/cli/commands/configure/channel/remove.py +30 -0
- nat/cli/commands/configure/channel/update.py +30 -0
- nat/cli/commands/configure/configure.py +33 -0
- nat/cli/commands/evaluate.py +139 -0
- nat/cli/commands/info/__init__.py +14 -0
- nat/cli/commands/info/info.py +47 -0
- nat/cli/commands/info/list_channels.py +32 -0
- nat/cli/commands/info/list_components.py +128 -0
- nat/cli/commands/mcp/__init__.py +14 -0
- nat/cli/commands/mcp/mcp.py +986 -0
- nat/cli/commands/object_store/__init__.py +14 -0
- nat/cli/commands/object_store/object_store.py +227 -0
- nat/cli/commands/optimize.py +90 -0
- nat/cli/commands/registry/__init__.py +14 -0
- nat/cli/commands/registry/publish.py +88 -0
- nat/cli/commands/registry/pull.py +118 -0
- nat/cli/commands/registry/registry.py +36 -0
- nat/cli/commands/registry/remove.py +108 -0
- nat/cli/commands/registry/search.py +153 -0
- nat/cli/commands/sizing/__init__.py +14 -0
- nat/cli/commands/sizing/calc.py +297 -0
- nat/cli/commands/sizing/sizing.py +27 -0
- nat/cli/commands/start.py +257 -0
- nat/cli/commands/uninstall.py +81 -0
- nat/cli/commands/validate.py +47 -0
- nat/cli/commands/workflow/__init__.py +14 -0
- nat/cli/commands/workflow/templates/__init__.py.j2 +0 -0
- nat/cli/commands/workflow/templates/config.yml.j2 +17 -0
- nat/cli/commands/workflow/templates/pyproject.toml.j2 +25 -0
- nat/cli/commands/workflow/templates/register.py.j2 +4 -0
- nat/cli/commands/workflow/templates/workflow.py.j2 +50 -0
- nat/cli/commands/workflow/workflow.py +37 -0
- nat/cli/commands/workflow/workflow_commands.py +403 -0
- nat/cli/entrypoint.py +141 -0
- nat/cli/main.py +60 -0
- nat/cli/register_workflow.py +522 -0
- nat/cli/type_registry.py +1069 -0
- nat/control_flow/__init__.py +0 -0
- nat/control_flow/register.py +20 -0
- nat/control_flow/router_agent/__init__.py +0 -0
- nat/control_flow/router_agent/agent.py +329 -0
- nat/control_flow/router_agent/prompt.py +48 -0
- nat/control_flow/router_agent/register.py +91 -0
- nat/control_flow/sequential_executor.py +166 -0
- nat/data_models/__init__.py +14 -0
- nat/data_models/agent.py +34 -0
- nat/data_models/api_server.py +843 -0
- nat/data_models/authentication.py +245 -0
- nat/data_models/common.py +171 -0
- nat/data_models/component.py +60 -0
- nat/data_models/component_ref.py +179 -0
- nat/data_models/config.py +434 -0
- nat/data_models/dataset_handler.py +169 -0
- nat/data_models/discovery_metadata.py +305 -0
- nat/data_models/embedder.py +27 -0
- nat/data_models/evaluate.py +130 -0
- nat/data_models/evaluator.py +26 -0
- nat/data_models/front_end.py +26 -0
- nat/data_models/function.py +64 -0
- nat/data_models/function_dependencies.py +80 -0
- nat/data_models/gated_field_mixin.py +242 -0
- nat/data_models/interactive.py +246 -0
- nat/data_models/intermediate_step.py +302 -0
- nat/data_models/invocation_node.py +38 -0
- nat/data_models/llm.py +27 -0
- nat/data_models/logging.py +26 -0
- nat/data_models/memory.py +27 -0
- nat/data_models/object_store.py +44 -0
- nat/data_models/optimizable.py +119 -0
- nat/data_models/optimizer.py +149 -0
- nat/data_models/profiler.py +54 -0
- nat/data_models/registry_handler.py +26 -0
- nat/data_models/retriever.py +30 -0
- nat/data_models/retry_mixin.py +35 -0
- nat/data_models/span.py +228 -0
- nat/data_models/step_adaptor.py +64 -0
- nat/data_models/streaming.py +33 -0
- nat/data_models/swe_bench_model.py +54 -0
- nat/data_models/telemetry_exporter.py +26 -0
- nat/data_models/temperature_mixin.py +44 -0
- nat/data_models/thinking_mixin.py +86 -0
- nat/data_models/top_p_mixin.py +44 -0
- nat/data_models/ttc_strategy.py +30 -0
- nat/embedder/__init__.py +0 -0
- nat/embedder/azure_openai_embedder.py +46 -0
- nat/embedder/nim_embedder.py +59 -0
- nat/embedder/openai_embedder.py +42 -0
- nat/embedder/register.py +22 -0
- nat/eval/__init__.py +14 -0
- nat/eval/config.py +62 -0
- nat/eval/dataset_handler/__init__.py +0 -0
- nat/eval/dataset_handler/dataset_downloader.py +106 -0
- nat/eval/dataset_handler/dataset_filter.py +52 -0
- nat/eval/dataset_handler/dataset_handler.py +431 -0
- nat/eval/evaluate.py +565 -0
- nat/eval/evaluator/__init__.py +14 -0
- nat/eval/evaluator/base_evaluator.py +77 -0
- nat/eval/evaluator/evaluator_model.py +58 -0
- nat/eval/intermediate_step_adapter.py +99 -0
- nat/eval/rag_evaluator/__init__.py +0 -0
- nat/eval/rag_evaluator/evaluate.py +178 -0
- nat/eval/rag_evaluator/register.py +143 -0
- nat/eval/register.py +26 -0
- nat/eval/remote_workflow.py +133 -0
- nat/eval/runners/__init__.py +14 -0
- nat/eval/runners/config.py +39 -0
- nat/eval/runners/multi_eval_runner.py +54 -0
- nat/eval/runtime_evaluator/__init__.py +14 -0
- nat/eval/runtime_evaluator/evaluate.py +123 -0
- nat/eval/runtime_evaluator/register.py +100 -0
- nat/eval/runtime_event_subscriber.py +52 -0
- nat/eval/swe_bench_evaluator/__init__.py +0 -0
- nat/eval/swe_bench_evaluator/evaluate.py +215 -0
- nat/eval/swe_bench_evaluator/register.py +36 -0
- nat/eval/trajectory_evaluator/__init__.py +0 -0
- nat/eval/trajectory_evaluator/evaluate.py +75 -0
- nat/eval/trajectory_evaluator/register.py +40 -0
- nat/eval/tunable_rag_evaluator/__init__.py +0 -0
- nat/eval/tunable_rag_evaluator/evaluate.py +242 -0
- nat/eval/tunable_rag_evaluator/register.py +52 -0
- nat/eval/usage_stats.py +41 -0
- nat/eval/utils/__init__.py +0 -0
- nat/eval/utils/eval_trace_ctx.py +89 -0
- nat/eval/utils/output_uploader.py +140 -0
- nat/eval/utils/tqdm_position_registry.py +40 -0
- nat/eval/utils/weave_eval.py +193 -0
- nat/experimental/__init__.py +0 -0
- nat/experimental/decorators/__init__.py +0 -0
- nat/experimental/decorators/experimental_warning_decorator.py +154 -0
- nat/experimental/test_time_compute/__init__.py +0 -0
- nat/experimental/test_time_compute/editing/__init__.py +0 -0
- nat/experimental/test_time_compute/editing/iterative_plan_refinement_editor.py +147 -0
- nat/experimental/test_time_compute/editing/llm_as_a_judge_editor.py +204 -0
- nat/experimental/test_time_compute/editing/motivation_aware_summarization.py +107 -0
- nat/experimental/test_time_compute/functions/__init__.py +0 -0
- nat/experimental/test_time_compute/functions/execute_score_select_function.py +105 -0
- nat/experimental/test_time_compute/functions/plan_select_execute_function.py +228 -0
- nat/experimental/test_time_compute/functions/ttc_tool_orchestration_function.py +205 -0
- nat/experimental/test_time_compute/functions/ttc_tool_wrapper_function.py +146 -0
- nat/experimental/test_time_compute/models/__init__.py +0 -0
- nat/experimental/test_time_compute/models/editor_config.py +132 -0
- nat/experimental/test_time_compute/models/scoring_config.py +112 -0
- nat/experimental/test_time_compute/models/search_config.py +120 -0
- nat/experimental/test_time_compute/models/selection_config.py +154 -0
- nat/experimental/test_time_compute/models/stage_enums.py +43 -0
- nat/experimental/test_time_compute/models/strategy_base.py +67 -0
- nat/experimental/test_time_compute/models/tool_use_config.py +41 -0
- nat/experimental/test_time_compute/models/ttc_item.py +48 -0
- nat/experimental/test_time_compute/register.py +35 -0
- nat/experimental/test_time_compute/scoring/__init__.py +0 -0
- nat/experimental/test_time_compute/scoring/llm_based_agent_scorer.py +168 -0
- nat/experimental/test_time_compute/scoring/llm_based_plan_scorer.py +168 -0
- nat/experimental/test_time_compute/scoring/motivation_aware_scorer.py +111 -0
- nat/experimental/test_time_compute/search/__init__.py +0 -0
- nat/experimental/test_time_compute/search/multi_llm_planner.py +128 -0
- nat/experimental/test_time_compute/search/multi_query_retrieval_search.py +122 -0
- nat/experimental/test_time_compute/search/single_shot_multi_plan_planner.py +128 -0
- nat/experimental/test_time_compute/selection/__init__.py +0 -0
- nat/experimental/test_time_compute/selection/best_of_n_selector.py +63 -0
- nat/experimental/test_time_compute/selection/llm_based_agent_output_selector.py +131 -0
- nat/experimental/test_time_compute/selection/llm_based_output_merging_selector.py +157 -0
- nat/experimental/test_time_compute/selection/llm_based_plan_selector.py +128 -0
- nat/experimental/test_time_compute/selection/threshold_selector.py +58 -0
- nat/front_ends/__init__.py +14 -0
- nat/front_ends/console/__init__.py +14 -0
- nat/front_ends/console/authentication_flow_handler.py +285 -0
- nat/front_ends/console/console_front_end_config.py +32 -0
- nat/front_ends/console/console_front_end_plugin.py +108 -0
- nat/front_ends/console/register.py +25 -0
- nat/front_ends/cron/__init__.py +14 -0
- nat/front_ends/fastapi/__init__.py +14 -0
- nat/front_ends/fastapi/auth_flow_handlers/__init__.py +0 -0
- nat/front_ends/fastapi/auth_flow_handlers/http_flow_handler.py +27 -0
- nat/front_ends/fastapi/auth_flow_handlers/websocket_flow_handler.py +142 -0
- nat/front_ends/fastapi/dask_client_mixin.py +65 -0
- nat/front_ends/fastapi/fastapi_front_end_config.py +272 -0
- nat/front_ends/fastapi/fastapi_front_end_controller.py +68 -0
- nat/front_ends/fastapi/fastapi_front_end_plugin.py +247 -0
- nat/front_ends/fastapi/fastapi_front_end_plugin_worker.py +1257 -0
- nat/front_ends/fastapi/html_snippets/__init__.py +14 -0
- nat/front_ends/fastapi/html_snippets/auth_code_grant_success.py +35 -0
- nat/front_ends/fastapi/intermediate_steps_subscriber.py +80 -0
- nat/front_ends/fastapi/job_store.py +602 -0
- nat/front_ends/fastapi/main.py +64 -0
- nat/front_ends/fastapi/message_handler.py +344 -0
- nat/front_ends/fastapi/message_validator.py +351 -0
- nat/front_ends/fastapi/register.py +25 -0
- nat/front_ends/fastapi/response_helpers.py +195 -0
- nat/front_ends/fastapi/step_adaptor.py +319 -0
- nat/front_ends/fastapi/utils.py +57 -0
- nat/front_ends/mcp/__init__.py +14 -0
- nat/front_ends/mcp/introspection_token_verifier.py +73 -0
- nat/front_ends/mcp/mcp_front_end_config.py +90 -0
- nat/front_ends/mcp/mcp_front_end_plugin.py +113 -0
- nat/front_ends/mcp/mcp_front_end_plugin_worker.py +268 -0
- nat/front_ends/mcp/memory_profiler.py +320 -0
- nat/front_ends/mcp/register.py +27 -0
- nat/front_ends/mcp/tool_converter.py +290 -0
- nat/front_ends/register.py +21 -0
- nat/front_ends/simple_base/__init__.py +14 -0
- nat/front_ends/simple_base/simple_front_end_plugin_base.py +56 -0
- nat/llm/__init__.py +0 -0
- nat/llm/aws_bedrock_llm.py +69 -0
- nat/llm/azure_openai_llm.py +57 -0
- nat/llm/litellm_llm.py +69 -0
- nat/llm/nim_llm.py +58 -0
- nat/llm/openai_llm.py +54 -0
- nat/llm/register.py +27 -0
- nat/llm/utils/__init__.py +14 -0
- nat/llm/utils/env_config_value.py +93 -0
- nat/llm/utils/error.py +17 -0
- nat/llm/utils/thinking.py +215 -0
- nat/memory/__init__.py +20 -0
- nat/memory/interfaces.py +183 -0
- nat/memory/models.py +112 -0
- nat/meta/pypi.md +58 -0
- nat/object_store/__init__.py +20 -0
- nat/object_store/in_memory_object_store.py +76 -0
- nat/object_store/interfaces.py +84 -0
- nat/object_store/models.py +38 -0
- nat/object_store/register.py +19 -0
- nat/observability/__init__.py +14 -0
- nat/observability/exporter/__init__.py +14 -0
- nat/observability/exporter/base_exporter.py +449 -0
- nat/observability/exporter/exporter.py +78 -0
- nat/observability/exporter/file_exporter.py +33 -0
- nat/observability/exporter/processing_exporter.py +550 -0
- nat/observability/exporter/raw_exporter.py +52 -0
- nat/observability/exporter/span_exporter.py +308 -0
- nat/observability/exporter_manager.py +335 -0
- nat/observability/mixin/__init__.py +14 -0
- nat/observability/mixin/batch_config_mixin.py +26 -0
- nat/observability/mixin/collector_config_mixin.py +23 -0
- nat/observability/mixin/file_mixin.py +288 -0
- nat/observability/mixin/file_mode.py +23 -0
- nat/observability/mixin/redaction_config_mixin.py +42 -0
- nat/observability/mixin/resource_conflict_mixin.py +134 -0
- nat/observability/mixin/serialize_mixin.py +61 -0
- nat/observability/mixin/tagging_config_mixin.py +62 -0
- nat/observability/mixin/type_introspection_mixin.py +496 -0
- nat/observability/processor/__init__.py +14 -0
- nat/observability/processor/batching_processor.py +308 -0
- nat/observability/processor/callback_processor.py +42 -0
- nat/observability/processor/falsy_batch_filter_processor.py +55 -0
- nat/observability/processor/intermediate_step_serializer.py +28 -0
- nat/observability/processor/processor.py +74 -0
- nat/observability/processor/processor_factory.py +70 -0
- nat/observability/processor/redaction/__init__.py +24 -0
- nat/observability/processor/redaction/contextual_redaction_processor.py +125 -0
- nat/observability/processor/redaction/contextual_span_redaction_processor.py +66 -0
- nat/observability/processor/redaction/redaction_processor.py +177 -0
- nat/observability/processor/redaction/span_header_redaction_processor.py +92 -0
- nat/observability/processor/span_tagging_processor.py +68 -0
- nat/observability/register.py +114 -0
- nat/observability/utils/__init__.py +14 -0
- nat/observability/utils/dict_utils.py +236 -0
- nat/observability/utils/time_utils.py +31 -0
- nat/plugins/.namespace +1 -0
- nat/profiler/__init__.py +0 -0
- nat/profiler/calc/__init__.py +14 -0
- nat/profiler/calc/calc_runner.py +626 -0
- nat/profiler/calc/calculations.py +288 -0
- nat/profiler/calc/data_models.py +188 -0
- nat/profiler/calc/plot.py +345 -0
- nat/profiler/callbacks/__init__.py +0 -0
- nat/profiler/callbacks/agno_callback_handler.py +295 -0
- nat/profiler/callbacks/base_callback_class.py +20 -0
- nat/profiler/callbacks/langchain_callback_handler.py +297 -0
- nat/profiler/callbacks/llama_index_callback_handler.py +205 -0
- nat/profiler/callbacks/semantic_kernel_callback_handler.py +238 -0
- nat/profiler/callbacks/token_usage_base_model.py +27 -0
- nat/profiler/data_frame_row.py +51 -0
- nat/profiler/data_models.py +24 -0
- nat/profiler/decorators/__init__.py +0 -0
- nat/profiler/decorators/framework_wrapper.py +180 -0
- nat/profiler/decorators/function_tracking.py +411 -0
- nat/profiler/forecasting/__init__.py +0 -0
- nat/profiler/forecasting/config.py +18 -0
- nat/profiler/forecasting/model_trainer.py +75 -0
- nat/profiler/forecasting/models/__init__.py +22 -0
- nat/profiler/forecasting/models/forecasting_base_model.py +42 -0
- nat/profiler/forecasting/models/linear_model.py +197 -0
- nat/profiler/forecasting/models/random_forest_regressor.py +269 -0
- nat/profiler/inference_metrics_model.py +28 -0
- nat/profiler/inference_optimization/__init__.py +0 -0
- nat/profiler/inference_optimization/bottleneck_analysis/__init__.py +0 -0
- nat/profiler/inference_optimization/bottleneck_analysis/nested_stack_analysis.py +460 -0
- nat/profiler/inference_optimization/bottleneck_analysis/simple_stack_analysis.py +258 -0
- nat/profiler/inference_optimization/data_models.py +386 -0
- nat/profiler/inference_optimization/experimental/__init__.py +0 -0
- nat/profiler/inference_optimization/experimental/concurrency_spike_analysis.py +468 -0
- nat/profiler/inference_optimization/experimental/prefix_span_analysis.py +404 -0
- nat/profiler/inference_optimization/llm_metrics.py +212 -0
- nat/profiler/inference_optimization/prompt_caching.py +163 -0
- nat/profiler/inference_optimization/token_uniqueness.py +107 -0
- nat/profiler/inference_optimization/workflow_runtimes.py +72 -0
- nat/profiler/intermediate_property_adapter.py +102 -0
- nat/profiler/parameter_optimization/__init__.py +0 -0
- nat/profiler/parameter_optimization/optimizable_utils.py +93 -0
- nat/profiler/parameter_optimization/optimizer_runtime.py +67 -0
- nat/profiler/parameter_optimization/parameter_optimizer.py +153 -0
- nat/profiler/parameter_optimization/parameter_selection.py +107 -0
- nat/profiler/parameter_optimization/pareto_visualizer.py +380 -0
- nat/profiler/parameter_optimization/prompt_optimizer.py +384 -0
- nat/profiler/parameter_optimization/update_helpers.py +66 -0
- nat/profiler/profile_runner.py +478 -0
- nat/profiler/utils.py +186 -0
- nat/registry_handlers/__init__.py +0 -0
- nat/registry_handlers/local/__init__.py +0 -0
- nat/registry_handlers/local/local_handler.py +176 -0
- nat/registry_handlers/local/register_local.py +37 -0
- nat/registry_handlers/metadata_factory.py +60 -0
- nat/registry_handlers/package_utils.py +570 -0
- nat/registry_handlers/pypi/__init__.py +0 -0
- nat/registry_handlers/pypi/pypi_handler.py +248 -0
- nat/registry_handlers/pypi/register_pypi.py +40 -0
- nat/registry_handlers/register.py +20 -0
- nat/registry_handlers/registry_handler_base.py +157 -0
- nat/registry_handlers/rest/__init__.py +0 -0
- nat/registry_handlers/rest/register_rest.py +56 -0
- nat/registry_handlers/rest/rest_handler.py +236 -0
- nat/registry_handlers/schemas/__init__.py +0 -0
- nat/registry_handlers/schemas/headers.py +42 -0
- nat/registry_handlers/schemas/package.py +68 -0
- nat/registry_handlers/schemas/publish.py +68 -0
- nat/registry_handlers/schemas/pull.py +82 -0
- nat/registry_handlers/schemas/remove.py +36 -0
- nat/registry_handlers/schemas/search.py +91 -0
- nat/registry_handlers/schemas/status.py +47 -0
- nat/retriever/__init__.py +0 -0
- nat/retriever/interface.py +41 -0
- nat/retriever/milvus/__init__.py +14 -0
- nat/retriever/milvus/register.py +81 -0
- nat/retriever/milvus/retriever.py +228 -0
- nat/retriever/models.py +77 -0
- nat/retriever/nemo_retriever/__init__.py +14 -0
- nat/retriever/nemo_retriever/register.py +60 -0
- nat/retriever/nemo_retriever/retriever.py +190 -0
- nat/retriever/register.py +21 -0
- nat/runtime/__init__.py +14 -0
- nat/runtime/loader.py +220 -0
- nat/runtime/runner.py +292 -0
- nat/runtime/session.py +223 -0
- nat/runtime/user_metadata.py +130 -0
- nat/settings/__init__.py +0 -0
- nat/settings/global_settings.py +329 -0
- nat/test/.namespace +1 -0
- nat/tool/__init__.py +0 -0
- nat/tool/chat_completion.py +77 -0
- nat/tool/code_execution/README.md +151 -0
- nat/tool/code_execution/__init__.py +0 -0
- nat/tool/code_execution/code_sandbox.py +267 -0
- nat/tool/code_execution/local_sandbox/.gitignore +1 -0
- nat/tool/code_execution/local_sandbox/Dockerfile.sandbox +60 -0
- nat/tool/code_execution/local_sandbox/__init__.py +13 -0
- nat/tool/code_execution/local_sandbox/local_sandbox_server.py +198 -0
- nat/tool/code_execution/local_sandbox/sandbox.requirements.txt +6 -0
- nat/tool/code_execution/local_sandbox/start_local_sandbox.sh +50 -0
- nat/tool/code_execution/register.py +74 -0
- nat/tool/code_execution/test_code_execution_sandbox.py +414 -0
- nat/tool/code_execution/utils.py +100 -0
- nat/tool/datetime_tools.py +82 -0
- nat/tool/document_search.py +141 -0
- nat/tool/github_tools.py +450 -0
- nat/tool/memory_tools/__init__.py +0 -0
- nat/tool/memory_tools/add_memory_tool.py +79 -0
- nat/tool/memory_tools/delete_memory_tool.py +66 -0
- nat/tool/memory_tools/get_memory_tool.py +72 -0
- nat/tool/nvidia_rag.py +95 -0
- nat/tool/register.py +31 -0
- nat/tool/retriever.py +95 -0
- nat/tool/server_tools.py +66 -0
- nat/utils/__init__.py +0 -0
- nat/utils/callable_utils.py +70 -0
- nat/utils/data_models/__init__.py +0 -0
- nat/utils/data_models/schema_validator.py +58 -0
- nat/utils/debugging_utils.py +43 -0
- nat/utils/decorators.py +210 -0
- nat/utils/dump_distro_mapping.py +32 -0
- nat/utils/exception_handlers/__init__.py +0 -0
- nat/utils/exception_handlers/automatic_retries.py +342 -0
- nat/utils/exception_handlers/schemas.py +114 -0
- nat/utils/io/__init__.py +0 -0
- nat/utils/io/model_processing.py +28 -0
- nat/utils/io/yaml_tools.py +119 -0
- nat/utils/log_levels.py +25 -0
- nat/utils/log_utils.py +37 -0
- nat/utils/metadata_utils.py +74 -0
- nat/utils/optional_imports.py +142 -0
- nat/utils/producer_consumer_queue.py +178 -0
- nat/utils/reactive/__init__.py +0 -0
- nat/utils/reactive/base/__init__.py +0 -0
- nat/utils/reactive/base/observable_base.py +65 -0
- nat/utils/reactive/base/observer_base.py +55 -0
- nat/utils/reactive/base/subject_base.py +79 -0
- nat/utils/reactive/observable.py +59 -0
- nat/utils/reactive/observer.py +76 -0
- nat/utils/reactive/subject.py +131 -0
- nat/utils/reactive/subscription.py +49 -0
- nat/utils/settings/__init__.py +0 -0
- nat/utils/settings/global_settings.py +195 -0
- nat/utils/string_utils.py +38 -0
- nat/utils/type_converter.py +299 -0
- nat/utils/type_utils.py +488 -0
- nat/utils/url_utils.py +27 -0
- nvidia_nat-1.1.0a20251020.dist-info/METADATA +195 -0
- nvidia_nat-1.1.0a20251020.dist-info/RECORD +480 -0
- nvidia_nat-1.1.0a20251020.dist-info/WHEEL +5 -0
- nvidia_nat-1.1.0a20251020.dist-info/entry_points.txt +22 -0
- nvidia_nat-1.1.0a20251020.dist-info/licenses/LICENSE-3rd-party.txt +5478 -0
- nvidia_nat-1.1.0a20251020.dist-info/licenses/LICENSE.md +201 -0
- nvidia_nat-1.1.0a20251020.dist-info/top_level.txt +2 -0
nat/observability/exporter/processing_exporter.py
@@ -0,0 +1,550 @@
# SPDX-FileCopyrightText: Copyright (c) 2025, NVIDIA CORPORATION & AFFILIATES. All rights reserved.
# SPDX-License-Identifier: Apache-2.0
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import asyncio
import logging
from abc import abstractmethod
from collections.abc import Coroutine
from typing import Any
from typing import Generic
from typing import TypeVar

from nat.builder.context import ContextState
from nat.data_models.intermediate_step import IntermediateStep
from nat.observability.exporter.base_exporter import BaseExporter
from nat.observability.mixin.type_introspection_mixin import TypeIntrospectionMixin
from nat.observability.processor.callback_processor import CallbackProcessor
from nat.observability.processor.processor import Processor
from nat.utils.type_utils import DecomposedType
from nat.utils.type_utils import override

PipelineInputT = TypeVar("PipelineInputT")
PipelineOutputT = TypeVar("PipelineOutputT")

logger = logging.getLogger(__name__)


class ProcessingExporter(Generic[PipelineInputT, PipelineOutputT], BaseExporter, TypeIntrospectionMixin):
    """A base class for telemetry exporters with processing pipeline support.

    This class extends BaseExporter to add processor pipeline functionality.
    It manages a chain of processors that can transform items before export.

    The generic types work as follows:
    - PipelineInputT: The type of items that enter the processing pipeline (e.g., Span)
    - PipelineOutputT: The type of items after processing through the pipeline (e.g., converted format)

    Key Features:
    - Processor pipeline management (add, remove, clear)
    - Type compatibility validation between processors
    - Pipeline processing with error handling
    - Configurable None filtering: processors returning None can drop items from the pipeline
    - Automatic type validation before export
    """

    # All ProcessingExporter instances automatically use this for signature checking
    _signature_method = '_process_pipeline'

    def __init__(self, context_state: ContextState | None = None, drop_nones: bool = True):
        """Initialize the processing exporter.

        Args:
            context_state (ContextState | None): The context state to use for the exporter.
            drop_nones (bool): Whether to drop items when processors return None (default: True).
        """
        super().__init__(context_state)
        self._processors: list[Processor] = []  # List of processors that implement process(item) -> item
        self._processor_names: dict[str, int] = {}  # Maps processor names to their positions
        self._pipeline_locked: bool = False  # Prevents modifications after startup
        self._drop_nones: bool = drop_nones  # Whether to drop None values between processors

    def add_processor(self,
                      processor: Processor,
                      name: str | None = None,
                      position: int | None = None,
                      before: str | None = None,
                      after: str | None = None) -> None:
        """Add a processor to the processing pipeline.

        Processors are executed in the order they are added and can transform between any types (T -> U).
        Supports flexible positioning using names, positions, or relative placement.

        Args:
            processor (Processor): The processor to add to the pipeline
            name (str | None): Name for the processor (for later reference). Must be unique.
            position (int | None): Specific position to insert at (0-based index, -1 for append)
            before (str | None): Insert before the named processor
            after (str | None): Insert after the named processor

        Raises:
            RuntimeError: If pipeline is locked (after startup)
            ValueError: If positioning arguments conflict or named processor not found
        """
        self._check_pipeline_locked()

        # Determine insertion position
        insert_position = self._calculate_insertion_position(position, before, after)

        # Validate type compatibility at insertion point
        self._validate_insertion_compatibility(processor, insert_position)

        # Pre-validate name (no side effects yet)
        if name is not None:
            if not isinstance(name, str):
                raise TypeError(f"Processor name must be a string, got {type(name).__name__}")
            if name in self._processor_names:
                raise ValueError(f"Processor name '{name}' already exists")

        # Shift existing name positions (do this before list mutation)
        for proc_name, pos in list(self._processor_names.items()):
            if pos >= insert_position:
                self._processor_names[proc_name] = pos + 1

        # Insert the processor
        if insert_position == len(self._processors):
            self._processors.append(processor)
        else:
            self._processors.insert(insert_position, processor)

        # Record the new processor name, if provided
        if name is not None:
            self._processor_names[name] = insert_position

        # Set up pipeline continuation callback for processors that support it
        if isinstance(processor, CallbackProcessor):

            # Create a callback that continues processing through the rest of the pipeline
            async def pipeline_callback(item):
                await self._continue_pipeline_after(processor, item)

            processor.set_done_callback(pipeline_callback)

    def remove_processor(self, processor: Processor | str | int) -> None:
        """Remove a processor from the processing pipeline.

        Args:
            processor (Processor | str | int): The processor to remove (by object, name, or position).

        Raises:
            RuntimeError: If pipeline is locked (after startup)
            ValueError: If named processor or position not found
            TypeError: If processor argument has invalid type
        """
        self._check_pipeline_locked()

        # Determine processor and position to remove
        if isinstance(processor, str):
            # Remove by name
            if processor not in self._processor_names:
                raise ValueError(f"Processor '{processor}' not found in pipeline")
            position = self._processor_names[processor]
            processor_obj = self._processors[position]
        elif isinstance(processor, int):
            # Remove by position
            if not (0 <= processor < len(self._processors)):
                raise ValueError(f"Position {processor} is out of range [0, {len(self._processors) - 1}]")
            position = processor
            processor_obj = self._processors[position]
        elif isinstance(processor, Processor):
            # Remove by object (existing behavior)
            if processor not in self._processors:
                return  # Silently ignore if not found (existing behavior)
            position = self._processors.index(processor)
            processor_obj = processor
        else:
            raise TypeError(f"Processor must be a Processor object, string name, or int position, "
                            f"got {type(processor).__name__}")

        # Remove the processor
        self._processors.remove(processor_obj)

        # Remove from name mapping and update positions
        name_to_remove = None
        for name, pos in self._processor_names.items():
            if pos == position:
                name_to_remove = name
                break

        if name_to_remove:
            del self._processor_names[name_to_remove]

        # Update positions for processors that shifted
        for name, pos in self._processor_names.items():
            if pos > position:
                self._processor_names[name] = pos - 1

    def clear_processors(self) -> None:
        """Clear all processors from the pipeline."""
        self._check_pipeline_locked()
        self._processors.clear()
        self._processor_names.clear()

    def reset_pipeline(self) -> None:
        """Reset the pipeline to allow modifications.

        This unlocks the pipeline and clears all processors, allowing
        the pipeline to be reconfigured. Can only be called when the
        exporter is stopped.

        Raises:
            RuntimeError: If exporter is currently running
        """
        if self._running:
            raise RuntimeError("Cannot reset pipeline while exporter is running. "
                               "Call stop() first, then reset_pipeline().")

        self._pipeline_locked = False
        self._processors.clear()
        self._processor_names.clear()
        logger.debug("Pipeline reset - unlocked and cleared all processors")

    def get_processor_by_name(self, name: str) -> Processor | None:
        """Get a processor by its name.

        Args:
            name (str): The name of the processor to retrieve

        Returns:
            Processor | None: The processor with the given name, or None if not found
        """
        if not isinstance(name, str):
            raise TypeError(f"Processor name must be a string, got {type(name).__name__}")
        if name in self._processor_names:
            position = self._processor_names[name]
            return self._processors[position]
        logger.debug("Processor '%s' not found in pipeline", name)
        return None

    def _check_pipeline_locked(self) -> None:
        """Check if pipeline is locked and raise error if it is."""
        if self._pipeline_locked:
            raise RuntimeError("Cannot modify processor pipeline after exporter has started. "
                               "Pipeline must be fully configured before calling start().")

    def _calculate_insertion_position(self, position: int | None, before: str | None, after: str | None) -> int:
        """Calculate the insertion position based on provided arguments.

        Args:
            position (int | None): Explicit position (0-based index, -1 for append)
            before (str | None): Insert before this named processor
            after (str | None): Insert after this named processor

        Returns:
            int: The calculated insertion position

        Raises:
            ValueError: If arguments conflict or named processor not found
        """
        # Check for conflicting arguments
        args_provided = sum(x is not None for x in [position, before, after])
        if args_provided > 1:
            raise ValueError("Only one of position, before, or after can be specified")

        # Default to append
        if args_provided == 0:
            return len(self._processors)

        # Handle explicit position
        if position is not None:
            if position == -1:
                return len(self._processors)
            if 0 <= position <= len(self._processors):
                return position
            raise ValueError(f"Position {position} is out of range [0, {len(self._processors)}]")

        # Handle before/after named processors
        if before is not None:
            if not isinstance(before, str):
                raise TypeError(f"'before' parameter must be a string, got {type(before).__name__}")
            if before not in self._processor_names:
                raise ValueError(f"Processor '{before}' not found in pipeline")
            return self._processor_names[before]

        if after is not None:
            if not isinstance(after, str):
                raise TypeError(f"'after' parameter must be a string, got {type(after).__name__}")
            if after not in self._processor_names:
                raise ValueError(f"Processor '{after}' not found in pipeline")
            return self._processor_names[after] + 1

        # Should never reach here
        return len(self._processors)

    def _validate_insertion_compatibility(self, processor: Processor, position: int) -> None:
        """Validate type compatibility for processor insertion.

        Args:
            processor (Processor): The processor to insert
            position (int): The position where it will be inserted

        Raises:
            ValueError: If processor is not compatible with neighbors
        """
        # Check compatibility with neighbors
        if position > 0:
            predecessor = self._processors[position - 1]
            self._check_processor_compatibility(predecessor,
                                                processor,
                                                "predecessor",
                                                str(predecessor.output_type),
                                                str(processor.input_type))

        if position < len(self._processors):
            successor = self._processors[position]
            self._check_processor_compatibility(processor,
                                                successor,
                                                "successor",
                                                str(processor.output_type),
                                                str(successor.input_type))

    def _check_processor_compatibility(self,
                                       source_processor: Processor,
                                       target_processor: Processor,
                                       relationship: str,
                                       source_type: str,
                                       target_type: str) -> None:
        """Check type compatibility between two processors using Pydantic validation.

        Args:
            source_processor (Processor): The processor providing output
            target_processor (Processor): The processor receiving input
            relationship (str): Description of relationship ("predecessor" or "successor")
            source_type (str): String representation of source type
            target_type (str): String representation of target type
        """
        # Use Pydantic-based type compatibility checking
        if not source_processor.is_output_compatible_with(target_processor.input_type):
            raise ValueError(f"Processor {target_processor.__class__.__name__} input type {target_type} "
                             f"is not compatible with {relationship} {source_processor.__class__.__name__} "
                             f"output type {source_type}")

    async def _pre_start(self) -> None:

        # Validate that the pipeline is compatible with the exporter
        if len(self._processors) > 0:
            first_processor = self._processors[0]
            last_processor = self._processors[-1]

            # Validate that the first processor's input type is compatible with the exporter's input type
            if not first_processor.is_compatible_with_input(self.input_type):
                logger.error("First processor %s input=%s incompatible with exporter input=%s",
                             first_processor.__class__.__name__,
                             first_processor.input_type,
                             self.input_type)
                raise ValueError("First processor incompatible with exporter input")
            # Validate that the last processor's output type is compatible with the exporter's output type.
            # Use DecomposedType.is_type_compatible for the final export stage to allow batch compatibility.
            # This enables BatchingProcessor[T] -> Exporter[T] patterns where the exporter handles both T and list[T].
            if not DecomposedType.is_type_compatible(last_processor.output_type, self.output_type):
                logger.error("Last processor %s output=%s incompatible with exporter output=%s",
                             last_processor.__class__.__name__,
                             last_processor.output_type,
                             self.output_type)
                raise ValueError("Last processor incompatible with exporter output")

        # Lock the pipeline to prevent further modifications
        self._pipeline_locked = True

    async def _process_pipeline(self, item: PipelineInputT) -> PipelineOutputT | None:
        """Process item through all registered processors.

        Args:
            item (PipelineInputT): The item to process (starts as PipelineInputT, can transform to PipelineOutputT)

        Returns:
            PipelineOutputT | None: The processed item after running through all processors
        """
        return await self._process_through_processors(self._processors, item)  # type: ignore

    async def _process_through_processors(self, processors: list[Processor], item: Any) -> Any:
        """Process an item through a list of processors.

        Args:
            processors (list[Processor]): List of processors to run the item through
            item (Any): The item to process

        Returns:
            Any: The processed item after running through all processors, or None if
                drop_nones is True and any processor returned None
        """
        processed_item = item
        for processor in processors:
            try:
                processed_item = await processor.process(processed_item)
                # Drop None values between processors if configured to do so
                if self._drop_nones and processed_item is None:
                    logger.debug("Processor %s returned None, dropping item from pipeline",
                                 processor.__class__.__name__)
                    return None
            except Exception as e:
                logger.exception("Error in processor %s: %s", processor.__class__.__name__, e)
                # Continue with unprocessed item rather than failing
        return processed_item

    async def _export_final_item(self, processed_item: Any, raise_on_invalid: bool = False) -> None:
        """Export a processed item with proper type handling.

        Args:
            processed_item (Any): The item to export
            raise_on_invalid (bool): If True, raise ValueError for invalid types instead of logging a warning
        """
        if isinstance(processed_item, list):
            if len(processed_item) > 0:
                await self.export_processed(processed_item)
            else:
                logger.debug("Skipping export of empty batch")
        elif self.validate_output_type(processed_item):
            await self.export_processed(processed_item)
        else:
            if raise_on_invalid:
                logger.error("Invalid processed item type for export: %s (expected %s or list[%s])",
                             type(processed_item),
                             self.output_type,
                             self.output_type)
                raise ValueError("Invalid processed item type for export")
            logger.warning("Processed item %s is not a valid output type for export", processed_item)

    async def _continue_pipeline_after(self, source_processor: Processor, item: Any) -> None:
        """Continue processing an item through the pipeline after a specific processor.

        This is used when processors (like BatchingProcessor) need to inject items
        back into the pipeline flow to continue through downstream processors.

        Args:
            source_processor (Processor): The processor that generated the item
            item (Any): The item to continue processing through the remaining pipeline
        """
        try:
            # Find the source processor's position
            try:
                source_index = self._processors.index(source_processor)
            except ValueError:
                logger.exception("Source processor %s not found in pipeline", source_processor.__class__.__name__)
                return

            # Process through remaining processors (skip the source processor)
            remaining_processors = self._processors[source_index + 1:]
            processed_item = await self._process_through_processors(remaining_processors, item)

            # Skip export if remaining pipeline dropped the item (returned None)
            if processed_item is None:
                logger.debug("Item was dropped by remaining processor pipeline, skipping export")
                return

            # Export the final result
            await self._export_final_item(processed_item)

        except Exception as e:
            logger.exception("Failed to continue pipeline processing after %s: %s",
                             source_processor.__class__.__name__,
                             e)

    async def _export_with_processing(self, item: PipelineInputT) -> None:
        """Export an item after processing it through the pipeline.

        Args:
            item (PipelineInputT): The item to export
        """
        try:
            # First, run the item through the processor pipeline
            final_item: PipelineOutputT | None = await self._process_pipeline(item)

            # Skip export if pipeline dropped the item (returned None)
            if final_item is None:
                logger.debug("Item was dropped by processor pipeline, skipping export")
                return

            # Handle different output types from batch processors
            if isinstance(final_item, list) and len(final_item) == 0:
                logger.debug("Skipping export of empty batch from processor pipeline")
                return

            await self._export_final_item(final_item, raise_on_invalid=True)

        except Exception as e:
            logger.error("Failed to export item '%s': %s", item, e)
            raise

    @override
    def export(self, event: IntermediateStep) -> None:
        """Export an IntermediateStep event through the processing pipeline.

        This method converts the IntermediateStep to the expected PipelineInputT type,
        processes it through the pipeline, and exports the result.

        Args:
            event (IntermediateStep): The event to be exported.
        """
        # Convert IntermediateStep to PipelineInputT and create export task
        if self.validate_input_type(event):
            input_item: PipelineInputT = event  # type: ignore
            coro = self._export_with_processing(input_item)
            self._create_export_task(coro)
        else:
            logger.warning("Event %s is not compatible with input type %s", event, self.input_type)

    @abstractmethod
    async def export_processed(self, item: PipelineOutputT | list[PipelineOutputT]) -> None:
        """Export the processed item.

        This method must be implemented by concrete exporters to handle
        the actual export logic after the item has been processed through the pipeline.

        Args:
            item (PipelineOutputT | list[PipelineOutputT]): The processed item to export (PipelineOutputT type)
        """
        pass

    def _create_export_task(self, coro: Coroutine) -> None:
        """Create task with minimal overhead but proper tracking.

        Args:
            coro: The coroutine to create a task for
        """
        if not self._running:
            logger.warning("%s: Attempted to create export task while not running", self.name)
            return

        try:
            task = asyncio.create_task(coro)
            self._tasks.add(task)
            task.add_done_callback(self._tasks.discard)

        except Exception as e:
            logger.error("%s: Failed to create task: %s", self.name, e)
            raise

    @override
    async def _cleanup(self) -> None:
        """Enhanced cleanup that shuts down all shutdown-aware processors.

        Each processor is responsible for its own cleanup, including routing
        any final batches through the remaining pipeline via their done callbacks.
        """
        # Shutdown all processors that support it
        shutdown_tasks = []
        for processor in getattr(self, '_processors', []):
            shutdown_method = getattr(processor, 'shutdown', None)
            if shutdown_method:
                logger.debug("Shutting down processor: %s", processor.__class__.__name__)
                shutdown_tasks.append(shutdown_method())

        if shutdown_tasks:
            try:
                await asyncio.gather(*shutdown_tasks, return_exceptions=True)
                logger.debug("Successfully shut down %d processors", len(shutdown_tasks))
            except Exception as e:
                logger.exception("Error shutting down processors: %s", e)

        # Call parent cleanup
        await super()._cleanup()
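
To make the pipeline mechanics above concrete, here is a minimal usage sketch; it is not part of the package. It assumes Processor is generic over its input/output types and that concrete processors implement the async process() method awaited by _process_through_processors, and it assumes IntermediateStep is a Pydantic model. The names StepToDictProcessor, RedactingProcessor, and ConsoleDictExporter are hypothetical.

from nat.data_models.intermediate_step import IntermediateStep
from nat.observability.exporter.processing_exporter import ProcessingExporter
from nat.observability.processor.processor import Processor


class StepToDictProcessor(Processor[IntermediateStep, dict]):
    """Hypothetical processor: serializes a step to a plain dict."""

    async def process(self, item: IntermediateStep) -> dict:
        return item.model_dump()  # assumes IntermediateStep is a Pydantic model


class RedactingProcessor(Processor[dict, dict]):
    """Hypothetical processor: strips a key before export. Returning None here
    would instead drop the item entirely when drop_nones=True (the default)."""

    async def process(self, item: dict) -> dict:
        item.pop("metadata", None)
        return item


class ConsoleDictExporter(ProcessingExporter[IntermediateStep, dict]):
    """Hypothetical exporter: prints each processed dict (or batch of dicts)."""

    async def export_processed(self, item: dict | list[dict]) -> None:
        print(item)


# Assumes BaseExporter has no other abstract members beyond export_processed.
exporter = ConsoleDictExporter()
exporter.add_processor(StepToDictProcessor(), name="to_dict")
# Relative positioning by name; position=1 or before/after another name works the same way.
exporter.add_processor(RedactingProcessor(), name="redact", after="to_dict")

Once the exporter starts, _pre_start() validates the chain end to end and locks it; subsequent add_processor or remove_processor calls raise RuntimeError until reset_pipeline() is called on a stopped exporter.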
nat/observability/exporter/raw_exporter.py
@@ -0,0 +1,52 @@
# SPDX-FileCopyrightText: Copyright (c) 2025, NVIDIA CORPORATION & AFFILIATES. All rights reserved.
# SPDX-License-Identifier: Apache-2.0
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import logging
from abc import abstractmethod
from typing import TypeVar

from nat.data_models.intermediate_step import IntermediateStep
from nat.observability.exporter.processing_exporter import ProcessingExporter
from nat.utils.type_utils import override

logger = logging.getLogger(__name__)

InputT = TypeVar("InputT")
OutputT = TypeVar("OutputT")


class RawExporter(ProcessingExporter[InputT, OutputT]):
    """A base class for exporting raw intermediate steps.

    This class provides a base implementation for telemetry exporters that
    work directly with IntermediateStep objects. It can optionally process
    them through a pipeline before export.

    The flow is: IntermediateStep -> [Processing Pipeline] -> OutputT -> Export

    Args:
        context_state (ContextState, optional): The context state to use for the exporter. Defaults to None.
    """

    @abstractmethod
    async def export_processed(self, item: OutputT):
        pass

    @override
    def export(self, event: IntermediateStep):
        if not isinstance(event, IntermediateStep):
            return

        self._create_export_task(self._export_with_processing(event))  # type: ignore