nvidia_nat-1.2.0a20250813-py3-none-any.whl
This diff shows the content of publicly available package versions released to a supported registry. It is provided for informational purposes only and reflects changes between package versions as they appear in their public registries.
- nat/agent/__init__.py +0 -0
- nat/agent/base.py +239 -0
- nat/agent/dual_node.py +67 -0
- nat/agent/react_agent/__init__.py +0 -0
- nat/agent/react_agent/agent.py +355 -0
- nat/agent/react_agent/output_parser.py +104 -0
- nat/agent/react_agent/prompt.py +41 -0
- nat/agent/react_agent/register.py +149 -0
- nat/agent/reasoning_agent/__init__.py +0 -0
- nat/agent/reasoning_agent/reasoning_agent.py +225 -0
- nat/agent/register.py +23 -0
- nat/agent/rewoo_agent/__init__.py +0 -0
- nat/agent/rewoo_agent/agent.py +411 -0
- nat/agent/rewoo_agent/prompt.py +108 -0
- nat/agent/rewoo_agent/register.py +158 -0
- nat/agent/tool_calling_agent/__init__.py +0 -0
- nat/agent/tool_calling_agent/agent.py +119 -0
- nat/agent/tool_calling_agent/register.py +106 -0
- nat/authentication/__init__.py +14 -0
- nat/authentication/api_key/__init__.py +14 -0
- nat/authentication/api_key/api_key_auth_provider.py +96 -0
- nat/authentication/api_key/api_key_auth_provider_config.py +124 -0
- nat/authentication/api_key/register.py +26 -0
- nat/authentication/exceptions/__init__.py +14 -0
- nat/authentication/exceptions/api_key_exceptions.py +38 -0
- nat/authentication/http_basic_auth/__init__.py +0 -0
- nat/authentication/http_basic_auth/http_basic_auth_provider.py +81 -0
- nat/authentication/http_basic_auth/register.py +30 -0
- nat/authentication/interfaces.py +93 -0
- nat/authentication/oauth2/__init__.py +14 -0
- nat/authentication/oauth2/oauth2_auth_code_flow_provider.py +107 -0
- nat/authentication/oauth2/oauth2_auth_code_flow_provider_config.py +39 -0
- nat/authentication/oauth2/register.py +25 -0
- nat/authentication/register.py +21 -0
- nat/builder/__init__.py +0 -0
- nat/builder/builder.py +285 -0
- nat/builder/component_utils.py +316 -0
- nat/builder/context.py +270 -0
- nat/builder/embedder.py +24 -0
- nat/builder/eval_builder.py +161 -0
- nat/builder/evaluator.py +29 -0
- nat/builder/framework_enum.py +24 -0
- nat/builder/front_end.py +73 -0
- nat/builder/function.py +344 -0
- nat/builder/function_base.py +380 -0
- nat/builder/function_info.py +627 -0
- nat/builder/intermediate_step_manager.py +174 -0
- nat/builder/llm.py +25 -0
- nat/builder/retriever.py +25 -0
- nat/builder/user_interaction_manager.py +78 -0
- nat/builder/workflow.py +148 -0
- nat/builder/workflow_builder.py +1117 -0
- nat/cli/__init__.py +14 -0
- nat/cli/cli_utils/__init__.py +0 -0
- nat/cli/cli_utils/config_override.py +231 -0
- nat/cli/cli_utils/validation.py +37 -0
- nat/cli/commands/__init__.py +0 -0
- nat/cli/commands/configure/__init__.py +0 -0
- nat/cli/commands/configure/channel/__init__.py +0 -0
- nat/cli/commands/configure/channel/add.py +28 -0
- nat/cli/commands/configure/channel/channel.py +36 -0
- nat/cli/commands/configure/channel/remove.py +30 -0
- nat/cli/commands/configure/channel/update.py +30 -0
- nat/cli/commands/configure/configure.py +33 -0
- nat/cli/commands/evaluate.py +139 -0
- nat/cli/commands/info/__init__.py +14 -0
- nat/cli/commands/info/info.py +39 -0
- nat/cli/commands/info/list_channels.py +32 -0
- nat/cli/commands/info/list_components.py +129 -0
- nat/cli/commands/info/list_mcp.py +304 -0
- nat/cli/commands/registry/__init__.py +14 -0
- nat/cli/commands/registry/publish.py +88 -0
- nat/cli/commands/registry/pull.py +118 -0
- nat/cli/commands/registry/registry.py +38 -0
- nat/cli/commands/registry/remove.py +108 -0
- nat/cli/commands/registry/search.py +155 -0
- nat/cli/commands/sizing/__init__.py +14 -0
- nat/cli/commands/sizing/calc.py +297 -0
- nat/cli/commands/sizing/sizing.py +27 -0
- nat/cli/commands/start.py +246 -0
- nat/cli/commands/uninstall.py +81 -0
- nat/cli/commands/validate.py +47 -0
- nat/cli/commands/workflow/__init__.py +14 -0
- nat/cli/commands/workflow/templates/__init__.py.j2 +0 -0
- nat/cli/commands/workflow/templates/config.yml.j2 +16 -0
- nat/cli/commands/workflow/templates/pyproject.toml.j2 +22 -0
- nat/cli/commands/workflow/templates/register.py.j2 +5 -0
- nat/cli/commands/workflow/templates/workflow.py.j2 +36 -0
- nat/cli/commands/workflow/workflow.py +37 -0
- nat/cli/commands/workflow/workflow_commands.py +317 -0
- nat/cli/entrypoint.py +135 -0
- nat/cli/main.py +57 -0
- nat/cli/register_workflow.py +488 -0
- nat/cli/type_registry.py +1000 -0
- nat/data_models/__init__.py +14 -0
- nat/data_models/api_server.py +709 -0
- nat/data_models/authentication.py +231 -0
- nat/data_models/common.py +171 -0
- nat/data_models/component.py +58 -0
- nat/data_models/component_ref.py +168 -0
- nat/data_models/config.py +410 -0
- nat/data_models/dataset_handler.py +123 -0
- nat/data_models/discovery_metadata.py +334 -0
- nat/data_models/embedder.py +27 -0
- nat/data_models/evaluate.py +127 -0
- nat/data_models/evaluator.py +26 -0
- nat/data_models/front_end.py +26 -0
- nat/data_models/function.py +30 -0
- nat/data_models/function_dependencies.py +72 -0
- nat/data_models/interactive.py +246 -0
- nat/data_models/intermediate_step.py +302 -0
- nat/data_models/invocation_node.py +38 -0
- nat/data_models/llm.py +27 -0
- nat/data_models/logging.py +26 -0
- nat/data_models/memory.py +27 -0
- nat/data_models/object_store.py +44 -0
- nat/data_models/profiler.py +54 -0
- nat/data_models/registry_handler.py +26 -0
- nat/data_models/retriever.py +30 -0
- nat/data_models/retry_mixin.py +35 -0
- nat/data_models/span.py +190 -0
- nat/data_models/step_adaptor.py +64 -0
- nat/data_models/streaming.py +33 -0
- nat/data_models/swe_bench_model.py +54 -0
- nat/data_models/telemetry_exporter.py +26 -0
- nat/data_models/ttc_strategy.py +30 -0
- nat/embedder/__init__.py +0 -0
- nat/embedder/langchain_client.py +41 -0
- nat/embedder/nim_embedder.py +59 -0
- nat/embedder/openai_embedder.py +43 -0
- nat/embedder/register.py +24 -0
- nat/eval/__init__.py +14 -0
- nat/eval/config.py +60 -0
- nat/eval/dataset_handler/__init__.py +0 -0
- nat/eval/dataset_handler/dataset_downloader.py +106 -0
- nat/eval/dataset_handler/dataset_filter.py +52 -0
- nat/eval/dataset_handler/dataset_handler.py +254 -0
- nat/eval/evaluate.py +510 -0
- nat/eval/evaluator/__init__.py +14 -0
- nat/eval/evaluator/base_evaluator.py +77 -0
- nat/eval/evaluator/evaluator_model.py +45 -0
- nat/eval/intermediate_step_adapter.py +99 -0
- nat/eval/rag_evaluator/__init__.py +0 -0
- nat/eval/rag_evaluator/evaluate.py +178 -0
- nat/eval/rag_evaluator/register.py +143 -0
- nat/eval/register.py +23 -0
- nat/eval/remote_workflow.py +133 -0
- nat/eval/runners/__init__.py +14 -0
- nat/eval/runners/config.py +39 -0
- nat/eval/runners/multi_eval_runner.py +54 -0
- nat/eval/runtime_event_subscriber.py +52 -0
- nat/eval/swe_bench_evaluator/__init__.py +0 -0
- nat/eval/swe_bench_evaluator/evaluate.py +215 -0
- nat/eval/swe_bench_evaluator/register.py +36 -0
- nat/eval/trajectory_evaluator/__init__.py +0 -0
- nat/eval/trajectory_evaluator/evaluate.py +75 -0
- nat/eval/trajectory_evaluator/register.py +40 -0
- nat/eval/tunable_rag_evaluator/__init__.py +0 -0
- nat/eval/tunable_rag_evaluator/evaluate.py +245 -0
- nat/eval/tunable_rag_evaluator/register.py +52 -0
- nat/eval/usage_stats.py +41 -0
- nat/eval/utils/__init__.py +0 -0
- nat/eval/utils/output_uploader.py +140 -0
- nat/eval/utils/tqdm_position_registry.py +40 -0
- nat/eval/utils/weave_eval.py +184 -0
- nat/experimental/__init__.py +0 -0
- nat/experimental/decorators/__init__.py +0 -0
- nat/experimental/decorators/experimental_warning_decorator.py +134 -0
- nat/experimental/test_time_compute/__init__.py +0 -0
- nat/experimental/test_time_compute/editing/__init__.py +0 -0
- nat/experimental/test_time_compute/editing/iterative_plan_refinement_editor.py +147 -0
- nat/experimental/test_time_compute/editing/llm_as_a_judge_editor.py +204 -0
- nat/experimental/test_time_compute/editing/motivation_aware_summarization.py +107 -0
- nat/experimental/test_time_compute/functions/__init__.py +0 -0
- nat/experimental/test_time_compute/functions/execute_score_select_function.py +105 -0
- nat/experimental/test_time_compute/functions/plan_select_execute_function.py +224 -0
- nat/experimental/test_time_compute/functions/ttc_tool_orchestration_function.py +205 -0
- nat/experimental/test_time_compute/functions/ttc_tool_wrapper_function.py +146 -0
- nat/experimental/test_time_compute/models/__init__.py +0 -0
- nat/experimental/test_time_compute/models/editor_config.py +132 -0
- nat/experimental/test_time_compute/models/scoring_config.py +112 -0
- nat/experimental/test_time_compute/models/search_config.py +120 -0
- nat/experimental/test_time_compute/models/selection_config.py +154 -0
- nat/experimental/test_time_compute/models/stage_enums.py +43 -0
- nat/experimental/test_time_compute/models/strategy_base.py +66 -0
- nat/experimental/test_time_compute/models/tool_use_config.py +41 -0
- nat/experimental/test_time_compute/models/ttc_item.py +48 -0
- nat/experimental/test_time_compute/register.py +36 -0
- nat/experimental/test_time_compute/scoring/__init__.py +0 -0
- nat/experimental/test_time_compute/scoring/llm_based_agent_scorer.py +168 -0
- nat/experimental/test_time_compute/scoring/llm_based_plan_scorer.py +168 -0
- nat/experimental/test_time_compute/scoring/motivation_aware_scorer.py +111 -0
- nat/experimental/test_time_compute/search/__init__.py +0 -0
- nat/experimental/test_time_compute/search/multi_llm_planner.py +128 -0
- nat/experimental/test_time_compute/search/multi_query_retrieval_search.py +122 -0
- nat/experimental/test_time_compute/search/single_shot_multi_plan_planner.py +128 -0
- nat/experimental/test_time_compute/selection/__init__.py +0 -0
- nat/experimental/test_time_compute/selection/best_of_n_selector.py +63 -0
- nat/experimental/test_time_compute/selection/llm_based_agent_output_selector.py +131 -0
- nat/experimental/test_time_compute/selection/llm_based_output_merging_selector.py +159 -0
- nat/experimental/test_time_compute/selection/llm_based_plan_selector.py +128 -0
- nat/experimental/test_time_compute/selection/threshold_selector.py +58 -0
- nat/front_ends/__init__.py +14 -0
- nat/front_ends/console/__init__.py +14 -0
- nat/front_ends/console/authentication_flow_handler.py +233 -0
- nat/front_ends/console/console_front_end_config.py +32 -0
- nat/front_ends/console/console_front_end_plugin.py +96 -0
- nat/front_ends/console/register.py +25 -0
- nat/front_ends/cron/__init__.py +14 -0
- nat/front_ends/fastapi/__init__.py +14 -0
- nat/front_ends/fastapi/auth_flow_handlers/__init__.py +0 -0
- nat/front_ends/fastapi/auth_flow_handlers/http_flow_handler.py +27 -0
- nat/front_ends/fastapi/auth_flow_handlers/websocket_flow_handler.py +107 -0
- nat/front_ends/fastapi/fastapi_front_end_config.py +242 -0
- nat/front_ends/fastapi/fastapi_front_end_controller.py +68 -0
- nat/front_ends/fastapi/fastapi_front_end_plugin.py +116 -0
- nat/front_ends/fastapi/fastapi_front_end_plugin_worker.py +1087 -0
- nat/front_ends/fastapi/html_snippets/__init__.py +14 -0
- nat/front_ends/fastapi/html_snippets/auth_code_grant_success.py +35 -0
- nat/front_ends/fastapi/intermediate_steps_subscriber.py +80 -0
- nat/front_ends/fastapi/job_store.py +183 -0
- nat/front_ends/fastapi/main.py +72 -0
- nat/front_ends/fastapi/message_handler.py +309 -0
- nat/front_ends/fastapi/message_validator.py +354 -0
- nat/front_ends/fastapi/register.py +25 -0
- nat/front_ends/fastapi/response_helpers.py +195 -0
- nat/front_ends/fastapi/step_adaptor.py +319 -0
- nat/front_ends/mcp/__init__.py +14 -0
- nat/front_ends/mcp/mcp_front_end_config.py +35 -0
- nat/front_ends/mcp/mcp_front_end_plugin.py +81 -0
- nat/front_ends/mcp/mcp_front_end_plugin_worker.py +143 -0
- nat/front_ends/mcp/register.py +27 -0
- nat/front_ends/mcp/tool_converter.py +242 -0
- nat/front_ends/register.py +22 -0
- nat/front_ends/simple_base/__init__.py +14 -0
- nat/front_ends/simple_base/simple_front_end_plugin_base.py +54 -0
- nat/llm/__init__.py +0 -0
- nat/llm/aws_bedrock_llm.py +57 -0
- nat/llm/nim_llm.py +46 -0
- nat/llm/openai_llm.py +46 -0
- nat/llm/register.py +23 -0
- nat/llm/utils/__init__.py +14 -0
- nat/llm/utils/env_config_value.py +94 -0
- nat/llm/utils/error.py +17 -0
- nat/memory/__init__.py +20 -0
- nat/memory/interfaces.py +183 -0
- nat/memory/models.py +112 -0
- nat/meta/module_to_distro.json +4 -0
- nat/meta/pypi.md +58 -0
- nat/object_store/__init__.py +20 -0
- nat/object_store/in_memory_object_store.py +76 -0
- nat/object_store/interfaces.py +84 -0
- nat/object_store/models.py +36 -0
- nat/object_store/register.py +20 -0
- nat/observability/__init__.py +14 -0
- nat/observability/exporter/__init__.py +14 -0
- nat/observability/exporter/base_exporter.py +449 -0
- nat/observability/exporter/exporter.py +78 -0
- nat/observability/exporter/file_exporter.py +33 -0
- nat/observability/exporter/processing_exporter.py +322 -0
- nat/observability/exporter/raw_exporter.py +52 -0
- nat/observability/exporter/span_exporter.py +288 -0
- nat/observability/exporter_manager.py +335 -0
- nat/observability/mixin/__init__.py +14 -0
- nat/observability/mixin/batch_config_mixin.py +26 -0
- nat/observability/mixin/collector_config_mixin.py +23 -0
- nat/observability/mixin/file_mixin.py +288 -0
- nat/observability/mixin/file_mode.py +23 -0
- nat/observability/mixin/resource_conflict_mixin.py +134 -0
- nat/observability/mixin/serialize_mixin.py +61 -0
- nat/observability/mixin/type_introspection_mixin.py +183 -0
- nat/observability/processor/__init__.py +14 -0
- nat/observability/processor/batching_processor.py +310 -0
- nat/observability/processor/callback_processor.py +42 -0
- nat/observability/processor/intermediate_step_serializer.py +28 -0
- nat/observability/processor/processor.py +71 -0
- nat/observability/register.py +96 -0
- nat/observability/utils/__init__.py +14 -0
- nat/observability/utils/dict_utils.py +236 -0
- nat/observability/utils/time_utils.py +31 -0
- nat/plugins/.namespace +1 -0
- nat/profiler/__init__.py +0 -0
- nat/profiler/calc/__init__.py +14 -0
- nat/profiler/calc/calc_runner.py +627 -0
- nat/profiler/calc/calculations.py +288 -0
- nat/profiler/calc/data_models.py +188 -0
- nat/profiler/calc/plot.py +345 -0
- nat/profiler/callbacks/__init__.py +0 -0
- nat/profiler/callbacks/agno_callback_handler.py +295 -0
- nat/profiler/callbacks/base_callback_class.py +20 -0
- nat/profiler/callbacks/langchain_callback_handler.py +290 -0
- nat/profiler/callbacks/llama_index_callback_handler.py +205 -0
- nat/profiler/callbacks/semantic_kernel_callback_handler.py +238 -0
- nat/profiler/callbacks/token_usage_base_model.py +27 -0
- nat/profiler/data_frame_row.py +51 -0
- nat/profiler/data_models.py +24 -0
- nat/profiler/decorators/__init__.py +0 -0
- nat/profiler/decorators/framework_wrapper.py +131 -0
- nat/profiler/decorators/function_tracking.py +254 -0
- nat/profiler/forecasting/__init__.py +0 -0
- nat/profiler/forecasting/config.py +18 -0
- nat/profiler/forecasting/model_trainer.py +75 -0
- nat/profiler/forecasting/models/__init__.py +22 -0
- nat/profiler/forecasting/models/forecasting_base_model.py +40 -0
- nat/profiler/forecasting/models/linear_model.py +196 -0
- nat/profiler/forecasting/models/random_forest_regressor.py +268 -0
- nat/profiler/inference_metrics_model.py +28 -0
- nat/profiler/inference_optimization/__init__.py +0 -0
- nat/profiler/inference_optimization/bottleneck_analysis/__init__.py +0 -0
- nat/profiler/inference_optimization/bottleneck_analysis/nested_stack_analysis.py +460 -0
- nat/profiler/inference_optimization/bottleneck_analysis/simple_stack_analysis.py +258 -0
- nat/profiler/inference_optimization/data_models.py +386 -0
- nat/profiler/inference_optimization/experimental/__init__.py +0 -0
- nat/profiler/inference_optimization/experimental/concurrency_spike_analysis.py +468 -0
- nat/profiler/inference_optimization/experimental/prefix_span_analysis.py +405 -0
- nat/profiler/inference_optimization/llm_metrics.py +212 -0
- nat/profiler/inference_optimization/prompt_caching.py +163 -0
- nat/profiler/inference_optimization/token_uniqueness.py +107 -0
- nat/profiler/inference_optimization/workflow_runtimes.py +72 -0
- nat/profiler/intermediate_property_adapter.py +102 -0
- nat/profiler/profile_runner.py +473 -0
- nat/profiler/utils.py +184 -0
- nat/registry_handlers/__init__.py +0 -0
- nat/registry_handlers/local/__init__.py +0 -0
- nat/registry_handlers/local/local_handler.py +176 -0
- nat/registry_handlers/local/register_local.py +37 -0
- nat/registry_handlers/metadata_factory.py +60 -0
- nat/registry_handlers/package_utils.py +571 -0
- nat/registry_handlers/pypi/__init__.py +0 -0
- nat/registry_handlers/pypi/pypi_handler.py +251 -0
- nat/registry_handlers/pypi/register_pypi.py +40 -0
- nat/registry_handlers/register.py +21 -0
- nat/registry_handlers/registry_handler_base.py +157 -0
- nat/registry_handlers/rest/__init__.py +0 -0
- nat/registry_handlers/rest/register_rest.py +56 -0
- nat/registry_handlers/rest/rest_handler.py +237 -0
- nat/registry_handlers/schemas/__init__.py +0 -0
- nat/registry_handlers/schemas/headers.py +42 -0
- nat/registry_handlers/schemas/package.py +68 -0
- nat/registry_handlers/schemas/publish.py +68 -0
- nat/registry_handlers/schemas/pull.py +82 -0
- nat/registry_handlers/schemas/remove.py +36 -0
- nat/registry_handlers/schemas/search.py +91 -0
- nat/registry_handlers/schemas/status.py +47 -0
- nat/retriever/__init__.py +0 -0
- nat/retriever/interface.py +41 -0
- nat/retriever/milvus/__init__.py +14 -0
- nat/retriever/milvus/register.py +81 -0
- nat/retriever/milvus/retriever.py +228 -0
- nat/retriever/models.py +77 -0
- nat/retriever/nemo_retriever/__init__.py +14 -0
- nat/retriever/nemo_retriever/register.py +60 -0
- nat/retriever/nemo_retriever/retriever.py +190 -0
- nat/retriever/register.py +22 -0
- nat/runtime/__init__.py +14 -0
- nat/runtime/loader.py +219 -0
- nat/runtime/runner.py +195 -0
- nat/runtime/session.py +162 -0
- nat/runtime/user_metadata.py +130 -0
- nat/settings/__init__.py +0 -0
- nat/settings/global_settings.py +318 -0
- nat/test/.namespace +1 -0
- nat/tool/__init__.py +0 -0
- nat/tool/chat_completion.py +74 -0
- nat/tool/code_execution/README.md +151 -0
- nat/tool/code_execution/__init__.py +0 -0
- nat/tool/code_execution/code_sandbox.py +267 -0
- nat/tool/code_execution/local_sandbox/.gitignore +1 -0
- nat/tool/code_execution/local_sandbox/Dockerfile.sandbox +60 -0
- nat/tool/code_execution/local_sandbox/__init__.py +13 -0
- nat/tool/code_execution/local_sandbox/local_sandbox_server.py +198 -0
- nat/tool/code_execution/local_sandbox/sandbox.requirements.txt +6 -0
- nat/tool/code_execution/local_sandbox/start_local_sandbox.sh +50 -0
- nat/tool/code_execution/register.py +74 -0
- nat/tool/code_execution/test_code_execution_sandbox.py +414 -0
- nat/tool/code_execution/utils.py +100 -0
- nat/tool/datetime_tools.py +42 -0
- nat/tool/document_search.py +141 -0
- nat/tool/github_tools/__init__.py +0 -0
- nat/tool/github_tools/create_github_commit.py +133 -0
- nat/tool/github_tools/create_github_issue.py +87 -0
- nat/tool/github_tools/create_github_pr.py +106 -0
- nat/tool/github_tools/get_github_file.py +106 -0
- nat/tool/github_tools/get_github_issue.py +166 -0
- nat/tool/github_tools/get_github_pr.py +256 -0
- nat/tool/github_tools/update_github_issue.py +100 -0
- nat/tool/mcp/__init__.py +14 -0
- nat/tool/mcp/exceptions.py +142 -0
- nat/tool/mcp/mcp_client.py +255 -0
- nat/tool/mcp/mcp_tool.py +96 -0
- nat/tool/memory_tools/__init__.py +0 -0
- nat/tool/memory_tools/add_memory_tool.py +79 -0
- nat/tool/memory_tools/delete_memory_tool.py +67 -0
- nat/tool/memory_tools/get_memory_tool.py +72 -0
- nat/tool/nvidia_rag.py +95 -0
- nat/tool/register.py +38 -0
- nat/tool/retriever.py +94 -0
- nat/tool/server_tools.py +66 -0
- nat/utils/__init__.py +0 -0
- nat/utils/data_models/__init__.py +0 -0
- nat/utils/data_models/schema_validator.py +58 -0
- nat/utils/debugging_utils.py +43 -0
- nat/utils/dump_distro_mapping.py +32 -0
- nat/utils/exception_handlers/__init__.py +0 -0
- nat/utils/exception_handlers/automatic_retries.py +289 -0
- nat/utils/exception_handlers/mcp.py +211 -0
- nat/utils/exception_handlers/schemas.py +114 -0
- nat/utils/io/__init__.py +0 -0
- nat/utils/io/model_processing.py +28 -0
- nat/utils/io/yaml_tools.py +119 -0
- nat/utils/log_utils.py +37 -0
- nat/utils/metadata_utils.py +74 -0
- nat/utils/optional_imports.py +142 -0
- nat/utils/producer_consumer_queue.py +178 -0
- nat/utils/reactive/__init__.py +0 -0
- nat/utils/reactive/base/__init__.py +0 -0
- nat/utils/reactive/base/observable_base.py +65 -0
- nat/utils/reactive/base/observer_base.py +55 -0
- nat/utils/reactive/base/subject_base.py +79 -0
- nat/utils/reactive/observable.py +59 -0
- nat/utils/reactive/observer.py +76 -0
- nat/utils/reactive/subject.py +131 -0
- nat/utils/reactive/subscription.py +49 -0
- nat/utils/settings/__init__.py +0 -0
- nat/utils/settings/global_settings.py +197 -0
- nat/utils/string_utils.py +38 -0
- nat/utils/type_converter.py +290 -0
- nat/utils/type_utils.py +484 -0
- nat/utils/url_utils.py +27 -0
- nvidia_nat-1.2.0a20250813.dist-info/METADATA +363 -0
- nvidia_nat-1.2.0a20250813.dist-info/RECORD +436 -0
- nvidia_nat-1.2.0a20250813.dist-info/WHEEL +5 -0
- nvidia_nat-1.2.0a20250813.dist-info/entry_points.txt +21 -0
- nvidia_nat-1.2.0a20250813.dist-info/licenses/LICENSE-3rd-party.txt +3686 -0
- nvidia_nat-1.2.0a20250813.dist-info/licenses/LICENSE.md +201 -0
- nvidia_nat-1.2.0a20250813.dist-info/top_level.txt +1 -0
@@ -0,0 +1,183 @@
+# SPDX-FileCopyrightText: Copyright (c) 2025, NVIDIA CORPORATION & AFFILIATES. All rights reserved.
+# SPDX-License-Identifier: Apache-2.0
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from functools import lru_cache
+from typing import Any
+from typing import get_args
+from typing import get_origin
+
+
+class TypeIntrospectionMixin:
+    """Mixin class providing type introspection capabilities for generic classes.
+
+    This mixin extracts type information from generic class definitions,
+    allowing classes to determine their InputT and OutputT types at runtime.
+    """
+
+    def _find_generic_types(self) -> tuple[type[Any], type[Any]] | None:
+        """
+        Recursively search through the inheritance hierarchy to find generic type parameters.
+
+        This method handles cases where a class inherits from a generic parent class,
+        resolving the concrete types through the inheritance chain.
+
+        Returns:
+            tuple[type[Any], type[Any]] | None: (input_type, output_type) if found, None otherwise
+        """
+        # First, try to find types directly in this class's __orig_bases__
+        for base_cls in getattr(self.__class__, '__orig_bases__', []):
+            base_cls_args = get_args(base_cls)
+
+            # Direct case: MyClass[InputT, OutputT]
+            if len(base_cls_args) >= 2:
+                return base_cls_args[0], base_cls_args[1]
+
+            # Indirect case: MyClass[SomeGeneric[ConcreteType]]
+            # Need to resolve the generic parent's types
+            if len(base_cls_args) == 1:
+                base_origin = get_origin(base_cls)
+                if base_origin and hasattr(base_origin, '__orig_bases__'):
+                    # Look at the parent's generic definition
+                    for parent_base in getattr(base_origin, '__orig_bases__', []):
+                        parent_args = get_args(parent_base)
+                        if len(parent_args) >= 2:
+                            # Found the pattern: ParentClass[T, list[T]]
+                            # Substitute T with our concrete type
+                            concrete_type = base_cls_args[0]
+                            input_type = self._substitute_type_var(parent_args[0], concrete_type)
+                            output_type = self._substitute_type_var(parent_args[1], concrete_type)
+                            return input_type, output_type
+
+        return None
+
+    def _substitute_type_var(self, type_expr: Any, concrete_type: type) -> type[Any]:
+        """
+        Substitute TypeVar in a type expression with a concrete type.
+
+        Args:
+            type_expr: The type expression potentially containing TypeVars
+            concrete_type: The concrete type to substitute
+
+        Returns:
+            The type expression with TypeVars substituted
+        """
+        from typing import TypeVar
+
+        # If it's a TypeVar, substitute it
+        if isinstance(type_expr, TypeVar):
+            return concrete_type
+
+        # If it's a generic type like list[T], substitute the args
+        origin = get_origin(type_expr)
+        args = get_args(type_expr)
+
+        if origin and args:
+            # Recursively substitute in the arguments
+            new_args = tuple(self._substitute_type_var(arg, concrete_type) for arg in args)
+            # Reconstruct the generic type
+            return origin[new_args]
+
+        # Otherwise, return as-is
+        return type_expr
+
+    @property
+    @lru_cache
+    def input_type(self) -> type[Any]:
+        """
+        Get the input type of the class. The input type is determined by the generic parameters of the class.
+
+        For example, if a class is defined as `MyClass[list[int], str]`, the `input_type` is `list[int]`.
+
+        Returns
+        -------
+        type[Any]
+            The input type specified in the generic parameters
+
+        Raises
+        ------
+        ValueError
+            If the input type cannot be determined from the class definition
+        """
+        types = self._find_generic_types()
+        if types:
+            return types[0]
+
+        raise ValueError(f"Could not find input type for {self.__class__.__name__}")
+
+    @property
+    @lru_cache
+    def output_type(self) -> type[Any]:
+        """
+        Get the output type of the class. The output type is determined by the generic parameters of the class.
+
+        For example, if a class is defined as `MyClass[list[int], str]`, the `output_type` is `str`.
+
+        Returns
+        -------
+        type[Any]
+            The output type specified in the generic parameters
+
+        Raises
+        ------
+        ValueError
+            If the output type cannot be determined from the class definition
+        """
+        types = self._find_generic_types()
+        if types:
+            return types[1]
+
+        raise ValueError(f"Could not find output type for {self.__class__.__name__}")
+
+    @property
+    @lru_cache
+    def input_class(self) -> type:
+        """
+        Get the python class of the input type. This is the class that can be used to check if a value is an
+        instance of the input type. It removes any generic or annotation information from the input type.
+
+        For example, if the input type is `list[int]`, the `input_class` is `list`.
+
+        Returns
+        -------
+        type
+            The python type of the input type
+        """
+        input_origin = get_origin(self.input_type)
+
+        if input_origin is None:
+            return self.input_type
+
+        return input_origin
+
+    @property
+    @lru_cache
+    def output_class(self) -> type:
+        """
+        Get the python class of the output type. This is the class that can be used to check if a value is an
+        instance of the output type. It removes any generic or annotation information from the output type.
+
+        For example, if the output type is `list[int]`, the `output_class` is `list`.
+
+        Returns
+        -------
+        type
+            The python type of the output type
+        """
+        output_origin = get_origin(self.output_type)
+
+        if output_origin is None:
+            return self.output_type
+
+        return output_origin
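
The hunk above appears to correspond to `nat/observability/mixin/type_introspection_mixin.py` (+183 in the file list, and the only file in this diff defining `TypeIntrospectionMixin`). As a quick illustration of the direct resolution case the docstrings describe, here is a minimal sketch that is not part of the package; `Transform` and `ListToStr` are hypothetical names used only for this example:

```python
# Minimal sketch (not from nvidia-nat) of TypeIntrospectionMixin in the direct case.
from typing import Generic, TypeVar

from nat.observability.mixin.type_introspection_mixin import TypeIntrospectionMixin

InputT = TypeVar('InputT')
OutputT = TypeVar('OutputT')


class Transform(TypeIntrospectionMixin, Generic[InputT, OutputT]):
    """Hypothetical generic base; real processors get the mixin via Processor."""


class ListToStr(Transform[list[int], str]):
    """Concrete subclass that pins the generic parameters."""


t = ListToStr()
assert t.input_type == list[int]   # full generic type, resolved from __orig_bases__
assert t.output_type is str
assert t.input_class is list       # origin only, suitable for isinstance() checks
assert t.output_class is str
```

The `input_class`/`output_class` properties strip the generic arguments, which is what makes them usable for runtime `isinstance()` checks further down an export pipeline.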
@@ -0,0 +1,14 @@
+# SPDX-FileCopyrightText: Copyright (c) 2024-2025, NVIDIA CORPORATION & AFFILIATES. All rights reserved.
+# SPDX-License-Identifier: Apache-2.0
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
@@ -0,0 +1,310 @@
+# SPDX-FileCopyrightText: Copyright (c) 2025, NVIDIA CORPORATION & AFFILIATES. All rights reserved.
+# SPDX-License-Identifier: Apache-2.0
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import asyncio
+import logging
+import time
+from collections import deque
+from collections.abc import Awaitable
+from collections.abc import Callable
+from typing import Any
+from typing import Generic
+from typing import TypeVar
+
+from nat.observability.processor.callback_processor import CallbackProcessor
+
+logger = logging.getLogger(__name__)
+
+T = TypeVar('T')
+
+
+class BatchingProcessor(CallbackProcessor[T, list[T]], Generic[T]):
+    """Pass-through batching processor that accumulates items and outputs batched lists.
+
+    This processor extends CallbackProcessor[T, List[T]] to provide batching functionality.
+    It accumulates individual items and outputs them as batches when size or time thresholds
+    are met. The batched output continues through the processing pipeline.
+
+    CRITICAL: Implements proper cleanup to ensure NO ITEMS ARE LOST during shutdown.
+    The ProcessingExporter._cleanup() method calls shutdown() on all processors.
+
+    Key Features:
+    - Pass-through design: Processor[T, List[T]]
+    - Size-based and time-based batching
+    - Pipeline flow: batches continue through downstream processors
+    - GUARANTEED: No items lost during cleanup
+    - Comprehensive statistics and monitoring
+    - Proper cleanup and shutdown handling
+    - High-performance async implementation
+    - Back-pressure handling with queue limits
+
+    Pipeline Flow:
+    Normal processing: Individual items → BatchingProcessor → List[items] → downstream processors → export
+    Time-based flush: Scheduled batches automatically continue through remaining pipeline
+    Shutdown: Final batch immediately routed through remaining pipeline
+
+    Cleanup Guarantee:
+    When shutdown() is called, this processor:
+    1. Stops accepting new items
+    2. Creates final batch from all queued items
+    3. Immediately routes final batch through remaining pipeline via callback
+    4. Ensures zero data loss with no external coordination needed
+
+    Usage in Pipeline:
+    ```python
+    # Individual spans → Batched spans → Continue through downstream processors
+    exporter.add_processor(BatchingProcessor[Span](batch_size=100))  # Auto-wired with pipeline callback
+    exporter.add_processor(FilterProcessor())  # Processes List[Span] from batching
+    exporter.add_processor(TransformProcessor())  # Further processing
+    ```
+
+    Args:
+        batch_size: Maximum items per batch (default: 100)
+        flush_interval: Max seconds to wait before flushing (default: 5.0)
+        max_queue_size: Maximum items to queue before blocking (default: 1000)
+        drop_on_overflow: If True, drop items when queue is full (default: False)
+        shutdown_timeout: Max seconds to wait for final batch processing (default: 10.0)
+
+    Note:
+        The done_callback for pipeline integration is automatically set by ProcessingExporter
+        when the processor is added to a pipeline. For standalone usage, call set_done_callback().
+    """
+
+    def __init__(self,
+                 batch_size: int = 100,
+                 flush_interval: float = 5.0,
+                 max_queue_size: int = 1000,
+                 drop_on_overflow: bool = False,
+                 shutdown_timeout: float = 10.0):
+        self._batch_size = batch_size
+        self._flush_interval = flush_interval
+        self._max_queue_size = max_queue_size
+        self._drop_on_overflow = drop_on_overflow
+        self._shutdown_timeout = shutdown_timeout
+        self._done_callback: Callable[[list[T]], Awaitable[None]] | None = None
+
+        # Batching state
+        self._batch_queue: deque[T] = deque()
+        self._last_flush_time = time.time()
+        self._flush_task: asyncio.Task | None = None
+        self._batch_lock = asyncio.Lock()
+        self._shutdown_requested = False
+        self._shutdown_complete = False
+        self._shutdown_complete_event = asyncio.Event()
+
+        # Callback for immediate export of scheduled batches
+        self._done = None
+
+        # Statistics
+        self._batches_created = 0
+        self._items_processed = 0
+        self._items_dropped = 0
+        self._queue_overflows = 0
+        self._shutdown_batches = 0
+
+    async def process(self, item: T) -> list[T]:
+        """Process an item by adding it to the batch queue.
+
+        Returns a batch when batching conditions are met, otherwise returns empty list.
+        This maintains the Processor[T, List[T]] contract while handling batching logic.
+
+        During shutdown, immediately returns items as single-item batches to ensure
+        no data loss.
+
+        Args:
+            item: The item to add to the current batch
+
+        Returns:
+            List[T]: A batch of items when ready, empty list otherwise
+        """
+        if self._shutdown_requested:
+            # During shutdown, return item immediately as single-item batch
+            # This ensures no items are lost even if shutdown is in progress
+            self._items_processed += 1
+            self._shutdown_batches += 1
+            logger.debug("Shutdown mode: returning single-item batch for item %s", item)
+            return [item]
+
+        async with self._batch_lock:
+            # Handle queue overflow
+            if len(self._batch_queue) >= self._max_queue_size:
+                self._queue_overflows += 1
+
+                if self._drop_on_overflow:
+                    # Drop the item and return empty
+                    self._items_dropped += 1
+                    logger.warning("Dropping item due to queue overflow (dropped: %d)", self._items_dropped)
+                    return []
+                # Force flush to make space, then add item
+                logger.warning("Queue overflow, forcing flush of %d items", len(self._batch_queue))
+                forced_batch = await self._create_batch()
+                if forced_batch:
+                    # Add current item to queue and return the forced batch
+                    self._batch_queue.append(item)
+                    self._items_processed += 1
+                    return forced_batch
+
+            # Add item to batch queue
+            self._batch_queue.append(item)
+            self._items_processed += 1
+
+            # Check flush conditions
+            should_flush = (len(self._batch_queue) >= self._batch_size
+                            or (time.time() - self._last_flush_time) >= self._flush_interval)
+
+            if should_flush:
+                return await self._create_batch()
+            # Schedule a time-based flush if not already scheduled
+            if self._flush_task is None or self._flush_task.done():
+                self._flush_task = asyncio.create_task(self._schedule_flush())
+            return []
+
+    def set_done_callback(self, callback: Callable[[list[T]], Awaitable[None]]):
+        """Set callback function for routing batches through the remaining pipeline.
+
+        This is automatically set by ProcessingExporter.add_processor() to continue
+        batches through downstream processors before final export.
+        """
+        self._done_callback = callback
+
+    async def _schedule_flush(self):
+        """Schedule a flush after the flush interval."""
+        try:
+            await asyncio.sleep(self._flush_interval)
+            async with self._batch_lock:
+                if not self._shutdown_requested and self._batch_queue:
+                    batch = await self._create_batch()
+                    if batch:
+                        # Route scheduled batches through pipeline via callback
+                        if self._done_callback is not None:
+                            try:
+                                await self._done_callback(batch)
+                                logger.debug("Scheduled flush routed batch of %d items through pipeline", len(batch))
+                            except Exception as e:
+                                logger.error("Error routing scheduled batch through pipeline: %s", e, exc_info=True)
+                        else:
+                            logger.warning("Scheduled flush created batch of %d items but no pipeline callback set",
+                                           len(batch))
+        except asyncio.CancelledError:
+            pass
+        except Exception as e:
+            logger.error("Error in scheduled flush: %s", e, exc_info=True)
+
+    async def _create_batch(self) -> list[T]:
+        """Create a batch from the current queue."""
+        if not self._batch_queue:
+            return []
+
+        batch = list(self._batch_queue)
+        self._batch_queue.clear()
+        self._last_flush_time = time.time()
+        self._batches_created += 1
+
+        logger.debug("Created batch of %d items (total: %d items in %d batches)",
+                     len(batch),
+                     self._items_processed,
+                     self._batches_created)
+
+        return batch
+
+    async def force_flush(self) -> list[T]:
+        """Force an immediate flush of all queued items.
+
+        Returns:
+            List[T]: The current batch, empty list if no items queued
+        """
+        async with self._batch_lock:
+            return await self._create_batch()
+
+    async def shutdown(self) -> None:
+        """Shutdown the processor and ensure all items are processed.
+
+        CRITICAL: This method is called by ProcessingExporter._cleanup() to ensure
+        no items are lost during shutdown. It immediately routes any remaining
+        items as a final batch through the rest of the processing pipeline.
+        """
+        if self._shutdown_requested:
+            logger.debug("Shutdown already requested, waiting for completion")
+            # Wait for shutdown to complete using event instead of polling
+            try:
+                await asyncio.wait_for(self._shutdown_complete_event.wait(), timeout=self._shutdown_timeout)
+                logger.debug("Shutdown completion detected via event")
+            except asyncio.TimeoutError:
+                logger.warning("Shutdown completion timeout exceeded (%s seconds)", self._shutdown_timeout)
+            return
+
+        logger.debug("Starting shutdown of BatchingProcessor (queue size: %d)", len(self._batch_queue))
+        self._shutdown_requested = True
+
+        try:
+            # Cancel scheduled flush task
+            if self._flush_task and not self._flush_task.done():
+                self._flush_task.cancel()
+                try:
+                    await self._flush_task
+                except asyncio.CancelledError:
+                    pass
+
+            # Create and route final batch through pipeline
+            async with self._batch_lock:
+                if self._batch_queue:
+                    final_batch = await self._create_batch()
+                    logger.debug("Created final batch of %d items during shutdown", len(final_batch))
+
+                    # Route final batch through pipeline via callback
+                    if self._done_callback is not None:
+                        try:
+                            await self._done_callback(final_batch)
+                            logger.debug(
+                                "Successfully flushed final batch of %d items through pipeline during shutdown",
+                                len(final_batch))
+                        except Exception as e:
+                            logger.error("Error routing final batch through pipeline during shutdown: %s",
+                                         e,
+                                         exc_info=True)
+                    else:
+                        logger.warning("Final batch of %d items created during shutdown but no pipeline callback set",
+                                       len(final_batch))
+                else:
+                    logger.debug("No items remaining during shutdown")
+
+            self._shutdown_complete = True
+            self._shutdown_complete_event.set()
+            logger.debug("BatchingProcessor shutdown completed successfully")
+
+        except Exception as e:
+            logger.error("Error during BatchingProcessor shutdown: %s", e, exc_info=True)
+            self._shutdown_complete = True
+            self._shutdown_complete_event.set()
+
+    def get_stats(self) -> dict[str, Any]:
+        """Get comprehensive batching statistics."""
+        return {
+            "current_queue_size": len(self._batch_queue),
+            "batch_size_limit": self._batch_size,
+            "flush_interval": self._flush_interval,
+            "max_queue_size": self._max_queue_size,
+            "drop_on_overflow": self._drop_on_overflow,
+            "shutdown_timeout": self._shutdown_timeout,
+            "batches_created": self._batches_created,
+            "items_processed": self._items_processed,
+            "items_dropped": self._items_dropped,
+            "queue_overflows": self._queue_overflows,
+            "shutdown_batches": self._shutdown_batches,
+            "shutdown_requested": self._shutdown_requested,
+            "shutdown_complete": self._shutdown_complete,
+            "avg_items_per_batch": self._items_processed / max(1, self._batches_created),
+            "drop_rate": self._items_dropped / max(1, self._items_processed) * 100 if self._items_processed > 0 else 0
+        }
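
This hunk appears to correspond to `nat/observability/processor/batching_processor.py` (+310 in the file list). The class docstring notes that `ProcessingExporter.add_processor()` wires the done callback automatically; the following standalone sketch is not taken from the package and sets the callback by hand, with `export_batch` as a made-up stand-in for the downstream pipeline:

```python
# Minimal standalone sketch (not from nvidia-nat) of BatchingProcessor usage.
import asyncio

from nat.observability.processor.batching_processor import BatchingProcessor


async def main() -> None:
    processor: BatchingProcessor[str] = BatchingProcessor(batch_size=3, flush_interval=1.0)

    async def export_batch(batch: list[str]) -> None:
        # Stand-in for the rest of the pipeline / final exporter.
        print(f"exported {len(batch)} items: {batch}")

    processor.set_done_callback(export_batch)

    # process() returns [] until a size or time threshold is hit, then returns the batch.
    for item in ("span-1", "span-2", "span-3"):
        batch = await processor.process(item)
        if batch:
            await export_batch(batch)  # batch of 3 returned on the third call

    # One more item stays queued; shutdown() routes it through the done callback
    # as the final batch, so nothing is lost.
    await processor.process("span-4")
    await processor.shutdown()
    print(processor.get_stats()["batches_created"])  # 2


asyncio.run(main())
```

In a real exporter pipeline, both the batches returned from process() and the batches routed through the done callback continue through any downstream processors before export, as described in the docstring above.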
@@ -0,0 +1,42 @@
+# SPDX-FileCopyrightText: Copyright (c) 2025, NVIDIA CORPORATION & AFFILIATES. All rights reserved.
+# SPDX-License-Identifier: Apache-2.0
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from abc import abstractmethod
+from collections.abc import Awaitable
+from collections.abc import Callable
+from typing import Any
+from typing import TypeVar
+
+from nat.observability.processor.processor import Processor
+
+InputT = TypeVar('InputT')
+OutputT = TypeVar('OutputT')
+
+
+class CallbackProcessor(Processor[InputT, OutputT]):
+    """Abstract base class for processors that support done callbacks.
+
+    Processors inheriting from this class can register callbacks that are
+    invoked when items are ready for further processing or export.
+    """
+
+    @abstractmethod
+    def set_done_callback(self, callback: Callable[[Any], Awaitable[None]]) -> None:
+        """Set a callback function to be invoked when items are processed.
+
+        Args:
+            callback (Callable[[Any], Awaitable[None]]): Function to call with processed items
+        """
+        pass
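
This 42-line hunk appears to correspond to `nat/observability/processor/callback_processor.py`. To show the contract it defines, here is a minimal concrete subclass sketched only for illustration (it is not part of the package; `PassThroughNotifier` and `on_item` are invented names). It stores the registered callback and invokes it as items are processed, which is the same contract BatchingProcessor implements above:

```python
# Minimal sketch (not from nvidia-nat) of a CallbackProcessor implementation.
import asyncio
from collections.abc import Awaitable, Callable
from typing import Any

from nat.observability.processor.callback_processor import CallbackProcessor


class PassThroughNotifier(CallbackProcessor[str, str]):
    """Forwards each item unchanged and notifies the done callback, if one is set."""

    def __init__(self) -> None:
        self._done_callback: Callable[[Any], Awaitable[None]] | None = None

    def set_done_callback(self, callback: Callable[[Any], Awaitable[None]]) -> None:
        self._done_callback = callback

    async def process(self, item: str) -> str:
        if self._done_callback is not None:
            await self._done_callback(item)
        return item


async def demo() -> None:
    notifier = PassThroughNotifier()

    async def on_item(item: str) -> None:
        print(f"ready for export: {item}")

    notifier.set_done_callback(on_item)
    print(await notifier.process("hello"))


asyncio.run(demo())
```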
@@ -0,0 +1,28 @@
+# SPDX-FileCopyrightText: Copyright (c) 2024-2025, NVIDIA CORPORATION & AFFILIATES. All rights reserved.
+# SPDX-License-Identifier: Apache-2.0
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from nat.data_models.intermediate_step import IntermediateStep
+from nat.observability.mixin.serialize_mixin import SerializeMixin
+from nat.observability.processor.processor import Processor
+from nat.utils.type_utils import override
+
+
+class IntermediateStepSerializer(SerializeMixin, Processor[IntermediateStep, str]):
+    """A File processor that exports telemetry traces to a local file."""
+
+    @override
+    async def process(self, item: IntermediateStep) -> str:
+        serialized_payload, _ = self._serialize_payload(item)
+        return serialized_payload
@@ -0,0 +1,71 @@
+# SPDX-FileCopyrightText: Copyright (c) 2025, NVIDIA CORPORATION & AFFILIATES. All rights reserved.
+# SPDX-License-Identifier: Apache-2.0
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from abc import ABC
+from abc import abstractmethod
+from typing import Generic
+from typing import TypeVar
+
+from nat.observability.mixin.type_introspection_mixin import TypeIntrospectionMixin
+
+InputT = TypeVar('InputT')
+OutputT = TypeVar('OutputT')
+
+
+class Processor(Generic[InputT, OutputT], TypeIntrospectionMixin, ABC):
+    """Generic protocol for processors that can convert between types in export pipelines.
+
+    Processors are the building blocks of processing pipelines in exporters. They can
+    transform data from one type to another, enabling flexible data processing chains.
+
+    The generic types work as follows:
+    - InputT: The type of items that this processor accepts
+    - OutputT: The type of items that this processor produces
+
+    Key Features:
+    - Type-safe transformations through generics
+    - Type introspection capabilities via TypeIntrospectionMixin
+    - Async processing support
+    - Chainable in processing pipelines
+
+    Inheritance Structure:
+    - Inherits from TypeIntrospectionMixin for type introspection capabilities
+    - Implements Generic[InputT, OutputT] for type safety
+    - Abstract base class requiring implementation of process()
+
+    Example:
+        .. code-block:: python
+
+            class SpanToOtelProcessor(Processor[Span, OtelSpan]):
+                async def process(self, item: Span) -> OtelSpan:
+                    return convert_span_to_otel(item)
+
+    Note:
+        Processors are typically added to ProcessingExporter instances to create
+        transformation pipelines. The exporter validates type compatibility between
+        chained processors.
+    """
+
+    @abstractmethod
+    async def process(self, item: InputT) -> OutputT:
+        """Process an item and return a potentially different type.
+
+        Args:
+            item (InputT): The item to process
+
+        Returns:
+            OutputT: The processed item
+        """
+        pass
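
This final hunk appears to correspond to `nat/observability/processor/processor.py`. Its docstring says the exporter validates type compatibility between chained processors; the sketch below is not from the package (`IntToStrProcessor` and `StrLenProcessor` are invented classes) and shows two chained processors plus a rough stand-in for that kind of check using the inherited `output_class`/`input_class` properties. The exporter's actual validation logic may differ:

```python
# Minimal sketch (not from nvidia-nat) of chaining two Processor implementations.
import asyncio

from nat.observability.processor.processor import Processor


class IntToStrProcessor(Processor[int, str]):
    async def process(self, item: int) -> str:
        return str(item)


class StrLenProcessor(Processor[str, int]):
    async def process(self, item: str) -> int:
        return len(item)


first = IntToStrProcessor()
second = StrLenProcessor()

# Illustrative compatibility check via the TypeIntrospectionMixin properties:
# the first processor's concrete output class must feed the second's input class.
assert issubclass(first.output_class, second.input_class)


async def run_chain(value: int) -> int:
    return await second.process(await first.process(value))


print(asyncio.run(run_chain(1234)))  # 4
```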