nvidia-nat 1.3.0a20250910__py3-none-any.whl → 1.3.0a20250922__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (114)
  1. nat/agent/base.py +9 -4
  2. nat/agent/prompt_optimizer/prompt.py +68 -0
  3. nat/agent/prompt_optimizer/register.py +149 -0
  4. nat/agent/react_agent/agent.py +1 -1
  5. nat/agent/react_agent/register.py +17 -14
  6. nat/agent/reasoning_agent/reasoning_agent.py +9 -7
  7. nat/agent/register.py +1 -0
  8. nat/agent/rewoo_agent/agent.py +9 -2
  9. nat/agent/rewoo_agent/register.py +16 -12
  10. nat/agent/tool_calling_agent/agent.py +69 -7
  11. nat/agent/tool_calling_agent/register.py +14 -13
  12. nat/authentication/credential_validator/__init__.py +14 -0
  13. nat/authentication/credential_validator/bearer_token_validator.py +557 -0
  14. nat/authentication/oauth2/oauth2_resource_server_config.py +124 -0
  15. nat/builder/builder.py +27 -4
  16. nat/builder/component_utils.py +7 -3
  17. nat/builder/context.py +28 -6
  18. nat/builder/function.py +313 -0
  19. nat/builder/function_info.py +1 -1
  20. nat/builder/workflow.py +5 -0
  21. nat/builder/workflow_builder.py +215 -16
  22. nat/cli/commands/optimize.py +90 -0
  23. nat/cli/commands/workflow/templates/config.yml.j2 +0 -1
  24. nat/cli/commands/workflow/workflow_commands.py +4 -7
  25. nat/cli/entrypoint.py +4 -9
  26. nat/cli/register_workflow.py +38 -4
  27. nat/cli/type_registry.py +71 -0
  28. nat/control_flow/__init__.py +0 -0
  29. nat/control_flow/register.py +20 -0
  30. nat/control_flow/router_agent/__init__.py +0 -0
  31. nat/control_flow/router_agent/agent.py +329 -0
  32. nat/control_flow/router_agent/prompt.py +48 -0
  33. nat/control_flow/router_agent/register.py +91 -0
  34. nat/control_flow/sequential_executor.py +167 -0
  35. nat/data_models/agent.py +34 -0
  36. nat/data_models/authentication.py +38 -0
  37. nat/data_models/component.py +2 -0
  38. nat/data_models/component_ref.py +11 -0
  39. nat/data_models/config.py +40 -16
  40. nat/data_models/function.py +34 -0
  41. nat/data_models/function_dependencies.py +8 -0
  42. nat/data_models/optimizable.py +119 -0
  43. nat/data_models/optimizer.py +149 -0
  44. nat/data_models/temperature_mixin.py +4 -3
  45. nat/data_models/top_p_mixin.py +4 -3
  46. nat/embedder/nim_embedder.py +1 -1
  47. nat/embedder/openai_embedder.py +1 -1
  48. nat/eval/config.py +1 -1
  49. nat/eval/evaluate.py +5 -1
  50. nat/eval/register.py +4 -0
  51. nat/eval/runtime_evaluator/__init__.py +14 -0
  52. nat/eval/runtime_evaluator/evaluate.py +123 -0
  53. nat/eval/runtime_evaluator/register.py +100 -0
  54. nat/experimental/test_time_compute/functions/plan_select_execute_function.py +5 -1
  55. nat/front_ends/fastapi/dask_client_mixin.py +65 -0
  56. nat/front_ends/fastapi/fastapi_front_end_config.py +18 -3
  57. nat/front_ends/fastapi/fastapi_front_end_plugin.py +134 -3
  58. nat/front_ends/fastapi/fastapi_front_end_plugin_worker.py +243 -228
  59. nat/front_ends/fastapi/job_store.py +518 -99
  60. nat/front_ends/fastapi/main.py +11 -19
  61. nat/front_ends/fastapi/utils.py +57 -0
  62. nat/front_ends/mcp/introspection_token_verifier.py +73 -0
  63. nat/front_ends/mcp/mcp_front_end_config.py +5 -1
  64. nat/front_ends/mcp/mcp_front_end_plugin.py +37 -11
  65. nat/front_ends/mcp/mcp_front_end_plugin_worker.py +111 -3
  66. nat/front_ends/mcp/tool_converter.py +3 -0
  67. nat/llm/aws_bedrock_llm.py +14 -3
  68. nat/llm/nim_llm.py +14 -3
  69. nat/llm/openai_llm.py +8 -1
  70. nat/observability/exporter/processing_exporter.py +29 -55
  71. nat/observability/mixin/redaction_config_mixin.py +5 -4
  72. nat/observability/mixin/tagging_config_mixin.py +26 -14
  73. nat/observability/mixin/type_introspection_mixin.py +420 -107
  74. nat/observability/processor/processor.py +3 -0
  75. nat/observability/processor/redaction/__init__.py +24 -0
  76. nat/observability/processor/redaction/contextual_redaction_processor.py +125 -0
  77. nat/observability/processor/redaction/contextual_span_redaction_processor.py +66 -0
  78. nat/observability/processor/redaction/redaction_processor.py +177 -0
  79. nat/observability/processor/redaction/span_header_redaction_processor.py +92 -0
  80. nat/observability/processor/span_tagging_processor.py +21 -14
  81. nat/profiler/decorators/framework_wrapper.py +9 -6
  82. nat/profiler/parameter_optimization/__init__.py +0 -0
  83. nat/profiler/parameter_optimization/optimizable_utils.py +93 -0
  84. nat/profiler/parameter_optimization/optimizer_runtime.py +67 -0
  85. nat/profiler/parameter_optimization/parameter_optimizer.py +153 -0
  86. nat/profiler/parameter_optimization/parameter_selection.py +108 -0
  87. nat/profiler/parameter_optimization/pareto_visualizer.py +380 -0
  88. nat/profiler/parameter_optimization/prompt_optimizer.py +384 -0
  89. nat/profiler/parameter_optimization/update_helpers.py +66 -0
  90. nat/profiler/utils.py +3 -1
  91. nat/tool/chat_completion.py +4 -1
  92. nat/tool/github_tools.py +450 -0
  93. nat/tool/register.py +2 -7
  94. nat/utils/callable_utils.py +70 -0
  95. nat/utils/exception_handlers/automatic_retries.py +103 -48
  96. nat/utils/log_levels.py +25 -0
  97. nat/utils/type_utils.py +4 -0
  98. {nvidia_nat-1.3.0a20250910.dist-info → nvidia_nat-1.3.0a20250922.dist-info}/METADATA +10 -1
  99. {nvidia_nat-1.3.0a20250910.dist-info → nvidia_nat-1.3.0a20250922.dist-info}/RECORD +105 -76
  100. {nvidia_nat-1.3.0a20250910.dist-info → nvidia_nat-1.3.0a20250922.dist-info}/entry_points.txt +1 -0
  101. nat/observability/processor/header_redaction_processor.py +0 -123
  102. nat/observability/processor/redaction_processor.py +0 -77
  103. nat/tool/github_tools/create_github_commit.py +0 -133
  104. nat/tool/github_tools/create_github_issue.py +0 -87
  105. nat/tool/github_tools/create_github_pr.py +0 -106
  106. nat/tool/github_tools/get_github_file.py +0 -106
  107. nat/tool/github_tools/get_github_issue.py +0 -166
  108. nat/tool/github_tools/get_github_pr.py +0 -256
  109. nat/tool/github_tools/update_github_issue.py +0 -100
  110. /nat/{tool/github_tools → agent/prompt_optimizer}/__init__.py +0 -0
  111. {nvidia_nat-1.3.0a20250910.dist-info → nvidia_nat-1.3.0a20250922.dist-info}/WHEEL +0 -0
  112. {nvidia_nat-1.3.0a20250910.dist-info → nvidia_nat-1.3.0a20250922.dist-info}/licenses/LICENSE-3rd-party.txt +0 -0
  113. {nvidia_nat-1.3.0a20250910.dist-info → nvidia_nat-1.3.0a20250922.dist-info}/licenses/LICENSE.md +0 -0
  114. {nvidia_nat-1.3.0a20250910.dist-info → nvidia_nat-1.3.0a20250922.dist-info}/top_level.txt +0 -0
@@ -53,6 +53,8 @@ class ProcessingExporter(Generic[PipelineInputT, PipelineOutputT], BaseExporter,
53
53
  - Configurable None filtering: processors returning None can drop items from pipeline
54
54
  - Automatic type validation before export
55
55
  """
56
+ # All ProcessingExporter instances automatically use this for signature checking
57
+ _signature_method = '_process_pipeline'
56
58
 
57
59
  def __init__(self, context_state: ContextState | None = None, drop_nones: bool = True):
58
60
  """Initialize the processing exporter.
@@ -294,8 +296,6 @@ class ProcessingExporter(Generic[PipelineInputT, PipelineOutputT], BaseExporter,
294
296
  self._check_processor_compatibility(predecessor,
295
297
  processor,
296
298
  "predecessor",
297
- predecessor.output_class,
298
- processor.input_class,
299
299
  str(predecessor.output_type),
300
300
  str(processor.input_type))
301
301
 
@@ -304,8 +304,6 @@ class ProcessingExporter(Generic[PipelineInputT, PipelineOutputT], BaseExporter,
304
304
  self._check_processor_compatibility(processor,
305
305
  successor,
306
306
  "successor",
307
- processor.output_class,
308
- successor.input_class,
309
307
  str(processor.output_type),
310
308
  str(successor.input_type))
311
309
 
@@ -313,34 +311,22 @@ class ProcessingExporter(Generic[PipelineInputT, PipelineOutputT], BaseExporter,
313
311
  source_processor: Processor,
314
312
  target_processor: Processor,
315
313
  relationship: str,
316
- source_class: type,
317
- target_class: type,
318
314
  source_type: str,
319
315
  target_type: str) -> None:
320
- """Check type compatibility between two processors.
316
+ """Check type compatibility between two processors using Pydantic validation.
321
317
 
322
318
  Args:
323
319
  source_processor (Processor): The processor providing output
324
320
  target_processor (Processor): The processor receiving input
325
321
  relationship (str): Description of relationship ("predecessor" or "successor")
326
- source_class (type): The output class of source processor
327
- target_class (type): The input class of target processor
328
322
  source_type (str): String representation of source type
329
323
  target_type (str): String representation of target type
330
324
  """
331
- try:
332
- if not issubclass(source_class, target_class):
333
- raise ValueError(f"Processor {target_processor.__class__.__name__} input type {target_type} "
334
- f"is not compatible with {relationship} {source_processor.__class__.__name__} "
335
- f"output type {source_type}")
336
- except TypeError:
337
- logger.warning(
338
- "Cannot use issubclass() for type compatibility check between "
339
- "%s (%s) and %s (%s). Skipping compatibility check.",
340
- source_processor.__class__.__name__,
341
- source_type,
342
- target_processor.__class__.__name__,
343
- target_type)
325
+ # Use Pydantic-based type compatibility checking
326
+ if not source_processor.is_output_compatible_with(target_processor.input_type):
327
+ raise ValueError(f"Processor {target_processor.__class__.__name__} input type {target_type} "
328
+ f"is not compatible with {relationship} {source_processor.__class__.__name__} "
329
+ f"output type {source_type}")
344
330
 
345
331
  async def _pre_start(self) -> None:
346
332
 
@@ -350,36 +336,21 @@ class ProcessingExporter(Generic[PipelineInputT, PipelineOutputT], BaseExporter,
350
336
  last_processor = self._processors[-1]
351
337
 
352
338
  # validate that the first processor's input type is compatible with the exporter's input type
353
- try:
354
- if not issubclass(self.input_class, first_processor.input_class):
355
- raise ValueError(f"Processor {first_processor.__class__.__name__} input type "
356
- f"{first_processor.input_type} is not compatible with the "
357
- f"{self.input_type} input type")
358
- except TypeError as e:
359
- # Handle cases where classes are generic types that can't be used with issubclass
360
- logger.warning(
361
- "Cannot validate type compatibility between %s (%s) "
362
- "and exporter (%s): %s. Skipping validation.",
363
- first_processor.__class__.__name__,
364
- first_processor.input_type,
365
- self.input_type,
366
- e)
367
-
339
+ if not first_processor.is_compatible_with_input(self.input_type):
340
+ logger.error("First processor %s input=%s incompatible with exporter input=%s",
341
+ first_processor.__class__.__name__,
342
+ first_processor.input_type,
343
+ self.input_type)
344
+ raise ValueError("First processor incompatible with exporter input")
368
345
  # Validate that the last processor's output type is compatible with the exporter's output type
369
- try:
370
- if not DecomposedType.is_type_compatible(last_processor.output_type, self.output_type):
371
- raise ValueError(f"Processor {last_processor.__class__.__name__} output type "
372
- f"{last_processor.output_type} is not compatible with the "
373
- f"{self.output_type} output type")
374
- except TypeError as e:
375
- # Handle cases where classes are generic types that can't be used with issubclass
376
- logger.warning(
377
- "Cannot validate type compatibility between %s (%s) "
378
- "and exporter (%s): %s. Skipping validation.",
379
- last_processor.__class__.__name__,
380
- last_processor.output_type,
381
- self.output_type,
382
- e)
346
+ # Use DecomposedType.is_type_compatible for the final export stage to allow batch compatibility
347
+ # This enables BatchingProcessor[T] -> Exporter[T] patterns where the exporter handles both T and list[T]
348
+ if not DecomposedType.is_type_compatible(last_processor.output_type, self.output_type):
349
+ logger.error("Last processor %s output=%s incompatible with exporter output=%s",
350
+ last_processor.__class__.__name__,
351
+ last_processor.output_type,
352
+ self.output_type)
353
+ raise ValueError("Last processor incompatible with exporter output")
383
354
 
384
355
  # Lock the pipeline to prevent further modifications
385
356
  self._pipeline_locked = True
@@ -432,12 +403,15 @@ class ProcessingExporter(Generic[PipelineInputT, PipelineOutputT], BaseExporter,
432
403
  await self.export_processed(processed_item)
433
404
  else:
434
405
  logger.debug("Skipping export of empty batch")
435
- elif isinstance(processed_item, self.output_class):
406
+ elif self.validate_output_type(processed_item):
436
407
  await self.export_processed(processed_item)
437
408
  else:
438
409
  if raise_on_invalid:
439
- raise ValueError(f"Processed item {processed_item} is not a valid output type. "
440
- f"Expected {self.output_class} or list[{self.output_class}]")
410
+ logger.error("Invalid processed item type for export: %s (expected %s or list[%s])",
411
+ type(processed_item),
412
+ self.output_type,
413
+ self.output_type)
414
+ raise ValueError("Invalid processed item type for export")
441
415
  logger.warning("Processed item %s is not a valid output type for export", processed_item)
442
416
 
443
417
  async def _continue_pipeline_after(self, source_processor: Processor, item: Any) -> None:
@@ -512,7 +486,7 @@ class ProcessingExporter(Generic[PipelineInputT, PipelineOutputT], BaseExporter,
512
486
  event (IntermediateStep): The event to be exported.
513
487
  """
514
488
  # Convert IntermediateStep to PipelineInputT and create export task
515
- if isinstance(event, self.input_class):
489
+ if self.validate_input_type(event):
516
490
  input_item: PipelineInputT = event # type: ignore
517
491
  coro = self._export_with_processing(input_item)
518
492
  self._create_export_task(coro)
@@ -1,4 +1,4 @@
1
- # SPDX-FileCopyrightText: Copyright (c) 2024-2025, NVIDIA CORPORATION & AFFILIATES. All rights reserved.
1
+ # SPDX-FileCopyrightText: Copyright (c) 2025, NVIDIA CORPORATION & AFFILIATES. All rights reserved.
2
2
  # SPDX-License-Identifier: Apache-2.0
3
3
  #
4
4
  # Licensed under the Apache License, Version 2.0 (the "License");
@@ -25,9 +25,10 @@ class RedactionConfigMixin(BaseModel):
25
25
  """
26
26
  redaction_enabled: bool = Field(default=False, description="Whether to enable redaction processing.")
27
27
  redaction_value: str = Field(default="[REDACTED]", description="Value to replace redacted attributes with.")
28
- redaction_attributes: list[str] = Field(default_factory=lambda: ["input.value", "output.value", "metadata"],
29
- description="Span attributes to redact when redaction is triggered.")
28
+ redaction_attributes: list[str] = Field(default_factory=lambda: ["input.value", "output.value", "nat.metadata"],
29
+ description="Attributes to redact when redaction is triggered.")
30
30
  force_redaction: bool = Field(default=False, description="Always redact regardless of other conditions.")
31
+ redaction_tag: str | None = Field(default=None, description="Tag to add to spans when redaction is triggered.")
31
32
 
32
33
 
33
34
  class HeaderRedactionConfigMixin(RedactionConfigMixin):
@@ -38,4 +39,4 @@ class HeaderRedactionConfigMixin(RedactionConfigMixin):
38
39
 
39
40
  Note: The callback function must be provided directly to the processor at runtime.
40
41
  """
41
- redaction_header: str = Field(default="x-redaction-key", description="Header to check for redaction decisions.")
42
+ redaction_headers: list[str] = Field(default_factory=list, description="Headers to check for redaction decisions.")
@@ -1,4 +1,4 @@
1
- # SPDX-FileCopyrightText: Copyright (c) 2024-2025, NVIDIA CORPORATION & AFFILIATES. All rights reserved.
1
+ # SPDX-FileCopyrightText: Copyright (c) 2025, NVIDIA CORPORATION & AFFILIATES. All rights reserved.
2
2
  # SPDX-License-Identifier: Apache-2.0
3
3
  #
4
4
  # Licensed under the Apache License, Version 2.0 (the "License");
@@ -13,6 +13,8 @@
13
13
  # See the License for the specific language governing permissions and
14
14
  # limitations under the License.
15
15
 
16
+ import sys
17
+ from collections.abc import Mapping
16
18
  from enum import Enum
17
19
  from typing import Generic
18
20
  from typing import TypeVar
@@ -20,7 +22,17 @@ from typing import TypeVar
20
22
  from pydantic import BaseModel
21
23
  from pydantic import Field
22
24
 
23
- TagValueT = TypeVar("TagValueT")
25
+ if sys.version_info >= (3, 12):
26
+ from typing import TypedDict
27
+ else:
28
+ from typing_extensions import TypedDict
29
+
30
+ TagMappingT = TypeVar("TagMappingT", bound=Mapping)
31
+
32
+
33
+ class BaseTaggingConfigMixin(BaseModel, Generic[TagMappingT]):
34
+ """Base mixin for tagging spans."""
35
+ tags: TagMappingT | None = Field(default=None, description="Tags to add to the span.")
24
36
 
25
37
 
26
38
  class PrivacyLevel(str, Enum):
@@ -31,20 +43,20 @@ class PrivacyLevel(str, Enum):
31
43
  HIGH = "high"
32
44
 
33
45
 
34
- class TaggingConfigMixin(BaseModel, Generic[TagValueT]):
35
- """Generic mixin for tagging spans with typed values.
46
+ PrivacyTagSchema = TypedDict(
47
+ "PrivacyTagSchema",
48
+ {
49
+ "privacy.level": PrivacyLevel,
50
+ },
51
+ total=True,
52
+ )
36
53
 
37
- This mixin provides a flexible tagging system where both the tag key
38
- and value type can be customized for different use cases.
39
- """
40
- tag_key: str | None = Field(default=None, description="Key to use when tagging traces.")
41
- tag_value: TagValueT | None = Field(default=None, description="Value to tag the traces with.")
42
54
 
55
+ class PrivacyTaggingConfigMixin(BaseTaggingConfigMixin[PrivacyTagSchema]):
56
+ """Mixin for privacy level tagging on spans."""
57
+ pass
43
58
 
44
- class PrivacyTaggingConfigMixin(TaggingConfigMixin[PrivacyLevel]):
45
- """Mixin for privacy level tagging on spans.
46
59
 
47
- Specializes TaggingConfigMixin to work with PrivacyLevel enum values,
48
- providing a typed interface for privacy-related span tagging.
49
- """
60
+ class CustomTaggingConfigMixin(BaseTaggingConfigMixin[dict[str, str]]):
61
+ """Mixin for string key-value tagging on spans."""
50
62
  pass