nvidia-nat 1.3.0.dev2__py3-none-any.whl → 1.3.0rc2__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (250)
  1. aiq/__init__.py +2 -2
  2. nat/agent/base.py +24 -15
  3. nat/agent/dual_node.py +9 -4
  4. nat/agent/prompt_optimizer/prompt.py +68 -0
  5. nat/agent/prompt_optimizer/register.py +149 -0
  6. nat/agent/react_agent/agent.py +79 -47
  7. nat/agent/react_agent/register.py +50 -22
  8. nat/agent/reasoning_agent/reasoning_agent.py +11 -9
  9. nat/agent/register.py +1 -1
  10. nat/agent/rewoo_agent/agent.py +326 -148
  11. nat/agent/rewoo_agent/prompt.py +19 -22
  12. nat/agent/rewoo_agent/register.py +54 -27
  13. nat/agent/tool_calling_agent/agent.py +84 -28
  14. nat/agent/tool_calling_agent/register.py +51 -28
  15. nat/authentication/api_key/api_key_auth_provider.py +2 -2
  16. nat/authentication/credential_validator/bearer_token_validator.py +557 -0
  17. nat/authentication/http_basic_auth/http_basic_auth_provider.py +1 -1
  18. nat/authentication/interfaces.py +5 -2
  19. nat/authentication/oauth2/oauth2_auth_code_flow_provider.py +69 -36
  20. nat/authentication/oauth2/oauth2_resource_server_config.py +124 -0
  21. nat/authentication/register.py +0 -1
  22. nat/builder/builder.py +56 -24
  23. nat/builder/component_utils.py +9 -5
  24. nat/builder/context.py +68 -17
  25. nat/builder/eval_builder.py +16 -11
  26. nat/builder/framework_enum.py +1 -0
  27. nat/builder/front_end.py +1 -1
  28. nat/builder/function.py +378 -8
  29. nat/builder/function_base.py +3 -3
  30. nat/builder/function_info.py +6 -8
  31. nat/builder/user_interaction_manager.py +2 -2
  32. nat/builder/workflow.py +13 -1
  33. nat/builder/workflow_builder.py +281 -76
  34. nat/cli/cli_utils/config_override.py +2 -2
  35. nat/cli/commands/evaluate.py +1 -1
  36. nat/cli/commands/info/info.py +16 -6
  37. nat/cli/commands/info/list_channels.py +1 -1
  38. nat/cli/commands/info/list_components.py +7 -8
  39. nat/cli/commands/mcp/__init__.py +14 -0
  40. nat/cli/commands/mcp/mcp.py +986 -0
  41. nat/cli/commands/object_store/__init__.py +14 -0
  42. nat/cli/commands/object_store/object_store.py +227 -0
  43. nat/cli/commands/optimize.py +90 -0
  44. nat/cli/commands/registry/publish.py +2 -2
  45. nat/cli/commands/registry/pull.py +2 -2
  46. nat/cli/commands/registry/remove.py +2 -2
  47. nat/cli/commands/registry/search.py +15 -17
  48. nat/cli/commands/start.py +16 -5
  49. nat/cli/commands/uninstall.py +1 -1
  50. nat/cli/commands/workflow/templates/config.yml.j2 +14 -13
  51. nat/cli/commands/workflow/templates/pyproject.toml.j2 +4 -1
  52. nat/cli/commands/workflow/templates/register.py.j2 +2 -3
  53. nat/cli/commands/workflow/templates/workflow.py.j2 +35 -21
  54. nat/cli/commands/workflow/workflow_commands.py +62 -22
  55. nat/cli/entrypoint.py +8 -10
  56. nat/cli/main.py +3 -0
  57. nat/cli/register_workflow.py +38 -4
  58. nat/cli/type_registry.py +75 -6
  59. nat/control_flow/__init__.py +0 -0
  60. nat/control_flow/register.py +20 -0
  61. nat/control_flow/router_agent/__init__.py +0 -0
  62. nat/control_flow/router_agent/agent.py +329 -0
  63. nat/control_flow/router_agent/prompt.py +48 -0
  64. nat/control_flow/router_agent/register.py +91 -0
  65. nat/control_flow/sequential_executor.py +166 -0
  66. nat/data_models/agent.py +34 -0
  67. nat/data_models/api_server.py +74 -66
  68. nat/data_models/authentication.py +23 -9
  69. nat/data_models/common.py +1 -1
  70. nat/data_models/component.py +2 -0
  71. nat/data_models/component_ref.py +11 -0
  72. nat/data_models/config.py +41 -17
  73. nat/data_models/dataset_handler.py +1 -1
  74. nat/data_models/discovery_metadata.py +4 -4
  75. nat/data_models/evaluate.py +4 -1
  76. nat/data_models/function.py +34 -0
  77. nat/data_models/function_dependencies.py +14 -6
  78. nat/data_models/gated_field_mixin.py +242 -0
  79. nat/data_models/intermediate_step.py +3 -3
  80. nat/data_models/optimizable.py +119 -0
  81. nat/data_models/optimizer.py +149 -0
  82. nat/data_models/span.py +41 -3
  83. nat/data_models/swe_bench_model.py +1 -1
  84. nat/data_models/temperature_mixin.py +44 -0
  85. nat/data_models/thinking_mixin.py +86 -0
  86. nat/data_models/top_p_mixin.py +44 -0
  87. nat/embedder/nim_embedder.py +1 -1
  88. nat/embedder/openai_embedder.py +1 -1
  89. nat/embedder/register.py +0 -1
  90. nat/eval/config.py +3 -1
  91. nat/eval/dataset_handler/dataset_handler.py +71 -7
  92. nat/eval/evaluate.py +86 -31
  93. nat/eval/evaluator/base_evaluator.py +1 -1
  94. nat/eval/evaluator/evaluator_model.py +13 -0
  95. nat/eval/intermediate_step_adapter.py +1 -1
  96. nat/eval/rag_evaluator/evaluate.py +2 -2
  97. nat/eval/rag_evaluator/register.py +3 -3
  98. nat/eval/register.py +4 -1
  99. nat/eval/remote_workflow.py +3 -3
  100. nat/eval/runtime_evaluator/__init__.py +14 -0
  101. nat/eval/runtime_evaluator/evaluate.py +123 -0
  102. nat/eval/runtime_evaluator/register.py +100 -0
  103. nat/eval/swe_bench_evaluator/evaluate.py +6 -6
  104. nat/eval/trajectory_evaluator/evaluate.py +1 -1
  105. nat/eval/trajectory_evaluator/register.py +1 -1
  106. nat/eval/tunable_rag_evaluator/evaluate.py +4 -7
  107. nat/eval/utils/eval_trace_ctx.py +89 -0
  108. nat/eval/utils/weave_eval.py +18 -9
  109. nat/experimental/decorators/experimental_warning_decorator.py +27 -7
  110. nat/experimental/test_time_compute/functions/plan_select_execute_function.py +7 -3
  111. nat/experimental/test_time_compute/functions/ttc_tool_orchestration_function.py +3 -3
  112. nat/experimental/test_time_compute/functions/ttc_tool_wrapper_function.py +1 -1
  113. nat/experimental/test_time_compute/models/strategy_base.py +5 -4
  114. nat/experimental/test_time_compute/register.py +0 -1
  115. nat/experimental/test_time_compute/selection/llm_based_output_merging_selector.py +1 -3
  116. nat/front_ends/console/authentication_flow_handler.py +82 -30
  117. nat/front_ends/console/console_front_end_plugin.py +8 -5
  118. nat/front_ends/fastapi/auth_flow_handlers/websocket_flow_handler.py +52 -17
  119. nat/front_ends/fastapi/dask_client_mixin.py +65 -0
  120. nat/front_ends/fastapi/fastapi_front_end_config.py +36 -5
  121. nat/front_ends/fastapi/fastapi_front_end_controller.py +4 -4
  122. nat/front_ends/fastapi/fastapi_front_end_plugin.py +135 -4
  123. nat/front_ends/fastapi/fastapi_front_end_plugin_worker.py +452 -282
  124. nat/front_ends/fastapi/job_store.py +518 -99
  125. nat/front_ends/fastapi/main.py +11 -19
  126. nat/front_ends/fastapi/message_handler.py +13 -14
  127. nat/front_ends/fastapi/message_validator.py +19 -19
  128. nat/front_ends/fastapi/response_helpers.py +4 -4
  129. nat/front_ends/fastapi/step_adaptor.py +2 -2
  130. nat/front_ends/fastapi/utils.py +57 -0
  131. nat/front_ends/mcp/introspection_token_verifier.py +73 -0
  132. nat/front_ends/mcp/mcp_front_end_config.py +10 -1
  133. nat/front_ends/mcp/mcp_front_end_plugin.py +45 -13
  134. nat/front_ends/mcp/mcp_front_end_plugin_worker.py +116 -8
  135. nat/front_ends/mcp/tool_converter.py +44 -14
  136. nat/front_ends/register.py +0 -1
  137. nat/front_ends/simple_base/simple_front_end_plugin_base.py +3 -1
  138. nat/llm/aws_bedrock_llm.py +24 -12
  139. nat/llm/azure_openai_llm.py +13 -6
  140. nat/llm/litellm_llm.py +69 -0
  141. nat/llm/nim_llm.py +20 -8
  142. nat/llm/openai_llm.py +14 -6
  143. nat/llm/register.py +4 -1
  144. nat/llm/utils/env_config_value.py +2 -3
  145. nat/llm/utils/thinking.py +215 -0
  146. nat/meta/pypi.md +9 -9
  147. nat/object_store/register.py +0 -1
  148. nat/observability/exporter/base_exporter.py +3 -3
  149. nat/observability/exporter/file_exporter.py +1 -1
  150. nat/observability/exporter/processing_exporter.py +309 -81
  151. nat/observability/exporter/span_exporter.py +35 -15
  152. nat/observability/exporter_manager.py +7 -7
  153. nat/observability/mixin/file_mixin.py +7 -7
  154. nat/observability/mixin/redaction_config_mixin.py +42 -0
  155. nat/observability/mixin/tagging_config_mixin.py +62 -0
  156. nat/observability/mixin/type_introspection_mixin.py +420 -107
  157. nat/observability/processor/batching_processor.py +5 -7
  158. nat/observability/processor/falsy_batch_filter_processor.py +55 -0
  159. nat/observability/processor/processor.py +3 -0
  160. nat/observability/processor/processor_factory.py +70 -0
  161. nat/observability/processor/redaction/__init__.py +24 -0
  162. nat/observability/processor/redaction/contextual_redaction_processor.py +125 -0
  163. nat/observability/processor/redaction/contextual_span_redaction_processor.py +66 -0
  164. nat/observability/processor/redaction/redaction_processor.py +177 -0
  165. nat/observability/processor/redaction/span_header_redaction_processor.py +92 -0
  166. nat/observability/processor/span_tagging_processor.py +68 -0
  167. nat/observability/register.py +6 -4
  168. nat/profiler/calc/calc_runner.py +3 -4
  169. nat/profiler/callbacks/agno_callback_handler.py +1 -1
  170. nat/profiler/callbacks/langchain_callback_handler.py +6 -6
  171. nat/profiler/callbacks/llama_index_callback_handler.py +3 -3
  172. nat/profiler/callbacks/semantic_kernel_callback_handler.py +3 -3
  173. nat/profiler/data_frame_row.py +1 -1
  174. nat/profiler/decorators/framework_wrapper.py +62 -13
  175. nat/profiler/decorators/function_tracking.py +160 -3
  176. nat/profiler/forecasting/models/forecasting_base_model.py +3 -1
  177. nat/profiler/forecasting/models/linear_model.py +1 -1
  178. nat/profiler/forecasting/models/random_forest_regressor.py +1 -1
  179. nat/profiler/inference_optimization/bottleneck_analysis/nested_stack_analysis.py +1 -1
  180. nat/profiler/inference_optimization/bottleneck_analysis/simple_stack_analysis.py +1 -1
  181. nat/profiler/inference_optimization/data_models.py +3 -3
  182. nat/profiler/inference_optimization/experimental/prefix_span_analysis.py +8 -9
  183. nat/profiler/inference_optimization/token_uniqueness.py +1 -1
  184. nat/profiler/parameter_optimization/__init__.py +0 -0
  185. nat/profiler/parameter_optimization/optimizable_utils.py +93 -0
  186. nat/profiler/parameter_optimization/optimizer_runtime.py +67 -0
  187. nat/profiler/parameter_optimization/parameter_optimizer.py +153 -0
  188. nat/profiler/parameter_optimization/parameter_selection.py +107 -0
  189. nat/profiler/parameter_optimization/pareto_visualizer.py +380 -0
  190. nat/profiler/parameter_optimization/prompt_optimizer.py +384 -0
  191. nat/profiler/parameter_optimization/update_helpers.py +66 -0
  192. nat/profiler/profile_runner.py +14 -9
  193. nat/profiler/utils.py +4 -2
  194. nat/registry_handlers/local/local_handler.py +2 -2
  195. nat/registry_handlers/package_utils.py +1 -2
  196. nat/registry_handlers/pypi/pypi_handler.py +23 -26
  197. nat/registry_handlers/register.py +3 -4
  198. nat/registry_handlers/rest/rest_handler.py +12 -13
  199. nat/retriever/milvus/retriever.py +2 -2
  200. nat/retriever/nemo_retriever/retriever.py +1 -1
  201. nat/retriever/register.py +0 -1
  202. nat/runtime/loader.py +2 -2
  203. nat/runtime/runner.py +106 -8
  204. nat/runtime/session.py +69 -8
  205. nat/settings/global_settings.py +16 -5
  206. nat/tool/chat_completion.py +5 -2
  207. nat/tool/code_execution/local_sandbox/local_sandbox_server.py +3 -3
  208. nat/tool/datetime_tools.py +49 -9
  209. nat/tool/document_search.py +2 -2
  210. nat/tool/github_tools.py +450 -0
  211. nat/tool/memory_tools/get_memory_tool.py +1 -1
  212. nat/tool/nvidia_rag.py +1 -1
  213. nat/tool/register.py +2 -9
  214. nat/tool/retriever.py +3 -2
  215. nat/utils/callable_utils.py +70 -0
  216. nat/utils/data_models/schema_validator.py +3 -3
  217. nat/utils/decorators.py +210 -0
  218. nat/utils/exception_handlers/automatic_retries.py +104 -51
  219. nat/utils/exception_handlers/schemas.py +1 -1
  220. nat/utils/io/yaml_tools.py +2 -2
  221. nat/utils/log_levels.py +25 -0
  222. nat/utils/reactive/base/observable_base.py +2 -2
  223. nat/utils/reactive/base/observer_base.py +1 -1
  224. nat/utils/reactive/observable.py +2 -2
  225. nat/utils/reactive/observer.py +4 -4
  226. nat/utils/reactive/subscription.py +1 -1
  227. nat/utils/settings/global_settings.py +6 -8
  228. nat/utils/type_converter.py +4 -3
  229. nat/utils/type_utils.py +9 -5
  230. {nvidia_nat-1.3.0.dev2.dist-info → nvidia_nat-1.3.0rc2.dist-info}/METADATA +42 -18
  231. {nvidia_nat-1.3.0.dev2.dist-info → nvidia_nat-1.3.0rc2.dist-info}/RECORD +238 -196
  232. {nvidia_nat-1.3.0.dev2.dist-info → nvidia_nat-1.3.0rc2.dist-info}/entry_points.txt +1 -0
  233. nat/cli/commands/info/list_mcp.py +0 -304
  234. nat/tool/github_tools/create_github_commit.py +0 -133
  235. nat/tool/github_tools/create_github_issue.py +0 -87
  236. nat/tool/github_tools/create_github_pr.py +0 -106
  237. nat/tool/github_tools/get_github_file.py +0 -106
  238. nat/tool/github_tools/get_github_issue.py +0 -166
  239. nat/tool/github_tools/get_github_pr.py +0 -256
  240. nat/tool/github_tools/update_github_issue.py +0 -100
  241. nat/tool/mcp/exceptions.py +0 -142
  242. nat/tool/mcp/mcp_client.py +0 -255
  243. nat/tool/mcp/mcp_tool.py +0 -96
  244. nat/utils/exception_handlers/mcp.py +0 -211
  245. /nat/{tool/github_tools → agent/prompt_optimizer}/__init__.py +0 -0
  246. /nat/{tool/mcp → authentication/credential_validator}/__init__.py +0 -0
  247. {nvidia_nat-1.3.0.dev2.dist-info → nvidia_nat-1.3.0rc2.dist-info}/WHEEL +0 -0
  248. {nvidia_nat-1.3.0.dev2.dist-info → nvidia_nat-1.3.0rc2.dist-info}/licenses/LICENSE-3rd-party.txt +0 -0
  249. {nvidia_nat-1.3.0.dev2.dist-info → nvidia_nat-1.3.0rc2.dist-info}/licenses/LICENSE.md +0 -0
  250. {nvidia_nat-1.3.0.dev2.dist-info → nvidia_nat-1.3.0rc2.dist-info}/top_level.txt +0 -0
nat/utils/decorators.py (new file)
@@ -0,0 +1,210 @@
+ # SPDX-FileCopyrightText: Copyright (c) 2025, NVIDIA CORPORATION & AFFILIATES. All rights reserved.
+ # SPDX-License-Identifier: Apache-2.0
+ #
+ # Licensed under the Apache License, Version 2.0 (the "License");
+ # you may not use this file except in compliance with the License.
+ # You may obtain a copy of the License at
+ #
+ # http://www.apache.org/licenses/LICENSE-2.0
+ #
+ # Unless required by applicable law or agreed to in writing, software
+ # distributed under the License is distributed on an "AS IS" BASIS,
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ # See the License for the specific language governing permissions and
+ # limitations under the License.
+ """Deprecation utilities.
+
+ This module provides helpers to standardize deprecation signaling across the
+ codebase:
+
+ - ``issue_deprecation_warning``: Builds and emits a single deprecation message
+   per function using the standard logging pipeline.
+ - ``deprecated``: A decorator that wraps sync/async functions and generators to
+   log a one-time deprecation message upon first use. It supports optional
+   metadata, a planned removal version, a suggested replacement, and an
+   optional feature name label.
+
+ Messages are emitted via ``logging.getLogger(__name__).warning`` (not
+ ``warnings.warn``) so they appear in normal application logs and respect global
+ logging configuration. Each unique function logs at most once per process.
+ """
+
+ import functools
+ import inspect
+ import logging
+ from collections.abc import AsyncGenerator
+ from collections.abc import Callable
+ from collections.abc import Generator
+ from typing import Any
+ from typing import TypeVar
+ from typing import overload
+
+ logger = logging.getLogger(__name__)
+
+ _warning_issued = set()
+
+ # Type variables for overloads
+ F = TypeVar('F', bound=Callable[..., Any])
+
+
+ def issue_deprecation_warning(function_name: str,
+                               removal_version: str | None = None,
+                               replacement: str | None = None,
+                               reason: str | None = None,
+                               feature_name: str | None = None,
+                               metadata: dict[str, Any] | None = None) -> None:
+     """
+     Log a deprecation warning message for the function.
+
+     A warning is emitted only once per function. When a ``metadata`` dict
+     is supplied, it is appended to the log entry to provide extra context
+     (e.g., version, author, feature flag).
+
+     Args:
+         function_name: The name of the deprecated function
+         removal_version: The version when the function will be removed
+         replacement: What to use instead of this function
+         reason: Why the function is being deprecated
+         feature_name: Optional name of the feature that is deprecated
+         metadata: Optional dictionary of metadata to log with the warning
+     """
+     if function_name not in _warning_issued:
+         # Build the deprecation message
+         if feature_name:
+             warning_message = f"{feature_name} is deprecated"
+         else:
+             warning_message = f"Function {function_name} is deprecated"
+
+         if removal_version:
+             warning_message += f" and will be removed in version {removal_version}"
+         else:
+             warning_message += " and will be removed in a future release"
+
+         warning_message += "."
+
+         if reason:
+             warning_message += f" Reason: {reason}."
+
+         if replacement:
+             warning_message += f" Use '{replacement}' instead."
+
+         if metadata:
+             warning_message += f" | Metadata: {metadata}"
+
+         # Issue warning and save function name to avoid duplicate warnings
+         logger.warning(warning_message)
+         _warning_issued.add(function_name)
+
+
+ # Overloads for different function types
+ @overload
+ def deprecated(func: F,
+                *,
+                removal_version: str | None = None,
+                replacement: str | None = None,
+                reason: str | None = None,
+                feature_name: str | None = None,
+                metadata: dict[str, Any] | None = None) -> F:
+     """Overload for direct decorator usage (when called without parentheses)."""
+     ...
+
+
+ @overload
+ def deprecated(*,
+                removal_version: str | None = None,
+                replacement: str | None = None,
+                reason: str | None = None,
+                feature_name: str | None = None,
+                metadata: dict[str, Any] | None = None) -> Callable[[F], F]:
+     """Overload for decorator factory usage (when called with parentheses)."""
+     ...
+
+
+ def deprecated(func: Any = None,
+                *,
+                removal_version: str | None = None,
+                replacement: str | None = None,
+                reason: str | None = None,
+                feature_name: str | None = None,
+                metadata: dict[str, Any] | None = None) -> Any:
+     """
+     Decorator that can wrap any type of function (sync, async, generator,
+     async generator) and logs a deprecation warning.
+
+     Args:
+         func: The function to be decorated.
+         removal_version: The version when the function will be removed
+         replacement: What to use instead of this function
+         reason: Why the function is being deprecated
+         feature_name: Optional name of the feature that is deprecated. If provided, the warning will be
+             prefixed with "The <feature_name> feature is deprecated".
+         metadata: Optional dictionary of metadata to log with the warning. This can include information
+             like version, author, etc. If provided, the metadata will be
+             logged alongside the deprecation warning.
+     """
+     function_name: str = f"{func.__module__}.{func.__qualname__}" if func else "<unknown_function>"
+
+     # If called as @deprecated(...) but not immediately passed a function
+     if func is None:
+
+         def decorator_wrapper(actual_func):
+             return deprecated(actual_func,
+                               removal_version=removal_version,
+                               replacement=replacement,
+                               reason=reason,
+                               feature_name=feature_name,
+                               metadata=metadata)
+
+         return decorator_wrapper
+
+     # --- Validate metadata ---
+     if metadata is not None:
+         if not isinstance(metadata, dict):
+             raise TypeError("metadata must be a dict[str, Any].")
+         if any(not isinstance(k, str) for k in metadata.keys()):
+             raise TypeError("All metadata keys must be strings.")
+
+     # --- Now detect the function type and wrap accordingly ---
+     if inspect.isasyncgenfunction(func):
+         # ---------------------
+         # ASYNC GENERATOR
+         # ---------------------
+
+         @functools.wraps(func)
+         async def async_gen_wrapper(*args, **kwargs) -> AsyncGenerator[Any, Any]:
+             issue_deprecation_warning(function_name, removal_version, replacement, reason, feature_name, metadata)
+             async for item in func(*args, **kwargs):
+                 yield item  # yield the original item
+
+         return async_gen_wrapper
+
+     if inspect.iscoroutinefunction(func):
+         # ---------------------
+         # ASYNC FUNCTION
+         # ---------------------
+         @functools.wraps(func)
+         async def async_wrapper(*args, **kwargs) -> Any:
+             issue_deprecation_warning(function_name, removal_version, replacement, reason, feature_name, metadata)
+             result = await func(*args, **kwargs)
+             return result
+
+         return async_wrapper
+
+     if inspect.isgeneratorfunction(func):
+         # ---------------------
+         # SYNC GENERATOR
+         # ---------------------
+         @functools.wraps(func)
+         def sync_gen_wrapper(*args, **kwargs) -> Generator[Any, Any, Any]:
+             issue_deprecation_warning(function_name, removal_version, replacement, reason, feature_name, metadata)
+             yield from func(*args, **kwargs)  # yield the original item
+
+         return sync_gen_wrapper
+
+     @functools.wraps(func)
+     def sync_wrapper(*args, **kwargs) -> Any:
+         issue_deprecation_warning(function_name, removal_version, replacement, reason, feature_name, metadata)
+         result = func(*args, **kwargs)
+         return result
+
+     return sync_wrapper
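
For context, a minimal usage sketch of the new deprecated() decorator follows (the import path mirrors the file above; the decorated functions and version strings are illustrative, not part of the package):

import logging

from nat.utils.decorators import deprecated

logging.basicConfig(level=logging.WARNING)


@deprecated(removal_version="1.4.0", replacement="lookup_v2", reason="superseded by the v2 lookup API")
def lookup_v1(key: str) -> str:
    # Hypothetical legacy helper kept only for backwards compatibility.
    return key.upper()


@deprecated  # bare form, no parentheses: logs a generic one-time message
def legacy_values():
    yield from (1, 2, 3)


lookup_v1("a")  # first call logs the deprecation warning
lookup_v1("b")  # later calls stay silent: at most one warning per function per process
list(legacy_values())  # generator functions are wrapped by sync_gen_wrapper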
nat/utils/exception_handlers/automatic_retries.py
@@ -26,8 +26,6 @@ from collections.abc import Sequence
  from typing import Any
  from typing import TypeVar
 
- # pylint: disable=inconsistent-return-statements
-
  T = TypeVar("T")
  Exc = tuple[type[BaseException], ...] # exception classes
  CodePattern = int | str | range # for retry_codes argument
@@ -120,6 +118,7 @@ def _retry_decorator(
      retry_codes: Sequence[CodePattern] | None = None,
      retry_on_messages: Sequence[str] | None = None,
      deepcopy: bool = False,
+     instance_context_aware: bool = False,
  ) -> Callable[[Callable[..., T]], Callable[..., T]]:
      """
      Build a decorator that retries with exponential back-off *iff*:
@@ -132,69 +131,122 @@ def _retry_decorator(
      deepcopy:
          If True, each retry receives deep‑copied *args and **kwargs* to avoid
          mutating shared state between attempts.
+
+     instance_context_aware:
+         If True, the decorator will check for a retry context flag on the first
+         argument (assumed to be 'self'). If the flag is set, retries are skipped
+         to prevent retry storms in nested method calls.
      """
 
      def decorate(fn: Callable[..., T]) -> Callable[..., T]:
          use_deepcopy = deepcopy
+         use_context_aware = instance_context_aware
 
-         async def _call_with_retry_async(*args, **kw) -> T:
-             delay = base_delay
-             for attempt in range(retries):
-                 call_args = copy.deepcopy(args) if use_deepcopy else args
-                 call_kwargs = copy.deepcopy(kw) if use_deepcopy else kw
+         class _RetryContext:
+             """Context manager for instance-level retry gating."""
+
+             __slots__ = ("_obj", "_enabled", "_active")
+
+             def __init__(self, args: tuple[Any, ...]):
+                 self._obj = args[0] if (use_context_aware and args) else None
+                 self._enabled = bool(self._obj)
+                 self._active = False
+
+             def __enter__(self):
+                 if not self._enabled:
+                     return False
                  try:
-                     return await fn(*call_args, **call_kwargs)
-                 except retry_on as exc:
-                     if (not _want_retry(exc, code_patterns=retry_codes, msg_substrings=retry_on_messages)
-                             or attempt == retries - 1):
-                         raise
-                     await asyncio.sleep(delay)
-                     delay *= backoff
+                     # If already in retry context, signal caller to skip retries
+                     if getattr(self._obj, "_in_retry_context", False):
+                         return True
+                     object.__setattr__(self._obj, "_in_retry_context", True)
+                     self._active = True
+                     return False
+                 except Exception:
+                     # If we cannot set the attribute, behave as if context isn't enabled
+                     self._enabled = False
+                     return False
+
+             def __exit__(self, _exc_type, _exc, _tb):
+                 if self._enabled and self._active:
+                     try:
+                         object.__setattr__(self._obj, "_in_retry_context", False)
+                     except Exception:
+                         pass
+
+         async def _call_with_retry_async(*args, **kw) -> T:
+             with _RetryContext(args) as already_in_context:
+                 if already_in_context:
+                     return await fn(*args, **kw)
+                 delay = base_delay
+                 for attempt in range(retries):
+                     call_args = copy.deepcopy(args) if use_deepcopy else args
+                     call_kwargs = copy.deepcopy(kw) if use_deepcopy else kw
+                     try:
+                         return await fn(*call_args, **call_kwargs)
+                     except retry_on as exc:
+                         if (not _want_retry(exc, code_patterns=retry_codes, msg_substrings=retry_on_messages)
+                                 or attempt == retries - 1):
+                             raise
+                         await asyncio.sleep(delay)
+                         delay *= backoff
 
          async def _agen_with_retry(*args, **kw):
-             delay = base_delay
-             for attempt in range(retries):
-                 call_args = copy.deepcopy(args) if use_deepcopy else args
-                 call_kwargs = copy.deepcopy(kw) if use_deepcopy else kw
-                 try:
-                     async for item in fn(*call_args, **call_kwargs):
+             with _RetryContext(args) as already_in_context:
+                 if already_in_context:
+                     async for item in fn(*args, **kw):
                          yield item
                      return
-                 except retry_on as exc:
-                     if (not _want_retry(exc, code_patterns=retry_codes, msg_substrings=retry_on_messages)
-                             or attempt == retries - 1):
-                         raise
-                     await asyncio.sleep(delay)
-                     delay *= backoff
+                 delay = base_delay
+                 for attempt in range(retries):
+                     call_args = copy.deepcopy(args) if use_deepcopy else args
+                     call_kwargs = copy.deepcopy(kw) if use_deepcopy else kw
+                     try:
+                         async for item in fn(*call_args, **call_kwargs):
+                             yield item
+                         return
+                     except retry_on as exc:
+                         if (not _want_retry(exc, code_patterns=retry_codes, msg_substrings=retry_on_messages)
+                                 or attempt == retries - 1):
+                             raise
+                         await asyncio.sleep(delay)
+                         delay *= backoff
 
          def _gen_with_retry(*args, **kw) -> Iterable[Any]:
-             delay = base_delay
-             for attempt in range(retries):
-                 call_args = copy.deepcopy(args) if use_deepcopy else args
-                 call_kwargs = copy.deepcopy(kw) if use_deepcopy else kw
-                 try:
-                     yield from fn(*call_args, **call_kwargs)
+             with _RetryContext(args) as already_in_context:
+                 if already_in_context:
+                     yield from fn(*args, **kw)
                      return
-                 except retry_on as exc:
-                     if (not _want_retry(exc, code_patterns=retry_codes, msg_substrings=retry_on_messages)
-                             or attempt == retries - 1):
-                         raise
-                     time.sleep(delay)
-                     delay *= backoff
+                 delay = base_delay
+                 for attempt in range(retries):
+                     call_args = copy.deepcopy(args) if use_deepcopy else args
+                     call_kwargs = copy.deepcopy(kw) if use_deepcopy else kw
+                     try:
+                         yield from fn(*call_args, **call_kwargs)
+                         return
+                     except retry_on as exc:
+                         if (not _want_retry(exc, code_patterns=retry_codes, msg_substrings=retry_on_messages)
+                                 or attempt == retries - 1):
+                             raise
+                         time.sleep(delay)
+                         delay *= backoff
 
          def _sync_with_retry(*args, **kw) -> T:
-             delay = base_delay
-             for attempt in range(retries):
-                 call_args = copy.deepcopy(args) if use_deepcopy else args
-                 call_kwargs = copy.deepcopy(kw) if use_deepcopy else kw
-                 try:
-                     return fn(*call_args, **call_kwargs)
-                 except retry_on as exc:
-                     if (not _want_retry(exc, code_patterns=retry_codes, msg_substrings=retry_on_messages)
-                             or attempt == retries - 1):
-                         raise
-                     time.sleep(delay)
-                     delay *= backoff
+             with _RetryContext(args) as already_in_context:
+                 if already_in_context:
+                     return fn(*args, **kw)
+                 delay = base_delay
+                 for attempt in range(retries):
+                     call_args = copy.deepcopy(args) if use_deepcopy else args
+                     call_kwargs = copy.deepcopy(kw) if use_deepcopy else kw
+                     try:
+                         return fn(*call_args, **call_kwargs)
+                     except retry_on as exc:
+                         if (not _want_retry(exc, code_patterns=retry_codes, msg_substrings=retry_on_messages)
+                                 or attempt == retries - 1):
+                             raise
+                         time.sleep(delay)
+                         delay *= backoff
 
          # Decide which wrapper to return
          if inspect.iscoroutinefunction(fn):
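
The instance_context_aware gating above can be illustrated with a standalone sketch (this is not the library's API, only the idea behind the _in_retry_context flag): an outer retried call marks the instance, so a nested retried call on the same instance runs once instead of multiplying attempts.

class Client:
    def __init__(self):
        self.attempts = 0

    def flaky(self):
        self.attempts += 1
        raise RuntimeError("transient failure")


def call_with_retry(obj, call, retries=3):
    # Skip the retry loop entirely when another retry loop on this instance is already active.
    if getattr(obj, "_in_retry_context", False):
        return call()
    object.__setattr__(obj, "_in_retry_context", True)
    try:
        for attempt in range(retries):
            try:
                return call()
            except RuntimeError:
                if attempt == retries - 1:
                    raise
    finally:
        object.__setattr__(obj, "_in_retry_context", False)


client = Client()
try:
    # The outer call is retried; the nested call sees the flag and is not retried again,
    # so total attempts stay at 3 rather than 3 * 3 = 9.
    call_with_retry(client, lambda: call_with_retry(client, client.flaky))
except RuntimeError:
    pass
print(client.attempts)  # 3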
@@ -247,6 +299,7 @@ def patch_with_retry(
          retry_codes=retry_codes,
          retry_on_messages=retry_on_messages,
          deepcopy=deepcopy,
+         instance_context_aware=True,  # Prevent retry storms
      )
 
      # Choose attribute source: the *class* to avoid triggering __getattr__
@@ -257,7 +310,7 @@ def patch_with_retry(
          descriptor = inspect.getattr_static(cls, name)
 
          # Skip dunders, privates and all descriptors we must not wrap
-         if (name.startswith("_") or isinstance(descriptor, (property, staticmethod, classmethod))):
+         if (name.startswith("_") or isinstance(descriptor, property | staticmethod | classmethod)):
              continue
 
          original = descriptor.__func__ if isinstance(descriptor, types.MethodType) else descriptor
nat/utils/exception_handlers/schemas.py
@@ -21,7 +21,7 @@ from pydantic import ValidationError
  logger = logging.getLogger(__name__)
 
 
- def schema_exception_handler(func, **kwargs): # pylint: disable=unused-argument
+ def schema_exception_handler(func, **kwargs):
      """
      A decorator that handles `ValidationError` exceptions for schema validation functions.
 
nat/utils/io/yaml_tools.py
@@ -57,7 +57,7 @@ def yaml_load(config_path: StrPath) -> dict:
      """
 
      # Read YAML file
-     with open(config_path, "r", encoding="utf-8") as stream:
+     with open(config_path, encoding="utf-8") as stream:
          config_str = stream.read()
 
      return yaml_loads(config_str)
@@ -85,7 +85,7 @@ def yaml_loads(config: str) -> dict:
      try:
          config_data = yaml.safe_load(stream)
      except yaml.YAMLError as e:
-         logger.error("Error loading YAML: %s", interpolated_config_str, exc_info=True)
+         logger.error("Error loading YAML: %s", interpolated_config_str)
          raise ValueError(f"Error loading YAML: {e}") from e
 
      assert isinstance(config_data, dict)
nat/utils/log_levels.py (new file)
@@ -0,0 +1,25 @@
+ # SPDX-FileCopyrightText: Copyright (c) 2025, NVIDIA CORPORATION & AFFILIATES. All rights reserved.
+ # SPDX-License-Identifier: Apache-2.0
+ #
+ # Licensed under the Apache License, Version 2.0 (the "License");
+ # you may not use this file except in compliance with the License.
+ # You may obtain a copy of the License at
+ #
+ # http://www.apache.org/licenses/LICENSE-2.0
+ #
+ # Unless required by applicable law or agreed to in writing, software
+ # distributed under the License is distributed on an "AS IS" BASIS,
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ # See the License for the specific language governing permissions and
+ # limitations under the License.
+
+ import logging
+
+ # Define log level choices
+ LOG_LEVELS = {
+     'DEBUG': logging.DEBUG,
+     'INFO': logging.INFO,
+     'WARNING': logging.WARNING,
+     'ERROR': logging.ERROR,
+     'CRITICAL': logging.CRITICAL
+ }
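
A small usage sketch for the new LOG_LEVELS table (only the import path and the dictionary come from the file above; the --log-level option named here is hypothetical):

import logging

from nat.utils.log_levels import LOG_LEVELS

level_name = "debug"  # e.g. the value parsed from a hypothetical --log-level CLI option
logging.basicConfig(level=LOG_LEVELS.get(level_name.upper(), logging.INFO))
logging.getLogger(__name__).debug("debug logging enabled")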
nat/utils/reactive/base/observable_base.py
@@ -25,8 +25,8 @@ from nat.utils.reactive.subscription import Subscription
 
  # Covariant type param: An Observable producing type X can also produce
  # a subtype of X.
- _T_out_co = TypeVar("_T_out_co", covariant=True) # pylint: disable=invalid-name
- _T = TypeVar("_T") # pylint: disable=invalid-name
+ _T_out_co = TypeVar("_T_out_co", covariant=True)
+ _T = TypeVar("_T")
 
  OnNext = Callable[[_T], None]
  OnError = Callable[[Exception], None]
nat/utils/reactive/base/observer_base.py
@@ -20,7 +20,7 @@ from typing import TypeVar
 
  # Contravariant type param: An Observer that can accept type X can also
  # accept any supertype of X.
- _T_in_contra = TypeVar("_T_in_contra", contravariant=True) # pylint: disable=invalid-name
+ _T_in_contra = TypeVar("_T_in_contra", contravariant=True)
 
 
  class ObserverBase(Generic[_T_in_contra], ABC):
nat/utils/reactive/observable.py
@@ -24,8 +24,8 @@ from nat.utils.type_utils import override
 
  # Covariant type param: An Observable producing type X can also produce
  # a subtype of X.
- _T_out_co = TypeVar("_T_out_co", covariant=True) # pylint: disable=invalid-name
- _T = TypeVar("_T") # pylint: disable=invalid-name
+ _T_out_co = TypeVar("_T_out_co", covariant=True)
+ _T = TypeVar("_T")
 
  OnNext = Callable[[_T], None]
  OnError = Callable[[Exception], None]
nat/utils/reactive/observer.py
@@ -23,8 +23,8 @@ logger = logging.getLogger(__name__)
 
  # Contravariant type param: An Observer that can accept type X can also
  # accept any supertype of X.
- _T_in_contra = TypeVar("_T_in_contra", contravariant=True) # pylint: disable=invalid-name
- _T = TypeVar("_T") # pylint: disable=invalid-name
+ _T_in_contra = TypeVar("_T_in_contra", contravariant=True)
+ _T = TypeVar("_T")
 
  OnNext = Callable[[_T], None]
  OnError = Callable[[Exception], None]
@@ -64,7 +64,7 @@ class Observer(ObserverBase[_T_in_contra]):
          try:
              self._on_error(exc)
          except Exception as e:
-             logger.exception("Error in on_error callback: %s", e, exc_info=True)
+             logger.exception("Error in on_error callback: %s", e)
 
      def on_complete(self) -> None:
          if not self._stopped:
@@ -73,4 +73,4 @@ class Observer(ObserverBase[_T_in_contra]):
          try:
              self._on_complete()
          except Exception as e:
-             logger.exception("Error in on_complete callback: %s", e, exc_info=True)
+             logger.exception("Error in on_complete callback: %s", e)
nat/utils/reactive/subscription.py
@@ -21,7 +21,7 @@ from typing import TypeVar
  if typing.TYPE_CHECKING:
      from nat.utils.reactive.base.subject_base import SubjectBase
 
- _T = TypeVar("_T") # pylint: disable=invalid-name
+ _T = TypeVar("_T")
 
  OnNext = Callable[[_T], None]
  OnError = Callable[[Exception], None]
nat/utils/settings/global_settings.py
@@ -47,7 +47,7 @@ def configure_registry_channel(config_type: RegistryHandlerBaseConfig, channel_n
              user_input = input(f"{human_prompt}: ")
              model_fields = {}
              model_fields[field] = (info.annotation, ...)
-             DynamicFieldModel = create_model("DynamicFieldModel", **model_fields) # pylint: disable=C0103
+             DynamicFieldModel = create_model("DynamicFieldModel", **model_fields)
              dynamic_inputs = {field: user_input}
 
              try:
@@ -55,7 +55,7 @@ def configure_registry_channel(config_type: RegistryHandlerBaseConfig, channel_n
                  channel_registry_pre[field] = getattr(validated_field_model, field)
                  break
              except Exception as e:
-                 logger.exception(e, exc_info=True)
+                 logger.exception(e)
                  logger.warning("Invalid '%s' input, input must be of type %s.", field, info.annotation)
 
      validated_model = config_type(**channel_registry_pre)
@@ -76,10 +76,9 @@ def add_channel_interative(channel_type: str) -> None:
      registry = GlobalTypeRegistry.get()
 
      try:
-         ChannelConfigType = registry.get_registered_channel_info_by_channel_type( # pylint: disable=C0103
-             channel_type=channel_type).config_type
+         ChannelConfigType = registry.get_registered_channel_info_by_channel_type(channel_type=channel_type).config_type
      except Exception as e:
-         logger.exception("Invalid channel type: %s", e, exc_info=True)
+         logger.exception("Invalid channel type: %s", e)
          return
 
      while (True):
@@ -92,8 +91,7 @@ def add_channel_interative(channel_type: str) -> None:
              settings.channels[channel_name] = {}
              break
 
-     ChannelConfigType = registry.get_registered_channel_info_by_channel_type( # pylint: disable=C0103
-         channel_type=channel_type).config_type
+     ChannelConfigType = registry.get_registered_channel_info_by_channel_type(channel_type=channel_type).config_type
 
      configure_registry_channel(config_type=ChannelConfigType, channel_name=channel_name)
 
@@ -181,7 +179,7 @@ def match_valid_channel(channel_name: str) -> None:
 
      channals_settings = settings.channels
      channel_settings = channals_settings.get(channel_name)
-     ChannelConfigType = registry.get_registered_channel_info_by_channel_type( # pylint: disable=C0103
+     ChannelConfigType = registry.get_registered_channel_info_by_channel_type(
          channel_type=channel_settings.static_type()).config_type
 
      configure_registry_channel(config_type=ChannelConfigType, channel_name=channel_name)
nat/utils/type_converter.py
@@ -90,7 +90,7 @@ class TypeConverter:
          decomposed = DecomposedType(to_type)
 
          # 1) If data is already correct type, return it
-         if to_type is None or decomposed.is_instance((data, to_type)):
+         if to_type is None or decomposed.is_instance(data):
              return data
 
          root = decomposed.root
@@ -198,16 +198,17 @@ class TypeConverter:
          """
          visited = set()
          final = self._try_indirect_conversion(data, to_type, visited)
+         src_type = type(data)
          if final is not None:
              # Warn once if found a chain
-             self._maybe_warn_indirect(type(data), to_type)
+             self._maybe_warn_indirect(src_type, to_type)
              return final
 
          # If no success, try parent's indirect
          if self._parent is not None:
              parent_final = self._parent._try_indirect_convert(data, to_type)
              if parent_final is not None:
-                 self._maybe_warn_indirect(type(data), to_type)
+                 self._maybe_warn_indirect(src_type, to_type)
                  return parent_final
 
          return None