nvidia-nat 1.3.0rc1__py3-none-any.whl → 1.4.0a20251008__py3-none-any.whl
This diff shows the changes between two publicly released versions of the package, as published to their public registry. It is provided for informational purposes only.
- nat/agent/prompt_optimizer/register.py +2 -2
- nat/agent/react_agent/register.py +9 -1
- nat/agent/rewoo_agent/register.py +8 -1
- nat/authentication/oauth2/oauth2_auth_code_flow_provider.py +31 -18
- nat/builder/context.py +22 -6
- nat/cli/commands/mcp/mcp.py +6 -6
- nat/cli/commands/workflow/templates/config.yml.j2 +14 -12
- nat/cli/commands/workflow/templates/register.py.j2 +2 -2
- nat/cli/commands/workflow/templates/workflow.py.j2 +35 -21
- nat/cli/commands/workflow/workflow_commands.py +54 -10
- nat/cli/main.py +3 -0
- nat/data_models/api_server.py +65 -57
- nat/data_models/span.py +41 -3
- nat/experimental/test_time_compute/functions/execute_score_select_function.py +1 -1
- nat/experimental/test_time_compute/functions/ttc_tool_wrapper_function.py +2 -2
- nat/front_ends/fastapi/fastapi_front_end_plugin_worker.py +5 -35
- nat/front_ends/fastapi/message_validator.py +3 -1
- nat/observability/exporter/span_exporter.py +34 -14
- nat/profiler/decorators/framework_wrapper.py +1 -1
- nat/profiler/forecasting/models/linear_model.py +1 -1
- nat/profiler/forecasting/models/random_forest_regressor.py +1 -1
- nat/profiler/inference_optimization/bottleneck_analysis/nested_stack_analysis.py +1 -1
- nat/profiler/inference_optimization/experimental/prefix_span_analysis.py +1 -1
- nat/runtime/runner.py +103 -6
- nat/runtime/session.py +26 -0
- nat/tool/memory_tools/get_memory_tool.py +1 -1
- nat/utils/decorators.py +210 -0
- {nvidia_nat-1.3.0rc1.dist-info → nvidia_nat-1.4.0a20251008.dist-info}/METADATA +1 -3
- {nvidia_nat-1.3.0rc1.dist-info → nvidia_nat-1.4.0a20251008.dist-info}/RECORD +34 -33
- {nvidia_nat-1.3.0rc1.dist-info → nvidia_nat-1.4.0a20251008.dist-info}/WHEEL +0 -0
- {nvidia_nat-1.3.0rc1.dist-info → nvidia_nat-1.4.0a20251008.dist-info}/entry_points.txt +0 -0
- {nvidia_nat-1.3.0rc1.dist-info → nvidia_nat-1.4.0a20251008.dist-info}/licenses/LICENSE-3rd-party.txt +0 -0
- {nvidia_nat-1.3.0rc1.dist-info → nvidia_nat-1.4.0a20251008.dist-info}/licenses/LICENSE.md +0 -0
- {nvidia_nat-1.3.0rc1.dist-info → nvidia_nat-1.4.0a20251008.dist-info}/top_level.txt +0 -0
nat/runtime/runner.py
CHANGED
@@ -15,11 +15,16 @@

 import logging
 import typing
+import uuid
 from enum import Enum

 from nat.builder.context import Context
 from nat.builder.context import ContextState
 from nat.builder.function import Function
+from nat.data_models.intermediate_step import IntermediateStepPayload
+from nat.data_models.intermediate_step import IntermediateStepType
+from nat.data_models.intermediate_step import StreamEventData
+from nat.data_models.intermediate_step import TraceMetadata
 from nat.data_models.invocation_node import InvocationNode
 from nat.observability.exporter_manager import ExporterManager
 from nat.utils.reactive.subject import Subject
@@ -130,17 +135,59 @@ class Runner:
         if (self._state != RunnerState.INITIALIZED):
             raise ValueError("Cannot run the workflow without entering the context")

+        token_run_id = None
+        token_trace_id = None
         try:
             self._state = RunnerState.RUNNING

             if (not self._entry_fn.has_single_output):
                 raise ValueError("Workflow does not support single output")

+            # Establish workflow run and trace identifiers
+            existing_run_id = self._context_state.workflow_run_id.get()
+            existing_trace_id = self._context_state.workflow_trace_id.get()
+
+            workflow_run_id = existing_run_id or str(uuid.uuid4())
+
+            workflow_trace_id = existing_trace_id or uuid.uuid4().int
+
+            token_run_id = self._context_state.workflow_run_id.set(workflow_run_id)
+            token_trace_id = self._context_state.workflow_trace_id.set(workflow_trace_id)
+
+            # Prepare workflow-level intermediate step identifiers
+            workflow_step_uuid = str(uuid.uuid4())
+            workflow_name = getattr(self._entry_fn, 'instance_name', None) or "workflow"
+
             async with self._exporter_manager.start(context_state=self._context_state):
-                #
-
+                # Emit WORKFLOW_START
+                start_metadata = TraceMetadata(
+                    provided_metadata={
+                        "workflow_run_id": workflow_run_id,
+                        "workflow_trace_id": f"{workflow_trace_id:032x}",
+                        "conversation_id": self._context_state.conversation_id.get(),
+                    })
+                self._context.intermediate_step_manager.push_intermediate_step(
+                    IntermediateStepPayload(UUID=workflow_step_uuid,
+                                            event_type=IntermediateStepType.WORKFLOW_START,
+                                            name=workflow_name,
+                                            metadata=start_metadata))
+
+                result = await self._entry_fn.ainvoke(self._input_message, to_type=to_type)  # type: ignore
+
+                # Emit WORKFLOW_END with output
+                end_metadata = TraceMetadata(
+                    provided_metadata={
+                        "workflow_run_id": workflow_run_id,
+                        "workflow_trace_id": f"{workflow_trace_id:032x}",
+                        "conversation_id": self._context_state.conversation_id.get(),
+                    })
+                self._context.intermediate_step_manager.push_intermediate_step(
+                    IntermediateStepPayload(UUID=workflow_step_uuid,
+                                            event_type=IntermediateStepType.WORKFLOW_END,
+                                            name=workflow_name,
+                                            metadata=end_metadata,
+                                            data=StreamEventData(output=result)))

-                # Close the intermediate stream
                 event_stream = self._context_state.event_stream.get()
                 if event_stream:
                     event_stream.on_complete()
@@ -155,25 +202,71 @@ class Runner:
            if event_stream:
                event_stream.on_complete()
            self._state = RunnerState.FAILED
-
            raise
+        finally:
+            if token_run_id is not None:
+                self._context_state.workflow_run_id.reset(token_run_id)
+            if token_trace_id is not None:
+                self._context_state.workflow_trace_id.reset(token_trace_id)

     async def result_stream(self, to_type: type | None = None):

         if (self._state != RunnerState.INITIALIZED):
             raise ValueError("Cannot run the workflow without entering the context")

+        token_run_id = None
+        token_trace_id = None
         try:
             self._state = RunnerState.RUNNING

             if (not self._entry_fn.has_streaming_output):
                 raise ValueError("Workflow does not support streaming output")

+            # Establish workflow run and trace identifiers
+            existing_run_id = self._context_state.workflow_run_id.get()
+            existing_trace_id = self._context_state.workflow_trace_id.get()
+
+            workflow_run_id = existing_run_id or str(uuid.uuid4())
+
+            workflow_trace_id = existing_trace_id or uuid.uuid4().int
+
+            token_run_id = self._context_state.workflow_run_id.set(workflow_run_id)
+            token_trace_id = self._context_state.workflow_trace_id.set(workflow_trace_id)
+
+            # Prepare workflow-level intermediate step identifiers
+            workflow_step_uuid = str(uuid.uuid4())
+            workflow_name = getattr(self._entry_fn, 'instance_name', None) or "workflow"
+
             # Run the workflow
             async with self._exporter_manager.start(context_state=self._context_state):
-
+                # Emit WORKFLOW_START
+                start_metadata = TraceMetadata(
+                    provided_metadata={
+                        "workflow_run_id": workflow_run_id,
+                        "workflow_trace_id": f"{workflow_trace_id:032x}",
+                        "conversation_id": self._context_state.conversation_id.get(),
+                    })
+                self._context.intermediate_step_manager.push_intermediate_step(
+                    IntermediateStepPayload(UUID=workflow_step_uuid,
+                                            event_type=IntermediateStepType.WORKFLOW_START,
+                                            name=workflow_name,
+                                            metadata=start_metadata))
+
+                async for m in self._entry_fn.astream(self._input_message, to_type=to_type):  # type: ignore
                     yield m

+                # Emit WORKFLOW_END
+                end_metadata = TraceMetadata(
+                    provided_metadata={
+                        "workflow_run_id": workflow_run_id,
+                        "workflow_trace_id": f"{workflow_trace_id:032x}",
+                        "conversation_id": self._context_state.conversation_id.get(),
+                    })
+                self._context.intermediate_step_manager.push_intermediate_step(
+                    IntermediateStepPayload(UUID=workflow_step_uuid,
+                                            event_type=IntermediateStepType.WORKFLOW_END,
+                                            name=workflow_name,
+                                            metadata=end_metadata))
                 self._state = RunnerState.COMPLETED

                 # Close the intermediate stream
@@ -187,8 +280,12 @@ class Runner:
            if event_stream:
                event_stream.on_complete()
            self._state = RunnerState.FAILED
-
            raise
+        finally:
+            if token_run_id is not None:
+                self._context_state.workflow_run_id.reset(token_run_id)
+            if token_trace_id is not None:
+                self._context_state.workflow_trace_id.reset(token_trace_id)


 # Compatibility aliases with previous releases
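The runner changes above reuse any caller-provided workflow_run_id / workflow_trace_id from context variables, mint new ones otherwise, and always restore the previous values in a finally block. Below is a minimal, self-contained sketch of that set/reset token pattern using plain contextvars; the variables here stand in for fields on NAT's ContextState and are not the toolkit's actual API.

import contextvars
import uuid

# Stand-ins for the ContextState fields the runner manipulates (illustrative only).
workflow_run_id: contextvars.ContextVar[str | None] = contextvars.ContextVar("workflow_run_id", default=None)
workflow_trace_id: contextvars.ContextVar[int | None] = contextvars.ContextVar("workflow_trace_id", default=None)


def run_with_workflow_ids(fn):
    """Reuse caller-provided IDs if present, otherwise create new ones, and
    restore the previous values afterwards (mirrors the try/finally added to
    Runner.result and Runner.result_stream)."""
    run_id = workflow_run_id.get() or str(uuid.uuid4())
    trace_id = workflow_trace_id.get() or uuid.uuid4().int

    token_run = workflow_run_id.set(run_id)
    token_trace = workflow_trace_id.set(trace_id)
    try:
        # The 128-bit trace ID is rendered as 32 hex characters, as in the diff.
        print(f"start run={run_id} trace={trace_id:032x}")
        return fn()
    finally:
        workflow_run_id.reset(token_run)
        workflow_trace_id.reset(token_trace)


if __name__ == "__main__":
    run_with_workflow_ids(lambda: "result")

Because reset() is driven by the tokens returned from set(), nested or concurrent runs each see their own IDs and outer values are untouched.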
nat/runtime/session.py
CHANGED
@@ -16,6 +16,7 @@
 import asyncio
 import contextvars
 import typing
+import uuid
 from collections.abc import Awaitable
 from collections.abc import Callable
 from contextlib import asynccontextmanager
@@ -161,6 +162,31 @@ class SessionManager:
         if request.headers.get("user-message-id"):
             self._context_state.user_message_id.set(request.headers["user-message-id"])

+        # W3C Trace Context header: traceparent: 00-<trace-id>-<span-id>-<flags>
+        traceparent = request.headers.get("traceparent")
+        if traceparent:
+            try:
+                parts = traceparent.split("-")
+                if len(parts) >= 4:
+                    trace_id_hex = parts[1]
+                    if len(trace_id_hex) == 32:
+                        trace_id_int = uuid.UUID(trace_id_hex).int
+                        self._context_state.workflow_trace_id.set(trace_id_int)
+            except Exception:
+                pass
+
+        if not self._context_state.workflow_trace_id.get():
+            workflow_trace_id = request.headers.get("workflow-trace-id")
+            if workflow_trace_id:
+                try:
+                    self._context_state.workflow_trace_id.set(uuid.UUID(workflow_trace_id).int)
+                except Exception:
+                    pass
+
+        workflow_run_id = request.headers.get("workflow-run-id")
+        if workflow_run_id:
+            self._context_state.workflow_run_id.set(workflow_run_id)
+
     def set_metadata_from_websocket(self,
                                     websocket: WebSocket,
                                     user_message_id: str | None,
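The session changes read the W3C traceparent header (format version-traceid-spanid-flags), falling back to workflow-trace-id and workflow-run-id headers. A small standalone sketch of extracting the 128-bit trace ID from a traceparent value follows; the helper name and header dict are illustrative, not part of the toolkit.

import uuid


def parse_traceparent(traceparent: str) -> int | None:
    """Extract the 128-bit trace ID from a W3C traceparent header value.

    Example value: '00-4bf92f3577b34da6a3ce929d0e0e4736-00f067aa0ba902b7-01'.
    Returns the trace ID as an int, or None if the value is malformed,
    mirroring the lenient parsing added in SessionManager.
    """
    try:
        parts = traceparent.split("-")
        if len(parts) >= 4 and len(parts[1]) == 32:
            return uuid.UUID(parts[1]).int
    except (ValueError, AttributeError):
        pass
    return None


# Example: a caller propagating its own trace context to the HTTP endpoint.
headers = {
    "traceparent": "00-4bf92f3577b34da6a3ce929d0e0e4736-00f067aa0ba902b7-01",
    "workflow-run-id": "my-run-123",  # optional explicit run ID, per the diff
}
trace_id = parse_traceparent(headers["traceparent"])
assert trace_id is not None
assert f"{trace_id:032x}" == "4bf92f3577b34da6a3ce929d0e0e4736"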
nat/tool/memory_tools/get_memory_tool.py
CHANGED
@@ -67,6 +67,6 @@ async def get_memory_tool(config: GetToolConfig, builder: Builder):

     except Exception as e:

-        raise ToolException(f"Error
+        raise ToolException(f"Error retrieving memory: {e}") from e

     yield FunctionInfo.from_fn(_arun, description=config.description)
nat/utils/decorators.py
ADDED
@@ -0,0 +1,210 @@
+# SPDX-FileCopyrightText: Copyright (c) 2025, NVIDIA CORPORATION & AFFILIATES. All rights reserved.
+# SPDX-License-Identifier: Apache-2.0
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""Deprecation utilities.
+
+This module provides helpers to standardize deprecation signaling across the
+codebase:
+
+- ``issue_deprecation_warning``: Builds and emits a single deprecation message
+  per function using the standard logging pipeline.
+- ``deprecated``: A decorator that wraps sync/async functions and generators to
+  log a one-time deprecation message upon first use. It supports optional
+  metadata, a planned removal version, a suggested replacement, and an
+  optional feature name label.
+
+Messages are emitted via ``logging.getLogger(__name__).warning`` (not
+``warnings.warn``) so they appear in normal application logs and respect global
+logging configuration. Each unique function logs at most once per process.
+"""
+
+import functools
+import inspect
+import logging
+from collections.abc import AsyncGenerator
+from collections.abc import Callable
+from collections.abc import Generator
+from typing import Any
+from typing import TypeVar
+from typing import overload
+
+logger = logging.getLogger(__name__)
+
+_warning_issued = set()
+
+# Type variables for overloads
+F = TypeVar('F', bound=Callable[..., Any])
+
+
+def issue_deprecation_warning(function_name: str,
+                              removal_version: str | None = None,
+                              replacement: str | None = None,
+                              reason: str | None = None,
+                              feature_name: str | None = None,
+                              metadata: dict[str, Any] | None = None) -> None:
+    """
+    Log a deprecation warning message for the function.
+
+    A warning is emitted only once per function. When a ``metadata`` dict
+    is supplied, it is appended to the log entry to provide extra context
+    (e.g., version, author, feature flag).
+
+    Args:
+        function_name: The name of the deprecated function
+        removal_version: The version when the function will be removed
+        replacement: What to use instead of this function
+        reason: Why the function is being deprecated
+        feature_name: Optional name of the feature that is deprecated
+        metadata: Optional dictionary of metadata to log with the warning
+    """
+    if function_name not in _warning_issued:
+        # Build the deprecation message
+        if feature_name:
+            warning_message = f"{feature_name} is deprecated"
+        else:
+            warning_message = f"Function {function_name} is deprecated"
+
+        if removal_version:
+            warning_message += f" and will be removed in version {removal_version}"
+        else:
+            warning_message += " and will be removed in a future release"
+
+        warning_message += "."
+
+        if reason:
+            warning_message += f" Reason: {reason}."
+
+        if replacement:
+            warning_message += f" Use '{replacement}' instead."
+
+        if metadata:
+            warning_message += f" | Metadata: {metadata}"
+
+        # Issue warning and save function name to avoid duplicate warnings
+        logger.warning(warning_message)
+        _warning_issued.add(function_name)
+
+
+# Overloads for different function types
+@overload
+def deprecated(func: F,
+               *,
+               removal_version: str | None = None,
+               replacement: str | None = None,
+               reason: str | None = None,
+               feature_name: str | None = None,
+               metadata: dict[str, Any] | None = None) -> F:
+    """Overload for direct decorator usage (when called without parentheses)."""
+    ...
+
+
+@overload
+def deprecated(*,
+               removal_version: str | None = None,
+               replacement: str | None = None,
+               reason: str | None = None,
+               feature_name: str | None = None,
+               metadata: dict[str, Any] | None = None) -> Callable[[F], F]:
+    """Overload for decorator factory usage (when called with parentheses)."""
+    ...
+
+
+def deprecated(func: Any = None,
+               *,
+               removal_version: str | None = None,
+               replacement: str | None = None,
+               reason: str | None = None,
+               feature_name: str | None = None,
+               metadata: dict[str, Any] | None = None) -> Any:
+    """
+    Decorator that can wrap any type of function (sync, async, generator,
+    async generator) and logs a deprecation warning.
+
+    Args:
+        func: The function to be decorated.
+        removal_version: The version when the function will be removed
+        replacement: What to use instead of this function
+        reason: Why the function is being deprecated
+        feature_name: Optional name of the feature that is deprecated. If provided, the warning will be
+            prefixed with "The <feature_name> feature is deprecated".
+        metadata: Optional dictionary of metadata to log with the warning. This can include information
+            like version, author, etc. If provided, the metadata will be
+            logged alongside the deprecation warning.
+    """
+    function_name: str = f"{func.__module__}.{func.__qualname__}" if func else "<unknown_function>"
+
+    # If called as @deprecated(...) but not immediately passed a function
+    if func is None:
+
+        def decorator_wrapper(actual_func):
+            return deprecated(actual_func,
+                              removal_version=removal_version,
+                              replacement=replacement,
+                              reason=reason,
+                              feature_name=feature_name,
+                              metadata=metadata)
+
+        return decorator_wrapper
+
+    # --- Validate metadata ---
+    if metadata is not None:
+        if not isinstance(metadata, dict):
+            raise TypeError("metadata must be a dict[str, Any].")
+        if any(not isinstance(k, str) for k in metadata.keys()):
+            raise TypeError("All metadata keys must be strings.")
+
+    # --- Now detect the function type and wrap accordingly ---
+    if inspect.isasyncgenfunction(func):
+        # ---------------------
+        # ASYNC GENERATOR
+        # ---------------------
+
+        @functools.wraps(func)
+        async def async_gen_wrapper(*args, **kwargs) -> AsyncGenerator[Any, Any]:
+            issue_deprecation_warning(function_name, removal_version, replacement, reason, feature_name, metadata)
+            async for item in func(*args, **kwargs):
+                yield item  # yield the original item
+
+        return async_gen_wrapper
+
+    if inspect.iscoroutinefunction(func):
+        # ---------------------
+        # ASYNC FUNCTION
+        # ---------------------
+        @functools.wraps(func)
+        async def async_wrapper(*args, **kwargs) -> Any:
+            issue_deprecation_warning(function_name, removal_version, replacement, reason, feature_name, metadata)
+            result = await func(*args, **kwargs)
+            return result
+
+        return async_wrapper
+
+    if inspect.isgeneratorfunction(func):
+        # ---------------------
+        # SYNC GENERATOR
+        # ---------------------
+        @functools.wraps(func)
+        def sync_gen_wrapper(*args, **kwargs) -> Generator[Any, Any, Any]:
+            issue_deprecation_warning(function_name, removal_version, replacement, reason, feature_name, metadata)
+            yield from func(*args, **kwargs)  # yield the original item
+
+        return sync_gen_wrapper
+
+    @functools.wraps(func)
+    def sync_wrapper(*args, **kwargs) -> Any:
+        issue_deprecation_warning(function_name, removal_version, replacement, reason, feature_name, metadata)
+        result = func(*args, **kwargs)
+        return result
+
+    return sync_wrapper
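The new decorator accepts both bare and parenthesized usage and wraps sync, async, generator, and async-generator functions. A brief usage sketch based on the file above; the decorated function names are illustrative.

from nat.utils.decorators import deprecated


@deprecated  # bare usage: logs a generic one-time warning on first call
def old_helper(x: int) -> int:
    return x * 2


@deprecated(removal_version="1.5.0",
            replacement="new_tool_lookup",
            reason="superseded by the async lookup API",
            feature_name="legacy tool lookup",
            metadata={"owner": "tools-team"})
def lookup_tool(name: str) -> str:
    return f"tool:{name}"


old_helper(2)       # deprecation warning logged via the module logger
old_helper(3)       # no second warning for the same function in this process
lookup_tool("rag")  # warning names the feature, removal version, and replacement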
{nvidia_nat-1.3.0rc1.dist-info → nvidia_nat-1.4.0a20251008.dist-info}/METADATA
CHANGED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: nvidia-nat
-Version: 1.3.0rc1
+Version: 1.4.0a20251008
 Summary: NVIDIA NeMo Agent toolkit
 Author: NVIDIA Corporation
 Maintainer: NVIDIA Corporation
@@ -296,12 +296,10 @@ Requires-Dist: nat_alert_triage_agent; extra == "examples"
 Requires-Dist: nat_automated_description_generation; extra == "examples"
 Requires-Dist: nat_email_phishing_analyzer; extra == "examples"
 Requires-Dist: nat_multi_frameworks; extra == "examples"
-Requires-Dist: nat_first_search_agent; extra == "examples"
 Requires-Dist: nat_plot_charts; extra == "examples"
 Requires-Dist: nat_por_to_jiratickets; extra == "examples"
 Requires-Dist: nat_profiler_agent; extra == "examples"
 Requires-Dist: nat_redact_pii; extra == "examples"
-Requires-Dist: nat_retail_sales_agent; extra == "examples"
 Requires-Dist: nat_router_agent; extra == "examples"
 Requires-Dist: nat_semantic_kernel_demo; extra == "examples"
 Requires-Dist: nat_sequential_executor; extra == "examples"
|