nvidia-nat 1.3.0.dev2__py3-none-any.whl → 1.3.0rc2__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- aiq/__init__.py +2 -2
- nat/agent/base.py +24 -15
- nat/agent/dual_node.py +9 -4
- nat/agent/prompt_optimizer/prompt.py +68 -0
- nat/agent/prompt_optimizer/register.py +149 -0
- nat/agent/react_agent/agent.py +79 -47
- nat/agent/react_agent/register.py +50 -22
- nat/agent/reasoning_agent/reasoning_agent.py +11 -9
- nat/agent/register.py +1 -1
- nat/agent/rewoo_agent/agent.py +326 -148
- nat/agent/rewoo_agent/prompt.py +19 -22
- nat/agent/rewoo_agent/register.py +54 -27
- nat/agent/tool_calling_agent/agent.py +84 -28
- nat/agent/tool_calling_agent/register.py +51 -28
- nat/authentication/api_key/api_key_auth_provider.py +2 -2
- nat/authentication/credential_validator/bearer_token_validator.py +557 -0
- nat/authentication/http_basic_auth/http_basic_auth_provider.py +1 -1
- nat/authentication/interfaces.py +5 -2
- nat/authentication/oauth2/oauth2_auth_code_flow_provider.py +69 -36
- nat/authentication/oauth2/oauth2_resource_server_config.py +124 -0
- nat/authentication/register.py +0 -1
- nat/builder/builder.py +56 -24
- nat/builder/component_utils.py +9 -5
- nat/builder/context.py +68 -17
- nat/builder/eval_builder.py +16 -11
- nat/builder/framework_enum.py +1 -0
- nat/builder/front_end.py +1 -1
- nat/builder/function.py +378 -8
- nat/builder/function_base.py +3 -3
- nat/builder/function_info.py +6 -8
- nat/builder/user_interaction_manager.py +2 -2
- nat/builder/workflow.py +13 -1
- nat/builder/workflow_builder.py +281 -76
- nat/cli/cli_utils/config_override.py +2 -2
- nat/cli/commands/evaluate.py +1 -1
- nat/cli/commands/info/info.py +16 -6
- nat/cli/commands/info/list_channels.py +1 -1
- nat/cli/commands/info/list_components.py +7 -8
- nat/cli/commands/mcp/__init__.py +14 -0
- nat/cli/commands/mcp/mcp.py +986 -0
- nat/cli/commands/object_store/__init__.py +14 -0
- nat/cli/commands/object_store/object_store.py +227 -0
- nat/cli/commands/optimize.py +90 -0
- nat/cli/commands/registry/publish.py +2 -2
- nat/cli/commands/registry/pull.py +2 -2
- nat/cli/commands/registry/remove.py +2 -2
- nat/cli/commands/registry/search.py +15 -17
- nat/cli/commands/start.py +16 -5
- nat/cli/commands/uninstall.py +1 -1
- nat/cli/commands/workflow/templates/config.yml.j2 +14 -13
- nat/cli/commands/workflow/templates/pyproject.toml.j2 +4 -1
- nat/cli/commands/workflow/templates/register.py.j2 +2 -3
- nat/cli/commands/workflow/templates/workflow.py.j2 +35 -21
- nat/cli/commands/workflow/workflow_commands.py +62 -22
- nat/cli/entrypoint.py +8 -10
- nat/cli/main.py +3 -0
- nat/cli/register_workflow.py +38 -4
- nat/cli/type_registry.py +75 -6
- nat/control_flow/__init__.py +0 -0
- nat/control_flow/register.py +20 -0
- nat/control_flow/router_agent/__init__.py +0 -0
- nat/control_flow/router_agent/agent.py +329 -0
- nat/control_flow/router_agent/prompt.py +48 -0
- nat/control_flow/router_agent/register.py +91 -0
- nat/control_flow/sequential_executor.py +166 -0
- nat/data_models/agent.py +34 -0
- nat/data_models/api_server.py +74 -66
- nat/data_models/authentication.py +23 -9
- nat/data_models/common.py +1 -1
- nat/data_models/component.py +2 -0
- nat/data_models/component_ref.py +11 -0
- nat/data_models/config.py +41 -17
- nat/data_models/dataset_handler.py +1 -1
- nat/data_models/discovery_metadata.py +4 -4
- nat/data_models/evaluate.py +4 -1
- nat/data_models/function.py +34 -0
- nat/data_models/function_dependencies.py +14 -6
- nat/data_models/gated_field_mixin.py +242 -0
- nat/data_models/intermediate_step.py +3 -3
- nat/data_models/optimizable.py +119 -0
- nat/data_models/optimizer.py +149 -0
- nat/data_models/span.py +41 -3
- nat/data_models/swe_bench_model.py +1 -1
- nat/data_models/temperature_mixin.py +44 -0
- nat/data_models/thinking_mixin.py +86 -0
- nat/data_models/top_p_mixin.py +44 -0
- nat/embedder/nim_embedder.py +1 -1
- nat/embedder/openai_embedder.py +1 -1
- nat/embedder/register.py +0 -1
- nat/eval/config.py +3 -1
- nat/eval/dataset_handler/dataset_handler.py +71 -7
- nat/eval/evaluate.py +86 -31
- nat/eval/evaluator/base_evaluator.py +1 -1
- nat/eval/evaluator/evaluator_model.py +13 -0
- nat/eval/intermediate_step_adapter.py +1 -1
- nat/eval/rag_evaluator/evaluate.py +2 -2
- nat/eval/rag_evaluator/register.py +3 -3
- nat/eval/register.py +4 -1
- nat/eval/remote_workflow.py +3 -3
- nat/eval/runtime_evaluator/__init__.py +14 -0
- nat/eval/runtime_evaluator/evaluate.py +123 -0
- nat/eval/runtime_evaluator/register.py +100 -0
- nat/eval/swe_bench_evaluator/evaluate.py +6 -6
- nat/eval/trajectory_evaluator/evaluate.py +1 -1
- nat/eval/trajectory_evaluator/register.py +1 -1
- nat/eval/tunable_rag_evaluator/evaluate.py +4 -7
- nat/eval/utils/eval_trace_ctx.py +89 -0
- nat/eval/utils/weave_eval.py +18 -9
- nat/experimental/decorators/experimental_warning_decorator.py +27 -7
- nat/experimental/test_time_compute/functions/plan_select_execute_function.py +7 -3
- nat/experimental/test_time_compute/functions/ttc_tool_orchestration_function.py +3 -3
- nat/experimental/test_time_compute/functions/ttc_tool_wrapper_function.py +1 -1
- nat/experimental/test_time_compute/models/strategy_base.py +5 -4
- nat/experimental/test_time_compute/register.py +0 -1
- nat/experimental/test_time_compute/selection/llm_based_output_merging_selector.py +1 -3
- nat/front_ends/console/authentication_flow_handler.py +82 -30
- nat/front_ends/console/console_front_end_plugin.py +8 -5
- nat/front_ends/fastapi/auth_flow_handlers/websocket_flow_handler.py +52 -17
- nat/front_ends/fastapi/dask_client_mixin.py +65 -0
- nat/front_ends/fastapi/fastapi_front_end_config.py +36 -5
- nat/front_ends/fastapi/fastapi_front_end_controller.py +4 -4
- nat/front_ends/fastapi/fastapi_front_end_plugin.py +135 -4
- nat/front_ends/fastapi/fastapi_front_end_plugin_worker.py +452 -282
- nat/front_ends/fastapi/job_store.py +518 -99
- nat/front_ends/fastapi/main.py +11 -19
- nat/front_ends/fastapi/message_handler.py +13 -14
- nat/front_ends/fastapi/message_validator.py +19 -19
- nat/front_ends/fastapi/response_helpers.py +4 -4
- nat/front_ends/fastapi/step_adaptor.py +2 -2
- nat/front_ends/fastapi/utils.py +57 -0
- nat/front_ends/mcp/introspection_token_verifier.py +73 -0
- nat/front_ends/mcp/mcp_front_end_config.py +10 -1
- nat/front_ends/mcp/mcp_front_end_plugin.py +45 -13
- nat/front_ends/mcp/mcp_front_end_plugin_worker.py +116 -8
- nat/front_ends/mcp/tool_converter.py +44 -14
- nat/front_ends/register.py +0 -1
- nat/front_ends/simple_base/simple_front_end_plugin_base.py +3 -1
- nat/llm/aws_bedrock_llm.py +24 -12
- nat/llm/azure_openai_llm.py +13 -6
- nat/llm/litellm_llm.py +69 -0
- nat/llm/nim_llm.py +20 -8
- nat/llm/openai_llm.py +14 -6
- nat/llm/register.py +4 -1
- nat/llm/utils/env_config_value.py +2 -3
- nat/llm/utils/thinking.py +215 -0
- nat/meta/pypi.md +9 -9
- nat/object_store/register.py +0 -1
- nat/observability/exporter/base_exporter.py +3 -3
- nat/observability/exporter/file_exporter.py +1 -1
- nat/observability/exporter/processing_exporter.py +309 -81
- nat/observability/exporter/span_exporter.py +35 -15
- nat/observability/exporter_manager.py +7 -7
- nat/observability/mixin/file_mixin.py +7 -7
- nat/observability/mixin/redaction_config_mixin.py +42 -0
- nat/observability/mixin/tagging_config_mixin.py +62 -0
- nat/observability/mixin/type_introspection_mixin.py +420 -107
- nat/observability/processor/batching_processor.py +5 -7
- nat/observability/processor/falsy_batch_filter_processor.py +55 -0
- nat/observability/processor/processor.py +3 -0
- nat/observability/processor/processor_factory.py +70 -0
- nat/observability/processor/redaction/__init__.py +24 -0
- nat/observability/processor/redaction/contextual_redaction_processor.py +125 -0
- nat/observability/processor/redaction/contextual_span_redaction_processor.py +66 -0
- nat/observability/processor/redaction/redaction_processor.py +177 -0
- nat/observability/processor/redaction/span_header_redaction_processor.py +92 -0
- nat/observability/processor/span_tagging_processor.py +68 -0
- nat/observability/register.py +6 -4
- nat/profiler/calc/calc_runner.py +3 -4
- nat/profiler/callbacks/agno_callback_handler.py +1 -1
- nat/profiler/callbacks/langchain_callback_handler.py +6 -6
- nat/profiler/callbacks/llama_index_callback_handler.py +3 -3
- nat/profiler/callbacks/semantic_kernel_callback_handler.py +3 -3
- nat/profiler/data_frame_row.py +1 -1
- nat/profiler/decorators/framework_wrapper.py +62 -13
- nat/profiler/decorators/function_tracking.py +160 -3
- nat/profiler/forecasting/models/forecasting_base_model.py +3 -1
- nat/profiler/forecasting/models/linear_model.py +1 -1
- nat/profiler/forecasting/models/random_forest_regressor.py +1 -1
- nat/profiler/inference_optimization/bottleneck_analysis/nested_stack_analysis.py +1 -1
- nat/profiler/inference_optimization/bottleneck_analysis/simple_stack_analysis.py +1 -1
- nat/profiler/inference_optimization/data_models.py +3 -3
- nat/profiler/inference_optimization/experimental/prefix_span_analysis.py +8 -9
- nat/profiler/inference_optimization/token_uniqueness.py +1 -1
- nat/profiler/parameter_optimization/__init__.py +0 -0
- nat/profiler/parameter_optimization/optimizable_utils.py +93 -0
- nat/profiler/parameter_optimization/optimizer_runtime.py +67 -0
- nat/profiler/parameter_optimization/parameter_optimizer.py +153 -0
- nat/profiler/parameter_optimization/parameter_selection.py +107 -0
- nat/profiler/parameter_optimization/pareto_visualizer.py +380 -0
- nat/profiler/parameter_optimization/prompt_optimizer.py +384 -0
- nat/profiler/parameter_optimization/update_helpers.py +66 -0
- nat/profiler/profile_runner.py +14 -9
- nat/profiler/utils.py +4 -2
- nat/registry_handlers/local/local_handler.py +2 -2
- nat/registry_handlers/package_utils.py +1 -2
- nat/registry_handlers/pypi/pypi_handler.py +23 -26
- nat/registry_handlers/register.py +3 -4
- nat/registry_handlers/rest/rest_handler.py +12 -13
- nat/retriever/milvus/retriever.py +2 -2
- nat/retriever/nemo_retriever/retriever.py +1 -1
- nat/retriever/register.py +0 -1
- nat/runtime/loader.py +2 -2
- nat/runtime/runner.py +106 -8
- nat/runtime/session.py +69 -8
- nat/settings/global_settings.py +16 -5
- nat/tool/chat_completion.py +5 -2
- nat/tool/code_execution/local_sandbox/local_sandbox_server.py +3 -3
- nat/tool/datetime_tools.py +49 -9
- nat/tool/document_search.py +2 -2
- nat/tool/github_tools.py +450 -0
- nat/tool/memory_tools/get_memory_tool.py +1 -1
- nat/tool/nvidia_rag.py +1 -1
- nat/tool/register.py +2 -9
- nat/tool/retriever.py +3 -2
- nat/utils/callable_utils.py +70 -0
- nat/utils/data_models/schema_validator.py +3 -3
- nat/utils/decorators.py +210 -0
- nat/utils/exception_handlers/automatic_retries.py +104 -51
- nat/utils/exception_handlers/schemas.py +1 -1
- nat/utils/io/yaml_tools.py +2 -2
- nat/utils/log_levels.py +25 -0
- nat/utils/reactive/base/observable_base.py +2 -2
- nat/utils/reactive/base/observer_base.py +1 -1
- nat/utils/reactive/observable.py +2 -2
- nat/utils/reactive/observer.py +4 -4
- nat/utils/reactive/subscription.py +1 -1
- nat/utils/settings/global_settings.py +6 -8
- nat/utils/type_converter.py +4 -3
- nat/utils/type_utils.py +9 -5
- {nvidia_nat-1.3.0.dev2.dist-info → nvidia_nat-1.3.0rc2.dist-info}/METADATA +42 -18
- {nvidia_nat-1.3.0.dev2.dist-info → nvidia_nat-1.3.0rc2.dist-info}/RECORD +238 -196
- {nvidia_nat-1.3.0.dev2.dist-info → nvidia_nat-1.3.0rc2.dist-info}/entry_points.txt +1 -0
- nat/cli/commands/info/list_mcp.py +0 -304
- nat/tool/github_tools/create_github_commit.py +0 -133
- nat/tool/github_tools/create_github_issue.py +0 -87
- nat/tool/github_tools/create_github_pr.py +0 -106
- nat/tool/github_tools/get_github_file.py +0 -106
- nat/tool/github_tools/get_github_issue.py +0 -166
- nat/tool/github_tools/get_github_pr.py +0 -256
- nat/tool/github_tools/update_github_issue.py +0 -100
- nat/tool/mcp/exceptions.py +0 -142
- nat/tool/mcp/mcp_client.py +0 -255
- nat/tool/mcp/mcp_tool.py +0 -96
- nat/utils/exception_handlers/mcp.py +0 -211
- /nat/{tool/github_tools → agent/prompt_optimizer}/__init__.py +0 -0
- /nat/{tool/mcp → authentication/credential_validator}/__init__.py +0 -0
- {nvidia_nat-1.3.0.dev2.dist-info → nvidia_nat-1.3.0rc2.dist-info}/WHEEL +0 -0
- {nvidia_nat-1.3.0.dev2.dist-info → nvidia_nat-1.3.0rc2.dist-info}/licenses/LICENSE-3rd-party.txt +0 -0
- {nvidia_nat-1.3.0.dev2.dist-info → nvidia_nat-1.3.0rc2.dist-info}/licenses/LICENSE.md +0 -0
- {nvidia_nat-1.3.0.dev2.dist-info → nvidia_nat-1.3.0rc2.dist-info}/top_level.txt +0 -0
@@ -42,15 +42,14 @@ logger = logging.getLogger(__name__)
 class RestRegistryHandler(AbstractRegistryHandler):
     """A registry handler for interactions with a remote REST registry."""

-    def __init__(
-        …
-                 remove_route: str = ""):
+    def __init__(self,
+                 endpoint: str,
+                 token: str,
+                 timeout: int = 30,
+                 publish_route: str = "",
+                 pull_route: str = "",
+                 search_route: str = "",
+                 remove_route: str = ""):
         super().__init__()
         self._endpoint = endpoint.rstrip("/")
         self._timeout = timeout

@@ -89,7 +88,7 @@ class RestRegistryHandler(AbstractRegistryHandler):
             validated_publish_response = PublishResponse(status={
                 "status": StatusEnum.ERROR, "message": msg, "action": ActionEnum.PUBLISH
             })
-            logger.exception(validated_publish_response.status.message …
+            logger.exception(validated_publish_response.status.message)

         yield validated_publish_response

@@ -156,7 +155,7 @@ class RestRegistryHandler(AbstractRegistryHandler):
             validated_pull_response = PullResponse(status={
                 "status": StatusEnum.ERROR, "message": msg, "action": ActionEnum.PULL
             })
-            logger.exception(validated_pull_response.status.message …
+            logger.exception(validated_pull_response.status.message)

         yield validated_pull_response

@@ -194,7 +193,7 @@ class RestRegistryHandler(AbstractRegistryHandler):
                 "message": msg,
                 "action": ActionEnum.SEARCH
             })
-            logger.exception(validated_search_response.status.message …
+            logger.exception(validated_search_response.status.message)

         yield validated_search_response

@@ -229,7 +228,7 @@ class RestRegistryHandler(AbstractRegistryHandler):
             validated_remove_response = RemoveResponse(status={
                 "status": StatusEnum.ERROR, "message": msg, "action": ActionEnum.REMOVE
             })
-            logger.exception(validated_remove_response.status.message …
+            logger.exception(validated_remove_response.status.message)

         yield validated_remove_response
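The `__init__` rework above replaces the old signature with explicit keyword parameters. A minimal usage sketch, assuming illustrative endpoint, token, and route values (they are not package defaults):

```python
# Illustrative only: the endpoint, token, and route names are placeholders.
from nat.registry_handlers.rest.rest_handler import RestRegistryHandler

handler = RestRegistryHandler(
    endpoint="https://registry.example.com/",  # trailing "/" is stripped in __init__
    token="example-token",
    timeout=30,
    publish_route="publish",
    pull_route="pull",
    search_route="search",
    remove_route="remove",
)
```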
nat/retriever/milvus/retriever.py
CHANGED

@@ -154,7 +154,7 @@ class MilvusRetriever(Retriever):
             return _wrap_milvus_results(results, content_field=self.content_field)

         except Exception as e:
-            logger.…
+            logger.error("Exception when retrieving results from milvus for query %s: %s", query, e)
             raise RetrieverError(f"Error when retrieving documents from {collection_name} for query '{query}'") from e

     async def _search(self,

@@ -214,7 +214,7 @@ def _wrap_milvus_results(res: list[Hit], content_field: str):


 def _wrap_milvus_single_results(res: Hit | dict, content_field: str) -> Document:
-    if not isinstance(res, …
+    if not isinstance(res, Hit | dict):
         raise ValueError(f"Milvus search returned object of type {type(res)}. Expected 'Hit' or 'dict'.")

     if isinstance(res, Hit):
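The `_wrap_milvus_single_results` fix passes a PEP 604 union directly to `isinstance`, which Python 3.10+ accepts. A standalone sketch of the same pattern:

```python
# Standalone sketch of isinstance() with a PEP 604 union (Python 3.10+).
def describe(value: int | str) -> str:
    if not isinstance(value, int | str):
        raise ValueError(f"Expected 'int' or 'str', got {type(value)}")
    return f"{type(value).__name__}: {value}"

print(describe(42))       # int: 42
print(describe("hello"))  # str: hello
```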
nat/retriever/nemo_retriever/retriever.py
CHANGED

@@ -143,7 +143,7 @@ class NemoRetriever(Retriever):
             return _wrap_nemo_results(output=output, content_field="content")

         except Exception as e:
-            logger.…
+            logger.error("Encountered an error when retrieving results from Nemo Retriever: %s", e)
             raise CollectionUnavailableError(
                 f"Error when retrieving documents from {collection_name} for query '{query}'") from e
nat/retriever/register.py
CHANGED
nat/runtime/loader.py
CHANGED
@@ -114,7 +114,7 @@ async def load_workflow(config_file: StrPath, max_concurrency: int = -1):
     # Must yield the workflow function otherwise it cleans up
     async with WorkflowBuilder.from_config(config=config) as workflow:

-        yield SessionManager(workflow.build(), max_concurrency=max_concurrency)
+        yield SessionManager(await workflow.build(), max_concurrency=max_concurrency)


 @lru_cache

@@ -210,7 +210,7 @@ def discover_and_register_plugins(plugin_type: PluginTypes):
             # Optionally, you can mark the plugin as unavailable or take other actions

         except Exception:
-            logger.exception("An error occurred while loading plugin '%s' …
+            logger.exception("An error occurred while loading plugin '%s'", entry_point.name)

         finally:
             count += 1
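With this change `workflow.build()` is awaited before the result is wrapped in a `SessionManager`. A hedged sketch of the caller side, assuming a workflow configuration at the placeholder path `config.yml`:

```python
# Sketch only: "config.yml" is a placeholder workflow configuration path.
import asyncio

from nat.runtime.loader import load_workflow


async def main() -> None:
    # load_workflow awaits workflow.build() internally and yields a SessionManager.
    async with load_workflow("config.yml") as session_manager:
        print(type(session_manager).__name__)  # SessionManager


asyncio.run(main())
```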
nat/runtime/runner.py
CHANGED
@@ -15,11 +15,16 @@

 import logging
 import typing
+import uuid
 from enum import Enum

 from nat.builder.context import Context
 from nat.builder.context import ContextState
 from nat.builder.function import Function
+from nat.data_models.intermediate_step import IntermediateStepPayload
+from nat.data_models.intermediate_step import IntermediateStepType
+from nat.data_models.intermediate_step import StreamEventData
+from nat.data_models.intermediate_step import TraceMetadata
 from nat.data_models.invocation_node import InvocationNode
 from nat.observability.exporter_manager import ExporterManager
 from nat.utils.reactive.subject import Subject

@@ -130,17 +135,59 @@ class Runner:
         if (self._state != RunnerState.INITIALIZED):
             raise ValueError("Cannot run the workflow without entering the context")

+        token_run_id = None
+        token_trace_id = None
         try:
             self._state = RunnerState.RUNNING

             if (not self._entry_fn.has_single_output):
                 raise ValueError("Workflow does not support single output")

+            # Establish workflow run and trace identifiers
+            existing_run_id = self._context_state.workflow_run_id.get()
+            existing_trace_id = self._context_state.workflow_trace_id.get()
+
+            workflow_run_id = existing_run_id or str(uuid.uuid4())
+
+            workflow_trace_id = existing_trace_id or uuid.uuid4().int
+
+            token_run_id = self._context_state.workflow_run_id.set(workflow_run_id)
+            token_trace_id = self._context_state.workflow_trace_id.set(workflow_trace_id)
+
+            # Prepare workflow-level intermediate step identifiers
+            workflow_step_uuid = str(uuid.uuid4())
+            workflow_name = getattr(self._entry_fn, 'instance_name', None) or "workflow"
+
             async with self._exporter_manager.start(context_state=self._context_state):
-                # …
-                …
+                # Emit WORKFLOW_START
+                start_metadata = TraceMetadata(
+                    provided_metadata={
+                        "workflow_run_id": workflow_run_id,
+                        "workflow_trace_id": f"{workflow_trace_id:032x}",
+                        "conversation_id": self._context_state.conversation_id.get(),
+                    })
+                self._context.intermediate_step_manager.push_intermediate_step(
+                    IntermediateStepPayload(UUID=workflow_step_uuid,
+                                            event_type=IntermediateStepType.WORKFLOW_START,
+                                            name=workflow_name,
+                                            metadata=start_metadata))
+
+                result = await self._entry_fn.ainvoke(self._input_message, to_type=to_type)  # type: ignore
+
+                # Emit WORKFLOW_END with output
+                end_metadata = TraceMetadata(
+                    provided_metadata={
+                        "workflow_run_id": workflow_run_id,
+                        "workflow_trace_id": f"{workflow_trace_id:032x}",
+                        "conversation_id": self._context_state.conversation_id.get(),
+                    })
+                self._context.intermediate_step_manager.push_intermediate_step(
+                    IntermediateStepPayload(UUID=workflow_step_uuid,
+                                            event_type=IntermediateStepType.WORKFLOW_END,
+                                            name=workflow_name,
+                                            metadata=end_metadata,
+                                            data=StreamEventData(output=result)))

-            # Close the intermediate stream
             event_stream = self._context_state.event_stream.get()
             if event_stream:
                 event_stream.on_complete()

@@ -149,30 +196,77 @@ class Runner:

             return result
         except Exception as e:
-            …
+            err_msg = f": {e}" if str(e).strip() else "."
+            logger.error("Error running workflow%s", err_msg)
             event_stream = self._context_state.event_stream.get()
             if event_stream:
                 event_stream.on_complete()
             self._state = RunnerState.FAILED
-
             raise
+        finally:
+            if token_run_id is not None:
+                self._context_state.workflow_run_id.reset(token_run_id)
+            if token_trace_id is not None:
+                self._context_state.workflow_trace_id.reset(token_trace_id)

     async def result_stream(self, to_type: type | None = None):

         if (self._state != RunnerState.INITIALIZED):
             raise ValueError("Cannot run the workflow without entering the context")

+        token_run_id = None
+        token_trace_id = None
         try:
             self._state = RunnerState.RUNNING

             if (not self._entry_fn.has_streaming_output):
                 raise ValueError("Workflow does not support streaming output")

+            # Establish workflow run and trace identifiers
+            existing_run_id = self._context_state.workflow_run_id.get()
+            existing_trace_id = self._context_state.workflow_trace_id.get()
+
+            workflow_run_id = existing_run_id or str(uuid.uuid4())
+
+            workflow_trace_id = existing_trace_id or uuid.uuid4().int
+
+            token_run_id = self._context_state.workflow_run_id.set(workflow_run_id)
+            token_trace_id = self._context_state.workflow_trace_id.set(workflow_trace_id)
+
+            # Prepare workflow-level intermediate step identifiers
+            workflow_step_uuid = str(uuid.uuid4())
+            workflow_name = getattr(self._entry_fn, 'instance_name', None) or "workflow"
+
             # Run the workflow
             async with self._exporter_manager.start(context_state=self._context_state):
-                …
+                # Emit WORKFLOW_START
+                start_metadata = TraceMetadata(
+                    provided_metadata={
+                        "workflow_run_id": workflow_run_id,
+                        "workflow_trace_id": f"{workflow_trace_id:032x}",
+                        "conversation_id": self._context_state.conversation_id.get(),
+                    })
+                self._context.intermediate_step_manager.push_intermediate_step(
+                    IntermediateStepPayload(UUID=workflow_step_uuid,
+                                            event_type=IntermediateStepType.WORKFLOW_START,
+                                            name=workflow_name,
+                                            metadata=start_metadata))
+
                 async for m in self._entry_fn.astream(self._input_message, to_type=to_type):  # type: ignore
                     yield m

+                # Emit WORKFLOW_END
+                end_metadata = TraceMetadata(
+                    provided_metadata={
+                        "workflow_run_id": workflow_run_id,
+                        "workflow_trace_id": f"{workflow_trace_id:032x}",
+                        "conversation_id": self._context_state.conversation_id.get(),
+                    })
+                self._context.intermediate_step_manager.push_intermediate_step(
+                    IntermediateStepPayload(UUID=workflow_step_uuid,
+                                            event_type=IntermediateStepType.WORKFLOW_END,
+                                            name=workflow_name,
+                                            metadata=end_metadata))
             self._state = RunnerState.COMPLETED

             # Close the intermediate stream

@@ -181,13 +275,17 @@ class Runner:
                 event_stream.on_complete()

         except Exception as e:
-            logger.…
+            logger.error("Error running workflow: %s", e)
             event_stream = self._context_state.event_stream.get()
             if event_stream:
                 event_stream.on_complete()
             self._state = RunnerState.FAILED
-
             raise
+        finally:
+            if token_run_id is not None:
+                self._context_state.workflow_run_id.reset(token_run_id)
+            if token_trace_id is not None:
+                self._context_state.workflow_trace_id.reset(token_trace_id)


 # Compatibility aliases with previous releases
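The runner now brackets every run with `WORKFLOW_START`/`WORKFLOW_END` intermediate steps carrying a workflow run id and a 128-bit trace id. A minimal sketch of how those identifiers are derived, mirroring the added code; the `None` values are stand-ins for the context-state lookups:

```python
# Sketch of the identifier handling added above; the None values stand in for
# context_state.workflow_run_id.get() / workflow_trace_id.get().
import uuid

existing_run_id = None
existing_trace_id = None

workflow_run_id = existing_run_id or str(uuid.uuid4())
workflow_trace_id = existing_trace_id or uuid.uuid4().int

print(workflow_run_id)              # e.g. "0b8f3c2e-...."
print(f"{workflow_trace_id:032x}")  # 32-hex-digit trace id, as emitted in the metadata
```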
nat/runtime/session.py
CHANGED
@@ -16,12 +16,15 @@

 import asyncio
 import contextvars
 import typing
+import uuid
 from collections.abc import Awaitable
 from collections.abc import Callable
 from contextlib import asynccontextmanager
 from contextlib import nullcontext

+from fastapi import WebSocket
 from starlette.requests import HTTPConnection
+from starlette.requests import Request

 from nat.builder.context import Context
 from nat.builder.context import ContextState

@@ -89,7 +92,8 @@ class SessionManager:
     @asynccontextmanager
     async def session(self,
                       user_manager=None,
-                      …
+                      http_connection: HTTPConnection | None = None,
+                      user_message_id: str | None = None,
                       conversation_id: str | None = None,
                       user_input_callback: Callable[[InteractionPrompt], Awaitable[HumanResponse]] = None,
                       user_authentication_callback: Callable[[AuthProviderBaseConfig, AuthFlowType],

@@ -107,10 +111,11 @@ class SessionManager:
         if user_authentication_callback is not None:
             token_user_authentication = self._context_state.user_auth_callback.set(user_authentication_callback)

-        if …
-        self.…
+        if isinstance(http_connection, WebSocket):
+            self.set_metadata_from_websocket(http_connection, user_message_id, conversation_id)

-        …
+        if isinstance(http_connection, Request):
+            self.set_metadata_from_http_request(http_connection)

         try:
             yield self

@@ -135,14 +140,11 @@ class SessionManager:
         async with self._workflow.run(message) as runner:
             yield runner

-    def set_metadata_from_http_request(self, request: …
+    def set_metadata_from_http_request(self, request: Request) -> None:
         """
         Extracts and sets user metadata request attributes from a HTTP request.
         If request is None, no attributes are set.
         """
-        if request is None:
-            return
-
         self._context.metadata._request.method = getattr(request, "method", None)
         self._context.metadata._request.url_path = request.url.path
         self._context.metadata._request.url_port = request.url.port

@@ -157,6 +159,65 @@ class SessionManager:
         if request.headers.get("conversation-id"):
             self._context_state.conversation_id.set(request.headers["conversation-id"])

+        if request.headers.get("user-message-id"):
+            self._context_state.user_message_id.set(request.headers["user-message-id"])
+
+        # W3C Trace Context header: traceparent: 00-<trace-id>-<span-id>-<flags>
+        traceparent = request.headers.get("traceparent")
+        if traceparent:
+            try:
+                parts = traceparent.split("-")
+                if len(parts) >= 4:
+                    trace_id_hex = parts[1]
+                    if len(trace_id_hex) == 32:
+                        trace_id_int = uuid.UUID(trace_id_hex).int
+                        self._context_state.workflow_trace_id.set(trace_id_int)
+            except Exception:
+                pass
+
+        if not self._context_state.workflow_trace_id.get():
+            workflow_trace_id = request.headers.get("workflow-trace-id")
+            if workflow_trace_id:
+                try:
+                    self._context_state.workflow_trace_id.set(uuid.UUID(workflow_trace_id).int)
+                except Exception:
+                    pass
+
+        workflow_run_id = request.headers.get("workflow-run-id")
+        if workflow_run_id:
+            self._context_state.workflow_run_id.set(workflow_run_id)
+
+    def set_metadata_from_websocket(self,
+                                    websocket: WebSocket,
+                                    user_message_id: str | None,
+                                    conversation_id: str | None) -> None:
+        """
+        Extracts and sets user metadata for Websocket connections.
+        """
+
+        # Extract cookies from WebSocket headers (similar to HTTP request)
+        if websocket and hasattr(websocket, 'scope') and 'headers' in websocket.scope:
+            cookies = {}
+            for header_name, header_value in websocket.scope.get('headers', []):
+                if header_name == b'cookie':
+                    cookie_header = header_value.decode('utf-8')
+                    # Parse cookie header: "name1=value1; name2=value2"
+                    for cookie in cookie_header.split(';'):
+                        cookie = cookie.strip()
+                        if '=' in cookie:
+                            name, value = cookie.split('=', 1)
+                            cookies[name.strip()] = value.strip()
+
+            # Set cookies in metadata (same as HTTP request)
+            self._context.metadata._request.cookies = cookies
+            self._context_state.metadata.set(self._context.metadata)
+
+        if conversation_id is not None:
+            self._context_state.conversation_id.set(conversation_id)
+
+        if user_message_id is not None:
+            self._context_state.user_message_id.set(user_message_id)
+

 # Compatibility aliases with previous releases
 AIQSessionManager = SessionManager
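`set_metadata_from_http_request` now also honors `user-message-id`, W3C `traceparent`, `workflow-trace-id`, and `workflow-run-id` headers. A hedged client-side sketch of supplying them; only the header names come from the diff, while the endpoint URL and JSON payload are placeholders, not the package's documented API:

```python
# Hypothetical client call; headers are the ones the session reads above.
import uuid

import requests

trace_id = uuid.uuid4().hex  # 32 hex characters, as the traceparent parser expects
headers = {
    "traceparent": f"00-{trace_id}-{uuid.uuid4().hex[:16]}-01",
    "workflow-run-id": str(uuid.uuid4()),
    "conversation-id": "demo-conversation",
    "user-message-id": "demo-message-1",
}
requests.post("http://localhost:8000/generate", json={"input_message": "hi"}, headers=headers)
```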
nat/settings/global_settings.py
CHANGED
@@ -47,6 +47,12 @@ class Settings(HashableBaseModel):
     # Registry Handeler Configuration
     channels: dict[str, RegistryHandlerBaseConfig] = {}

+    # Timezone fallback behavior
+    # Options:
+    # - "utc": default to UTC
+    # - "system": use the system's local timezone
+    fallback_timezone: typing.Literal["system", "utc"] = "utc"
+
     _configuration_directory: typing.ClassVar[str]
     _settings_changed_hooks: typing.ClassVar[list[Callable[[], None]]] = []
     _settings_changed_hooks_active: bool = True

@@ -118,8 +124,7 @@ class Settings(HashableBaseModel):
             if (short_names[key.local_name] == 1):
                 type_list.append((key.local_name, key.config_type))

-
-        return typing.Union[tuple(typing.Annotated[x_type, Tag(x_id)] for x_id, x_type in type_list)]
+        return typing.Union[*tuple(typing.Annotated[x_type, Tag(x_id)] for x_id, x_type in type_list)]

     RegistryHandlerAnnotation = dict[
         str,

@@ -164,8 +169,12 @@ class Settings(HashableBaseModel):
         if (not os.path.exists(configuration_file)):
             loaded_config = {}
         else:
-            with open(file_path, …
-                …
+            with open(file_path, encoding="utf-8") as f:
+                try:
+                    loaded_config = json.load(f)
+                except Exception as e:
+                    logger.exception("Error loading configuration file %s: %s", file_path, e)
+                    loaded_config = {}

         settings = Settings(**loaded_config)
         settings.set_configuration_directory(configuration_directory)

@@ -214,13 +223,15 @@ class Settings(HashableBaseModel):
             match field:
                 case "channels":
                     self.channels = validated_data.channels
+                case "fallback_timezone":
+                    self.fallback_timezone = validated_data.fallback_timezone
                 case _:
                     raise ValueError(f"Encountered invalid model field: {field}")

             return True

         except Exception as e:
-            logger.exception("Unable to validate user settings configuration: %s", e…
+            logger.exception("Unable to validate user settings configuration: %s", e)
             return False

     def print_channel_settings(self, channel_type: str | None = None) -> None:
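The new `fallback_timezone` field defaults to `"utc"` and can be switched to `"system"`; `datetime_tools.py` below reads it through `GlobalSettings`. A minimal sketch of reading it, assuming the same access pattern used in that file:

```python
# Minimal sketch; GlobalSettings.get() mirrors the lookup used in datetime_tools.py.
from nat.settings.global_settings import GlobalSettings

settings = GlobalSettings.get()
print(settings.fallback_timezone)  # "utc" by default, or "system"
```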
nat/tool/chat_completion.py
CHANGED
@@ -44,7 +44,7 @@ async def register_chat_completion(config: ChatCompletionConfig, builder: Builde
     """Registers a chat completion function that can handle natural language queries."""

     # Get the LLM from the builder context using the configured LLM reference
-    # Use LangChain framework wrapper since we're using LangChain-based LLM
+    # Use LangChain/LangGraph framework wrapper since we're using LangChain/LangGraph-based LLM
     llm = await builder.get_llm(config.llm_name, wrapper_type=LLMFrameworkEnum.LANGCHAIN)

     async def _chat_completion(query: str) -> str:

@@ -63,7 +63,10 @@ async def register_chat_completion(config: ChatCompletionConfig, builder: Builde
             # Generate response using the LLM
             response = await llm.ainvoke(prompt)

-            …
+            if isinstance(response, str):
+                return response
+
+            return response.text()

         except Exception as e:
             # Fallback response if LLM call fails
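The `_chat_completion` change normalizes the LLM result: plain strings pass through, message objects are converted with `.text()`. A standalone sketch of the same guard, assuming a LangChain-style message object as the non-string case (mirrored from the diff, not verified against every LLM wrapper):

```python
# Sketch of the normalization added above.
def normalize_response(response) -> str:
    if isinstance(response, str):
        return response
    # Assumed: the object exposes .text(), as the diff's LangChain path does.
    return response.text()
```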
nat/tool/code_execution/local_sandbox/local_sandbox_server.py
CHANGED

@@ -62,7 +62,7 @@ class CodeExecutionResponse(Response):
         super().__init__(status=status_code, mimetype="application/json", response=result.model_dump_json())

     @classmethod
-    def with_error(cls, status_code: int, error_message: str) -> …
+    def with_error(cls, status_code: int, error_message: str) -> CodeExecutionResponse:
         return cls(status_code,
                    CodeExecutionResult(process_status=CodeExecutionStatus.ERROR, stdout="", stderr=error_message))


@@ -121,13 +121,13 @@ def execute_code_subprocess(generated_code: str, queue):
         resource.setrlimit(resource.RLIMIT_AS, (limit, limit))
         resource.setrlimit(resource.RLIMIT_DATA, (limit, limit))
     except Exception as e:
-        logger.…
+        logger.exception("Failed to set resource limits, PID: %s, error: %s", os.getpid(), e)

     stdout_capture = StringIO()
     stderr_capture = StringIO()
     try:
         with contextlib.redirect_stdout(stdout_capture), contextlib.redirect_stderr(stderr_capture):
-            exec(generated_code, {})
+            exec(generated_code, {})
         logger.debug("execute_code_subprocess finished, PID: %s", os.getpid())
         queue.put(CodeExecutionResult(stdout=stdout_capture.getvalue(), stderr=stderr_capture.getvalue()))
     except Exception as e:
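`execute_code_subprocess` caps the child's memory with `resource.setrlimit` and captures output around `exec`. A standalone, Unix-only sketch of that pattern; the 512 MiB limit is illustrative, not the package's value:

```python
# Unix-only sketch of the resource-limit + capture pattern used above.
import contextlib
import resource
from io import StringIO

limit = 512 * 1024 * 1024  # illustrative cap
resource.setrlimit(resource.RLIMIT_AS, (limit, limit))

stdout_capture, stderr_capture = StringIO(), StringIO()
with contextlib.redirect_stdout(stdout_capture), contextlib.redirect_stderr(stderr_capture):
    exec("print('hello from sandboxed code')", {})

print(stdout_capture.getvalue(), end="")  # hello from sandboxed code
```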
nat/tool/datetime_tools.py
CHANGED
@@ -13,30 +13,70 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.

+import datetime
+import zoneinfo
+
+from starlette.datastructures import Headers
+
 from nat.builder.builder import Builder
 from nat.builder.function_info import FunctionInfo
 from nat.cli.register_workflow import register_function
 from nat.data_models.function import FunctionBaseConfig
+from nat.settings.global_settings import GlobalSettings


 class CurrentTimeToolConfig(FunctionBaseConfig, name="current_datetime"):
     """
-    Simple tool which returns the current date and time in human readable format.
+    Simple tool which returns the current date and time in human readable format with timezone information. By default,
+    the timezone is in Etc/UTC. If the user provides a timezone in the header, we will use it. Timezone will be
+    provided in IANA zone name format. For example, "America/New_York" or "Etc/UTC".
     """
     pass


-…
-…
+def _get_timezone_obj(headers: Headers | None) -> zoneinfo.ZoneInfo | datetime.tzinfo:
+    # Default to UTC
+    timezone_obj = zoneinfo.ZoneInfo("Etc/UTC")
+
+    if headers:
+        # If user has provided a timezone in the header, we will prioritize on using it
+        timezone_header = headers.get("x-timezone")
+        if timezone_header:
+            try:
+                timezone_obj = zoneinfo.ZoneInfo(timezone_header)
+            except Exception:
+                pass
+    else:
+        # Only if a timezone is not in the header, we will determine default timezone based on global settings
+        fallback_tz = GlobalSettings.get().fallback_timezone
+
+        if fallback_tz == "system":
+            # Use the system's local timezone. Avoid requiring external deps.
+            timezone_obj = datetime.datetime.now().astimezone().tzinfo or zoneinfo.ZoneInfo("Etc/UTC")
+
+    return timezone_obj

-…
+
+@register_function(config_type=CurrentTimeToolConfig)
+async def current_datetime(_config: CurrentTimeToolConfig, _builder: Builder):

     async def _get_current_time(unused: str) -> str:

-        …
-        …
+        del unused  # Unused parameter to avoid linting error
+
+        from nat.builder.context import Context
+        nat_context = Context.get()
+
+        headers: Headers | None = nat_context.metadata.headers
+
+        timezone_obj = _get_timezone_obj(headers)
+
+        now = datetime.datetime.now(timezone_obj)
+        now_machine_readable = now.strftime("%Y-%m-%d %H:%M:%S %z")

-        …
+        # Returns the current time in machine readable format with timezone offset.
+        return f"The current time of day is {now_machine_readable}"

-    yield FunctionInfo.from_fn(
-        …
+    yield FunctionInfo.from_fn(
+        _get_current_time,
+        description="Returns the current date and time in human readable format with timezone information.")
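The rewritten tool resolves the timezone from an `x-timezone` header (an IANA zone name) and otherwise falls back to UTC or the system zone according to `fallback_timezone`. A standalone sketch of the header path, with an illustrative header value:

```python
# Standalone sketch; the header value is illustrative.
import datetime
import zoneinfo

headers = {"x-timezone": "America/New_York"}

try:
    tz = zoneinfo.ZoneInfo(headers.get("x-timezone", "Etc/UTC"))
except Exception:
    tz = zoneinfo.ZoneInfo("Etc/UTC")  # unknown zone names fall back to UTC

now = datetime.datetime.now(tz)
print(now.strftime("%Y-%m-%d %H:%M:%S %z"))  # e.g. 2025-06-01 08:30:00 -0400
```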
nat/tool/document_search.py
CHANGED
@@ -53,7 +53,7 @@ async def document_search(config: MilvusDocumentSearchToolConfig, builder: Build
     from langchain_core.messages import HumanMessage
     from langchain_core.messages import SystemMessage
     from langchain_core.pydantic_v1 import BaseModel
-    from langchain_core.pydantic_v1 import Field
+    from langchain_core.pydantic_v1 import Field

     # define collection store
     # create a list of tuples using enumerate()

@@ -119,7 +119,7 @@ Return only the name of the predicted collection."""
     if len(results["chunks"]) == 0:
         return DocumentSearchOutput(collection_name=llm_pred.collection_name, documents="")

-    # parse docs from …
+    # parse docs from LangChain/LangGraph Document object to string
     parsed_docs = []

     # iterate over results and store parsed content
|