nvidia-nat 1.2.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (435):
  1. aiq/__init__.py +66 -0
  2. nat/agent/__init__.py +0 -0
  3. nat/agent/base.py +256 -0
  4. nat/agent/dual_node.py +67 -0
  5. nat/agent/react_agent/__init__.py +0 -0
  6. nat/agent/react_agent/agent.py +363 -0
  7. nat/agent/react_agent/output_parser.py +104 -0
  8. nat/agent/react_agent/prompt.py +44 -0
  9. nat/agent/react_agent/register.py +149 -0
  10. nat/agent/reasoning_agent/__init__.py +0 -0
  11. nat/agent/reasoning_agent/reasoning_agent.py +225 -0
  12. nat/agent/register.py +23 -0
  13. nat/agent/rewoo_agent/__init__.py +0 -0
  14. nat/agent/rewoo_agent/agent.py +415 -0
  15. nat/agent/rewoo_agent/prompt.py +110 -0
  16. nat/agent/rewoo_agent/register.py +157 -0
  17. nat/agent/tool_calling_agent/__init__.py +0 -0
  18. nat/agent/tool_calling_agent/agent.py +119 -0
  19. nat/agent/tool_calling_agent/register.py +106 -0
  20. nat/authentication/__init__.py +14 -0
  21. nat/authentication/api_key/__init__.py +14 -0
  22. nat/authentication/api_key/api_key_auth_provider.py +96 -0
  23. nat/authentication/api_key/api_key_auth_provider_config.py +124 -0
  24. nat/authentication/api_key/register.py +26 -0
  25. nat/authentication/exceptions/__init__.py +14 -0
  26. nat/authentication/exceptions/api_key_exceptions.py +38 -0
  27. nat/authentication/http_basic_auth/__init__.py +0 -0
  28. nat/authentication/http_basic_auth/http_basic_auth_provider.py +81 -0
  29. nat/authentication/http_basic_auth/register.py +30 -0
  30. nat/authentication/interfaces.py +93 -0
  31. nat/authentication/oauth2/__init__.py +14 -0
  32. nat/authentication/oauth2/oauth2_auth_code_flow_provider.py +107 -0
  33. nat/authentication/oauth2/oauth2_auth_code_flow_provider_config.py +39 -0
  34. nat/authentication/oauth2/register.py +25 -0
  35. nat/authentication/register.py +21 -0
  36. nat/builder/__init__.py +0 -0
  37. nat/builder/builder.py +285 -0
  38. nat/builder/component_utils.py +316 -0
  39. nat/builder/context.py +270 -0
  40. nat/builder/embedder.py +24 -0
  41. nat/builder/eval_builder.py +161 -0
  42. nat/builder/evaluator.py +29 -0
  43. nat/builder/framework_enum.py +24 -0
  44. nat/builder/front_end.py +73 -0
  45. nat/builder/function.py +344 -0
  46. nat/builder/function_base.py +380 -0
  47. nat/builder/function_info.py +627 -0
  48. nat/builder/intermediate_step_manager.py +174 -0
  49. nat/builder/llm.py +25 -0
  50. nat/builder/retriever.py +25 -0
  51. nat/builder/user_interaction_manager.py +78 -0
  52. nat/builder/workflow.py +148 -0
  53. nat/builder/workflow_builder.py +1117 -0
  54. nat/cli/__init__.py +14 -0
  55. nat/cli/cli_utils/__init__.py +0 -0
  56. nat/cli/cli_utils/config_override.py +231 -0
  57. nat/cli/cli_utils/validation.py +37 -0
  58. nat/cli/commands/__init__.py +0 -0
  59. nat/cli/commands/configure/__init__.py +0 -0
  60. nat/cli/commands/configure/channel/__init__.py +0 -0
  61. nat/cli/commands/configure/channel/add.py +28 -0
  62. nat/cli/commands/configure/channel/channel.py +34 -0
  63. nat/cli/commands/configure/channel/remove.py +30 -0
  64. nat/cli/commands/configure/channel/update.py +30 -0
  65. nat/cli/commands/configure/configure.py +33 -0
  66. nat/cli/commands/evaluate.py +139 -0
  67. nat/cli/commands/info/__init__.py +14 -0
  68. nat/cli/commands/info/info.py +37 -0
  69. nat/cli/commands/info/list_channels.py +32 -0
  70. nat/cli/commands/info/list_components.py +129 -0
  71. nat/cli/commands/info/list_mcp.py +304 -0
  72. nat/cli/commands/registry/__init__.py +14 -0
  73. nat/cli/commands/registry/publish.py +88 -0
  74. nat/cli/commands/registry/pull.py +118 -0
  75. nat/cli/commands/registry/registry.py +36 -0
  76. nat/cli/commands/registry/remove.py +108 -0
  77. nat/cli/commands/registry/search.py +155 -0
  78. nat/cli/commands/sizing/__init__.py +14 -0
  79. nat/cli/commands/sizing/calc.py +297 -0
  80. nat/cli/commands/sizing/sizing.py +27 -0
  81. nat/cli/commands/start.py +246 -0
  82. nat/cli/commands/uninstall.py +81 -0
  83. nat/cli/commands/validate.py +47 -0
  84. nat/cli/commands/workflow/__init__.py +14 -0
  85. nat/cli/commands/workflow/templates/__init__.py.j2 +0 -0
  86. nat/cli/commands/workflow/templates/config.yml.j2 +16 -0
  87. nat/cli/commands/workflow/templates/pyproject.toml.j2 +22 -0
  88. nat/cli/commands/workflow/templates/register.py.j2 +5 -0
  89. nat/cli/commands/workflow/templates/workflow.py.j2 +36 -0
  90. nat/cli/commands/workflow/workflow.py +37 -0
  91. nat/cli/commands/workflow/workflow_commands.py +317 -0
  92. nat/cli/entrypoint.py +135 -0
  93. nat/cli/main.py +57 -0
  94. nat/cli/register_workflow.py +488 -0
  95. nat/cli/type_registry.py +1000 -0
  96. nat/data_models/__init__.py +14 -0
  97. nat/data_models/api_server.py +716 -0
  98. nat/data_models/authentication.py +231 -0
  99. nat/data_models/common.py +171 -0
  100. nat/data_models/component.py +58 -0
  101. nat/data_models/component_ref.py +168 -0
  102. nat/data_models/config.py +410 -0
  103. nat/data_models/dataset_handler.py +169 -0
  104. nat/data_models/discovery_metadata.py +305 -0
  105. nat/data_models/embedder.py +27 -0
  106. nat/data_models/evaluate.py +127 -0
  107. nat/data_models/evaluator.py +26 -0
  108. nat/data_models/front_end.py +26 -0
  109. nat/data_models/function.py +30 -0
  110. nat/data_models/function_dependencies.py +72 -0
  111. nat/data_models/interactive.py +246 -0
  112. nat/data_models/intermediate_step.py +302 -0
  113. nat/data_models/invocation_node.py +38 -0
  114. nat/data_models/llm.py +27 -0
  115. nat/data_models/logging.py +26 -0
  116. nat/data_models/memory.py +27 -0
  117. nat/data_models/object_store.py +44 -0
  118. nat/data_models/profiler.py +54 -0
  119. nat/data_models/registry_handler.py +26 -0
  120. nat/data_models/retriever.py +30 -0
  121. nat/data_models/retry_mixin.py +35 -0
  122. nat/data_models/span.py +190 -0
  123. nat/data_models/step_adaptor.py +64 -0
  124. nat/data_models/streaming.py +33 -0
  125. nat/data_models/swe_bench_model.py +54 -0
  126. nat/data_models/telemetry_exporter.py +26 -0
  127. nat/data_models/ttc_strategy.py +30 -0
  128. nat/embedder/__init__.py +0 -0
  129. nat/embedder/nim_embedder.py +59 -0
  130. nat/embedder/openai_embedder.py +43 -0
  131. nat/embedder/register.py +22 -0
  132. nat/eval/__init__.py +14 -0
  133. nat/eval/config.py +60 -0
  134. nat/eval/dataset_handler/__init__.py +0 -0
  135. nat/eval/dataset_handler/dataset_downloader.py +106 -0
  136. nat/eval/dataset_handler/dataset_filter.py +52 -0
  137. nat/eval/dataset_handler/dataset_handler.py +367 -0
  138. nat/eval/evaluate.py +510 -0
  139. nat/eval/evaluator/__init__.py +14 -0
  140. nat/eval/evaluator/base_evaluator.py +77 -0
  141. nat/eval/evaluator/evaluator_model.py +45 -0
  142. nat/eval/intermediate_step_adapter.py +99 -0
  143. nat/eval/rag_evaluator/__init__.py +0 -0
  144. nat/eval/rag_evaluator/evaluate.py +178 -0
  145. nat/eval/rag_evaluator/register.py +143 -0
  146. nat/eval/register.py +23 -0
  147. nat/eval/remote_workflow.py +133 -0
  148. nat/eval/runners/__init__.py +14 -0
  149. nat/eval/runners/config.py +39 -0
  150. nat/eval/runners/multi_eval_runner.py +54 -0
  151. nat/eval/runtime_event_subscriber.py +52 -0
  152. nat/eval/swe_bench_evaluator/__init__.py +0 -0
  153. nat/eval/swe_bench_evaluator/evaluate.py +215 -0
  154. nat/eval/swe_bench_evaluator/register.py +36 -0
  155. nat/eval/trajectory_evaluator/__init__.py +0 -0
  156. nat/eval/trajectory_evaluator/evaluate.py +75 -0
  157. nat/eval/trajectory_evaluator/register.py +40 -0
  158. nat/eval/tunable_rag_evaluator/__init__.py +0 -0
  159. nat/eval/tunable_rag_evaluator/evaluate.py +245 -0
  160. nat/eval/tunable_rag_evaluator/register.py +52 -0
  161. nat/eval/usage_stats.py +41 -0
  162. nat/eval/utils/__init__.py +0 -0
  163. nat/eval/utils/output_uploader.py +140 -0
  164. nat/eval/utils/tqdm_position_registry.py +40 -0
  165. nat/eval/utils/weave_eval.py +184 -0
  166. nat/experimental/__init__.py +0 -0
  167. nat/experimental/decorators/__init__.py +0 -0
  168. nat/experimental/decorators/experimental_warning_decorator.py +134 -0
  169. nat/experimental/test_time_compute/__init__.py +0 -0
  170. nat/experimental/test_time_compute/editing/__init__.py +0 -0
  171. nat/experimental/test_time_compute/editing/iterative_plan_refinement_editor.py +147 -0
  172. nat/experimental/test_time_compute/editing/llm_as_a_judge_editor.py +204 -0
  173. nat/experimental/test_time_compute/editing/motivation_aware_summarization.py +107 -0
  174. nat/experimental/test_time_compute/functions/__init__.py +0 -0
  175. nat/experimental/test_time_compute/functions/execute_score_select_function.py +105 -0
  176. nat/experimental/test_time_compute/functions/plan_select_execute_function.py +224 -0
  177. nat/experimental/test_time_compute/functions/ttc_tool_orchestration_function.py +205 -0
  178. nat/experimental/test_time_compute/functions/ttc_tool_wrapper_function.py +146 -0
  179. nat/experimental/test_time_compute/models/__init__.py +0 -0
  180. nat/experimental/test_time_compute/models/editor_config.py +132 -0
  181. nat/experimental/test_time_compute/models/scoring_config.py +112 -0
  182. nat/experimental/test_time_compute/models/search_config.py +120 -0
  183. nat/experimental/test_time_compute/models/selection_config.py +154 -0
  184. nat/experimental/test_time_compute/models/stage_enums.py +43 -0
  185. nat/experimental/test_time_compute/models/strategy_base.py +66 -0
  186. nat/experimental/test_time_compute/models/tool_use_config.py +41 -0
  187. nat/experimental/test_time_compute/models/ttc_item.py +48 -0
  188. nat/experimental/test_time_compute/register.py +36 -0
  189. nat/experimental/test_time_compute/scoring/__init__.py +0 -0
  190. nat/experimental/test_time_compute/scoring/llm_based_agent_scorer.py +168 -0
  191. nat/experimental/test_time_compute/scoring/llm_based_plan_scorer.py +168 -0
  192. nat/experimental/test_time_compute/scoring/motivation_aware_scorer.py +111 -0
  193. nat/experimental/test_time_compute/search/__init__.py +0 -0
  194. nat/experimental/test_time_compute/search/multi_llm_planner.py +128 -0
  195. nat/experimental/test_time_compute/search/multi_query_retrieval_search.py +122 -0
  196. nat/experimental/test_time_compute/search/single_shot_multi_plan_planner.py +128 -0
  197. nat/experimental/test_time_compute/selection/__init__.py +0 -0
  198. nat/experimental/test_time_compute/selection/best_of_n_selector.py +63 -0
  199. nat/experimental/test_time_compute/selection/llm_based_agent_output_selector.py +131 -0
  200. nat/experimental/test_time_compute/selection/llm_based_output_merging_selector.py +159 -0
  201. nat/experimental/test_time_compute/selection/llm_based_plan_selector.py +128 -0
  202. nat/experimental/test_time_compute/selection/threshold_selector.py +58 -0
  203. nat/front_ends/__init__.py +14 -0
  204. nat/front_ends/console/__init__.py +14 -0
  205. nat/front_ends/console/authentication_flow_handler.py +233 -0
  206. nat/front_ends/console/console_front_end_config.py +32 -0
  207. nat/front_ends/console/console_front_end_plugin.py +96 -0
  208. nat/front_ends/console/register.py +25 -0
  209. nat/front_ends/cron/__init__.py +14 -0
  210. nat/front_ends/fastapi/__init__.py +14 -0
  211. nat/front_ends/fastapi/auth_flow_handlers/__init__.py +0 -0
  212. nat/front_ends/fastapi/auth_flow_handlers/http_flow_handler.py +27 -0
  213. nat/front_ends/fastapi/auth_flow_handlers/websocket_flow_handler.py +107 -0
  214. nat/front_ends/fastapi/fastapi_front_end_config.py +241 -0
  215. nat/front_ends/fastapi/fastapi_front_end_controller.py +68 -0
  216. nat/front_ends/fastapi/fastapi_front_end_plugin.py +116 -0
  217. nat/front_ends/fastapi/fastapi_front_end_plugin_worker.py +1087 -0
  218. nat/front_ends/fastapi/html_snippets/__init__.py +14 -0
  219. nat/front_ends/fastapi/html_snippets/auth_code_grant_success.py +35 -0
  220. nat/front_ends/fastapi/intermediate_steps_subscriber.py +80 -0
  221. nat/front_ends/fastapi/job_store.py +183 -0
  222. nat/front_ends/fastapi/main.py +72 -0
  223. nat/front_ends/fastapi/message_handler.py +320 -0
  224. nat/front_ends/fastapi/message_validator.py +352 -0
  225. nat/front_ends/fastapi/register.py +25 -0
  226. nat/front_ends/fastapi/response_helpers.py +195 -0
  227. nat/front_ends/fastapi/step_adaptor.py +319 -0
  228. nat/front_ends/mcp/__init__.py +14 -0
  229. nat/front_ends/mcp/mcp_front_end_config.py +36 -0
  230. nat/front_ends/mcp/mcp_front_end_plugin.py +81 -0
  231. nat/front_ends/mcp/mcp_front_end_plugin_worker.py +143 -0
  232. nat/front_ends/mcp/register.py +27 -0
  233. nat/front_ends/mcp/tool_converter.py +241 -0
  234. nat/front_ends/register.py +22 -0
  235. nat/front_ends/simple_base/__init__.py +14 -0
  236. nat/front_ends/simple_base/simple_front_end_plugin_base.py +54 -0
  237. nat/llm/__init__.py +0 -0
  238. nat/llm/aws_bedrock_llm.py +57 -0
  239. nat/llm/nim_llm.py +46 -0
  240. nat/llm/openai_llm.py +46 -0
  241. nat/llm/register.py +23 -0
  242. nat/llm/utils/__init__.py +14 -0
  243. nat/llm/utils/env_config_value.py +94 -0
  244. nat/llm/utils/error.py +17 -0
  245. nat/memory/__init__.py +20 -0
  246. nat/memory/interfaces.py +183 -0
  247. nat/memory/models.py +112 -0
  248. nat/meta/pypi.md +58 -0
  249. nat/object_store/__init__.py +20 -0
  250. nat/object_store/in_memory_object_store.py +76 -0
  251. nat/object_store/interfaces.py +84 -0
  252. nat/object_store/models.py +38 -0
  253. nat/object_store/register.py +20 -0
  254. nat/observability/__init__.py +14 -0
  255. nat/observability/exporter/__init__.py +14 -0
  256. nat/observability/exporter/base_exporter.py +449 -0
  257. nat/observability/exporter/exporter.py +78 -0
  258. nat/observability/exporter/file_exporter.py +33 -0
  259. nat/observability/exporter/processing_exporter.py +322 -0
  260. nat/observability/exporter/raw_exporter.py +52 -0
  261. nat/observability/exporter/span_exporter.py +288 -0
  262. nat/observability/exporter_manager.py +335 -0
  263. nat/observability/mixin/__init__.py +14 -0
  264. nat/observability/mixin/batch_config_mixin.py +26 -0
  265. nat/observability/mixin/collector_config_mixin.py +23 -0
  266. nat/observability/mixin/file_mixin.py +288 -0
  267. nat/observability/mixin/file_mode.py +23 -0
  268. nat/observability/mixin/resource_conflict_mixin.py +134 -0
  269. nat/observability/mixin/serialize_mixin.py +61 -0
  270. nat/observability/mixin/type_introspection_mixin.py +183 -0
  271. nat/observability/processor/__init__.py +14 -0
  272. nat/observability/processor/batching_processor.py +310 -0
  273. nat/observability/processor/callback_processor.py +42 -0
  274. nat/observability/processor/intermediate_step_serializer.py +28 -0
  275. nat/observability/processor/processor.py +71 -0
  276. nat/observability/register.py +96 -0
  277. nat/observability/utils/__init__.py +14 -0
  278. nat/observability/utils/dict_utils.py +236 -0
  279. nat/observability/utils/time_utils.py +31 -0
  280. nat/plugins/.namespace +1 -0
  281. nat/profiler/__init__.py +0 -0
  282. nat/profiler/calc/__init__.py +14 -0
  283. nat/profiler/calc/calc_runner.py +627 -0
  284. nat/profiler/calc/calculations.py +288 -0
  285. nat/profiler/calc/data_models.py +188 -0
  286. nat/profiler/calc/plot.py +345 -0
  287. nat/profiler/callbacks/__init__.py +0 -0
  288. nat/profiler/callbacks/agno_callback_handler.py +295 -0
  289. nat/profiler/callbacks/base_callback_class.py +20 -0
  290. nat/profiler/callbacks/langchain_callback_handler.py +290 -0
  291. nat/profiler/callbacks/llama_index_callback_handler.py +205 -0
  292. nat/profiler/callbacks/semantic_kernel_callback_handler.py +238 -0
  293. nat/profiler/callbacks/token_usage_base_model.py +27 -0
  294. nat/profiler/data_frame_row.py +51 -0
  295. nat/profiler/data_models.py +24 -0
  296. nat/profiler/decorators/__init__.py +0 -0
  297. nat/profiler/decorators/framework_wrapper.py +131 -0
  298. nat/profiler/decorators/function_tracking.py +254 -0
  299. nat/profiler/forecasting/__init__.py +0 -0
  300. nat/profiler/forecasting/config.py +18 -0
  301. nat/profiler/forecasting/model_trainer.py +75 -0
  302. nat/profiler/forecasting/models/__init__.py +22 -0
  303. nat/profiler/forecasting/models/forecasting_base_model.py +40 -0
  304. nat/profiler/forecasting/models/linear_model.py +197 -0
  305. nat/profiler/forecasting/models/random_forest_regressor.py +269 -0
  306. nat/profiler/inference_metrics_model.py +28 -0
  307. nat/profiler/inference_optimization/__init__.py +0 -0
  308. nat/profiler/inference_optimization/bottleneck_analysis/__init__.py +0 -0
  309. nat/profiler/inference_optimization/bottleneck_analysis/nested_stack_analysis.py +460 -0
  310. nat/profiler/inference_optimization/bottleneck_analysis/simple_stack_analysis.py +258 -0
  311. nat/profiler/inference_optimization/data_models.py +386 -0
  312. nat/profiler/inference_optimization/experimental/__init__.py +0 -0
  313. nat/profiler/inference_optimization/experimental/concurrency_spike_analysis.py +468 -0
  314. nat/profiler/inference_optimization/experimental/prefix_span_analysis.py +405 -0
  315. nat/profiler/inference_optimization/llm_metrics.py +212 -0
  316. nat/profiler/inference_optimization/prompt_caching.py +163 -0
  317. nat/profiler/inference_optimization/token_uniqueness.py +107 -0
  318. nat/profiler/inference_optimization/workflow_runtimes.py +72 -0
  319. nat/profiler/intermediate_property_adapter.py +102 -0
  320. nat/profiler/profile_runner.py +473 -0
  321. nat/profiler/utils.py +184 -0
  322. nat/registry_handlers/__init__.py +0 -0
  323. nat/registry_handlers/local/__init__.py +0 -0
  324. nat/registry_handlers/local/local_handler.py +176 -0
  325. nat/registry_handlers/local/register_local.py +37 -0
  326. nat/registry_handlers/metadata_factory.py +60 -0
  327. nat/registry_handlers/package_utils.py +571 -0
  328. nat/registry_handlers/pypi/__init__.py +0 -0
  329. nat/registry_handlers/pypi/pypi_handler.py +251 -0
  330. nat/registry_handlers/pypi/register_pypi.py +40 -0
  331. nat/registry_handlers/register.py +21 -0
  332. nat/registry_handlers/registry_handler_base.py +157 -0
  333. nat/registry_handlers/rest/__init__.py +0 -0
  334. nat/registry_handlers/rest/register_rest.py +56 -0
  335. nat/registry_handlers/rest/rest_handler.py +237 -0
  336. nat/registry_handlers/schemas/__init__.py +0 -0
  337. nat/registry_handlers/schemas/headers.py +42 -0
  338. nat/registry_handlers/schemas/package.py +68 -0
  339. nat/registry_handlers/schemas/publish.py +68 -0
  340. nat/registry_handlers/schemas/pull.py +82 -0
  341. nat/registry_handlers/schemas/remove.py +36 -0
  342. nat/registry_handlers/schemas/search.py +91 -0
  343. nat/registry_handlers/schemas/status.py +47 -0
  344. nat/retriever/__init__.py +0 -0
  345. nat/retriever/interface.py +41 -0
  346. nat/retriever/milvus/__init__.py +14 -0
  347. nat/retriever/milvus/register.py +81 -0
  348. nat/retriever/milvus/retriever.py +228 -0
  349. nat/retriever/models.py +77 -0
  350. nat/retriever/nemo_retriever/__init__.py +14 -0
  351. nat/retriever/nemo_retriever/register.py +60 -0
  352. nat/retriever/nemo_retriever/retriever.py +190 -0
  353. nat/retriever/register.py +22 -0
  354. nat/runtime/__init__.py +14 -0
  355. nat/runtime/loader.py +220 -0
  356. nat/runtime/runner.py +195 -0
  357. nat/runtime/session.py +162 -0
  358. nat/runtime/user_metadata.py +130 -0
  359. nat/settings/__init__.py +0 -0
  360. nat/settings/global_settings.py +318 -0
  361. nat/test/.namespace +1 -0
  362. nat/tool/__init__.py +0 -0
  363. nat/tool/chat_completion.py +74 -0
  364. nat/tool/code_execution/README.md +151 -0
  365. nat/tool/code_execution/__init__.py +0 -0
  366. nat/tool/code_execution/code_sandbox.py +267 -0
  367. nat/tool/code_execution/local_sandbox/.gitignore +1 -0
  368. nat/tool/code_execution/local_sandbox/Dockerfile.sandbox +60 -0
  369. nat/tool/code_execution/local_sandbox/__init__.py +13 -0
  370. nat/tool/code_execution/local_sandbox/local_sandbox_server.py +198 -0
  371. nat/tool/code_execution/local_sandbox/sandbox.requirements.txt +6 -0
  372. nat/tool/code_execution/local_sandbox/start_local_sandbox.sh +50 -0
  373. nat/tool/code_execution/register.py +74 -0
  374. nat/tool/code_execution/test_code_execution_sandbox.py +414 -0
  375. nat/tool/code_execution/utils.py +100 -0
  376. nat/tool/datetime_tools.py +42 -0
  377. nat/tool/document_search.py +141 -0
  378. nat/tool/github_tools/__init__.py +0 -0
  379. nat/tool/github_tools/create_github_commit.py +133 -0
  380. nat/tool/github_tools/create_github_issue.py +87 -0
  381. nat/tool/github_tools/create_github_pr.py +106 -0
  382. nat/tool/github_tools/get_github_file.py +106 -0
  383. nat/tool/github_tools/get_github_issue.py +166 -0
  384. nat/tool/github_tools/get_github_pr.py +256 -0
  385. nat/tool/github_tools/update_github_issue.py +100 -0
  386. nat/tool/mcp/__init__.py +14 -0
  387. nat/tool/mcp/exceptions.py +142 -0
  388. nat/tool/mcp/mcp_client.py +255 -0
  389. nat/tool/mcp/mcp_tool.py +96 -0
  390. nat/tool/memory_tools/__init__.py +0 -0
  391. nat/tool/memory_tools/add_memory_tool.py +79 -0
  392. nat/tool/memory_tools/delete_memory_tool.py +67 -0
  393. nat/tool/memory_tools/get_memory_tool.py +72 -0
  394. nat/tool/nvidia_rag.py +95 -0
  395. nat/tool/register.py +38 -0
  396. nat/tool/retriever.py +94 -0
  397. nat/tool/server_tools.py +66 -0
  398. nat/utils/__init__.py +0 -0
  399. nat/utils/data_models/__init__.py +0 -0
  400. nat/utils/data_models/schema_validator.py +58 -0
  401. nat/utils/debugging_utils.py +43 -0
  402. nat/utils/dump_distro_mapping.py +32 -0
  403. nat/utils/exception_handlers/__init__.py +0 -0
  404. nat/utils/exception_handlers/automatic_retries.py +289 -0
  405. nat/utils/exception_handlers/mcp.py +211 -0
  406. nat/utils/exception_handlers/schemas.py +114 -0
  407. nat/utils/io/__init__.py +0 -0
  408. nat/utils/io/model_processing.py +28 -0
  409. nat/utils/io/yaml_tools.py +119 -0
  410. nat/utils/log_utils.py +37 -0
  411. nat/utils/metadata_utils.py +74 -0
  412. nat/utils/optional_imports.py +142 -0
  413. nat/utils/producer_consumer_queue.py +178 -0
  414. nat/utils/reactive/__init__.py +0 -0
  415. nat/utils/reactive/base/__init__.py +0 -0
  416. nat/utils/reactive/base/observable_base.py +65 -0
  417. nat/utils/reactive/base/observer_base.py +55 -0
  418. nat/utils/reactive/base/subject_base.py +79 -0
  419. nat/utils/reactive/observable.py +59 -0
  420. nat/utils/reactive/observer.py +76 -0
  421. nat/utils/reactive/subject.py +131 -0
  422. nat/utils/reactive/subscription.py +49 -0
  423. nat/utils/settings/__init__.py +0 -0
  424. nat/utils/settings/global_settings.py +197 -0
  425. nat/utils/string_utils.py +38 -0
  426. nat/utils/type_converter.py +290 -0
  427. nat/utils/type_utils.py +484 -0
  428. nat/utils/url_utils.py +27 -0
  429. nvidia_nat-1.2.0.dist-info/METADATA +365 -0
  430. nvidia_nat-1.2.0.dist-info/RECORD +435 -0
  431. nvidia_nat-1.2.0.dist-info/WHEEL +5 -0
  432. nvidia_nat-1.2.0.dist-info/entry_points.txt +21 -0
  433. nvidia_nat-1.2.0.dist-info/licenses/LICENSE-3rd-party.txt +5478 -0
  434. nvidia_nat-1.2.0.dist-info/licenses/LICENSE.md +201 -0
  435. nvidia_nat-1.2.0.dist-info/top_level.txt +2 -0
@@ -0,0 +1,322 @@
1
+ # SPDX-FileCopyrightText: Copyright (c) 2025, NVIDIA CORPORATION & AFFILIATES. All rights reserved.
2
+ # SPDX-License-Identifier: Apache-2.0
3
+ #
4
+ # Licensed under the Apache License, Version 2.0 (the "License");
5
+ # you may not use this file except in compliance with the License.
6
+ # You may obtain a copy of the License at
7
+ #
8
+ # http://www.apache.org/licenses/LICENSE-2.0
9
+ #
10
+ # Unless required by applicable law or agreed to in writing, software
11
+ # distributed under the License is distributed on an "AS IS" BASIS,
12
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13
+ # See the License for the specific language governing permissions and
14
+ # limitations under the License.
15
+
16
+ import asyncio
17
+ import logging
18
+ from abc import abstractmethod
19
+ from collections.abc import Coroutine
20
+ from typing import Any
21
+ from typing import Generic
22
+ from typing import TypeVar
23
+
24
+ from nat.builder.context import ContextState
25
+ from nat.data_models.intermediate_step import IntermediateStep
26
+ from nat.observability.exporter.base_exporter import BaseExporter
27
+ from nat.observability.mixin.type_introspection_mixin import TypeIntrospectionMixin
28
+ from nat.observability.processor.callback_processor import CallbackProcessor
29
+ from nat.observability.processor.processor import Processor
30
+ from nat.utils.type_utils import DecomposedType
31
+ from nat.utils.type_utils import override
32
+
33
+ PipelineInputT = TypeVar("PipelineInputT")
34
+ PipelineOutputT = TypeVar("PipelineOutputT")
35
+
36
+ logger = logging.getLogger(__name__)
37
+
38
+
39
+ class ProcessingExporter(Generic[PipelineInputT, PipelineOutputT], BaseExporter, TypeIntrospectionMixin):
40
+ """A base class for telemetry exporters with processing pipeline support.
41
+
42
+ This class extends BaseExporter to add processor pipeline functionality.
43
+ It manages a chain of processors that can transform items before export.
44
+
45
+ The generic types work as follows:
46
+ - PipelineInputT: The type of items that enter the processing pipeline (e.g., Span)
47
+ - PipelineOutputT: The type of items after processing through the pipeline (e.g., converted format)
48
+
49
+ Key Features:
50
+ - Processor pipeline management (add, remove, clear)
51
+ - Type compatibility validation between processors
52
+ - Pipeline processing with error handling
53
+ - Automatic type validation before export
54
+ """
55
+
56
+ def __init__(self, context_state: ContextState | None = None):
57
+ """Initialize the processing exporter.
58
+
59
+ Args:
60
+ context_state: The context state to use for the exporter.
61
+ """
62
+ super().__init__(context_state)
63
+ self._processors: list[Processor] = [] # List of processors that implement process(item) -> item
64
+
65
+ def add_processor(self, processor: Processor) -> None:
66
+ """Add a processor to the processing pipeline.
67
+
68
+ Processors are executed in the order they are added.
69
+ Processors can transform between any types (T -> U).
70
+
71
+ Args:
72
+ processor: The processor to add to the pipeline
73
+ """
74
+
75
+ # Check if the processor is compatible with the last processor in the pipeline
76
+ if len(self._processors) > 0:
77
+ try:
78
+ if not issubclass(processor.input_class, self._processors[-1].output_class):
79
+ raise ValueError(f"Processor {processor.__class__.__name__} input type {processor.input_type} "
80
+ f"is not compatible with the {self._processors[-1].__class__.__name__} "
81
+ f"output type {self._processors[-1].output_type}")
82
+ except TypeError:
83
+ # Handle cases where input_class or output_class are generic types that can't be used with issubclass
84
+ # Fall back to type comparison for generic types
85
+ logger.warning(
86
+ "Cannot use issubclass() for type compatibility check between "
87
+ "%s (%s) and %s (%s). Skipping compatibility check.",
88
+ processor.__class__.__name__,
89
+ processor.input_type,
90
+ self._processors[-1].__class__.__name__,
91
+ self._processors[-1].output_type)
92
+ self._processors.append(processor)
93
+
94
+ # Set up pipeline continuation callback for processors that support it
95
+ if isinstance(processor, CallbackProcessor):
96
+ # Create a callback that continues processing through the rest of the pipeline
97
+ async def pipeline_callback(item):
98
+ await self._continue_pipeline_after(processor, item)
99
+
100
+ processor.set_done_callback(pipeline_callback)
101
+
102
+ def remove_processor(self, processor: Processor) -> None:
103
+ """Remove a processor from the processing pipeline.
104
+
105
+ Args:
106
+ processor: The processor to remove from the pipeline
107
+ """
108
+ if processor in self._processors:
109
+ self._processors.remove(processor)
110
+
111
+ def clear_processors(self) -> None:
112
+ """Clear all processors from the pipeline."""
113
+ self._processors.clear()
114
+
115
    async def _pre_start(self) -> None:
        """Validate pipeline boundary types before the exporter starts.

        Checks that the first processor accepts the exporter's declared input
        type and that the last processor's output is compatible with the
        exporter's output type. A proven incompatibility raises ValueError;
        checks that cannot be performed (generic types) are skipped with a
        warning. With no processors registered, this is a no-op.
        """
        if len(self._processors) > 0:
            first_processor = self._processors[0]
            last_processor = self._processors[-1]

            # validate that the first processor's input type is compatible with the exporter's input type
            try:
                if not issubclass(first_processor.input_class, self.input_class):
                    raise ValueError(f"Processor {first_processor.__class__.__name__} input type "
                                     f"{first_processor.input_type} is not compatible with the "
                                     f"{self.input_type} input type")
            except TypeError as e:
                # issubclass() raises TypeError for parameterized generics; treat as "unverifiable".
                logger.warning(
                    "Cannot validate type compatibility between %s (%s) "
                    "and exporter (%s): %s. Skipping validation.",
                    first_processor.__class__.__name__,
                    first_processor.input_type,
                    self.input_type,
                    e)

            # Validate that the last processor's output type is compatible with the exporter's output type
            # NOTE(review): this branch uses DecomposedType.is_type_compatible rather than
            # issubclass; presumably it tolerates generics, so the TypeError fallback below
            # may be unreachable here — confirm against DecomposedType's contract.
            try:
                if not DecomposedType.is_type_compatible(last_processor.output_type, self.output_type):
                    raise ValueError(f"Processor {last_processor.__class__.__name__} output type "
                                     f"{last_processor.output_type} is not compatible with the "
                                     f"{self.output_type} output type")
            except TypeError as e:
                # Mirrors the fallback above: skip validation when the types cannot be compared.
                logger.warning(
                    "Cannot validate type compatibility between %s (%s) "
                    "and exporter (%s): %s. Skipping validation.",
                    last_processor.__class__.__name__,
                    last_processor.output_type,
                    self.output_type,
                    e)
151
+
152
+ async def _process_pipeline(self, item: PipelineInputT) -> PipelineOutputT:
153
+ """Process item through all registered processors.
154
+
155
+ Args:
156
+ item (PipelineInputT): The item to process (starts as PipelineInputT, can transform to PipelineOutputT)
157
+
158
+ Returns:
159
+ PipelineOutputT: The processed item after running through all processors
160
+ """
161
+ return await self._process_through_processors(self._processors, item) # type: ignore
162
+
163
+ async def _process_through_processors(self, processors: list[Processor], item: Any) -> Any:
164
+ """Process an item through a list of processors.
165
+
166
+ Args:
167
+ processors (list[Processor]): List of processors to run the item through
168
+ item (Any): The item to process
169
+
170
+ Returns:
171
+ The processed item after running through all processors
172
+ """
173
+ processed_item = item
174
+ for processor in processors:
175
+ try:
176
+ processed_item = await processor.process(processed_item)
177
+ except Exception as e:
178
+ logger.error("Error in processor %s: %s", processor.__class__.__name__, e, exc_info=True)
179
+ # Continue with unprocessed item rather than failing
180
+ return processed_item
181
+
182
+ async def _export_final_item(self, processed_item: Any, raise_on_invalid: bool = False) -> None:
183
+ """Export a processed item with proper type handling.
184
+
185
+ Args:
186
+ processed_item (Any): The item to export
187
+ raise_on_invalid (bool): If True, raise ValueError for invalid types instead of logging warning
188
+ """
189
+ if isinstance(processed_item, list):
190
+ if len(processed_item) > 0:
191
+ await self.export_processed(processed_item)
192
+ else:
193
+ logger.debug("Skipping export of empty batch")
194
+ elif isinstance(processed_item, self.output_class):
195
+ await self.export_processed(processed_item)
196
+ else:
197
+ if raise_on_invalid:
198
+ raise ValueError(f"Processed item {processed_item} is not a valid output type. "
199
+ f"Expected {self.output_class} or list[{self.output_class}]")
200
+ logger.warning("Processed item %s is not a valid output type for export", processed_item)
201
+
202
+ async def _continue_pipeline_after(self, source_processor: Processor, item: Any) -> None:
203
+ """Continue processing an item through the pipeline after a specific processor.
204
+
205
+ This is used when processors (like BatchingProcessor) need to inject items
206
+ back into the pipeline flow to continue through downstream processors.
207
+
208
+ Args:
209
+ source_processor (Processor): The processor that generated the item
210
+ item (Any): The item to continue processing through the remaining pipeline
211
+ """
212
+ try:
213
+ # Find the source processor's position
214
+ try:
215
+ source_index = self._processors.index(source_processor)
216
+ except ValueError:
217
+ logger.error("Source processor %s not found in pipeline", source_processor.__class__.__name__)
218
+ return
219
+
220
+ # Process through remaining processors (skip the source processor)
221
+ remaining_processors = self._processors[source_index + 1:]
222
+ processed_item = await self._process_through_processors(remaining_processors, item)
223
+
224
+ # Export the final result
225
+ await self._export_final_item(processed_item)
226
+
227
+ except Exception as e:
228
+ logger.error("Failed to continue pipeline processing after %s: %s",
229
+ source_processor.__class__.__name__,
230
+ e,
231
+ exc_info=True)
232
+
233
+ async def _export_with_processing(self, item: PipelineInputT) -> None:
234
+ """Export an item after processing it through the pipeline.
235
+
236
+ Args:
237
+ item: The item to export
238
+ """
239
+ try:
240
+ # Then, run through the processor pipeline
241
+ final_item: PipelineOutputT = await self._process_pipeline(item)
242
+
243
+ # Handle different output types from batch processors
244
+ if isinstance(final_item, list) and len(final_item) == 0:
245
+ logger.debug("Skipping export of empty batch from processor pipeline")
246
+ return
247
+
248
+ await self._export_final_item(final_item, raise_on_invalid=True)
249
+
250
+ except Exception as e:
251
+ logger.error("Failed to export item '%s': %s", item, e, exc_info=True)
252
+ raise
253
+
254
+ @override
255
+ def export(self, event: IntermediateStep) -> None:
256
+ """Export an IntermediateStep event through the processing pipeline.
257
+
258
+ This method converts the IntermediateStep to the expected PipelineInputT type,
259
+ processes it through the pipeline, and exports the result.
260
+
261
+ Args:
262
+ event (IntermediateStep): The event to be exported.
263
+ """
264
+ # Convert IntermediateStep to PipelineInputT and create export task
265
+ if isinstance(event, self.input_class):
266
+ input_item: PipelineInputT = event # type: ignore
267
+ coro = self._export_with_processing(input_item)
268
+ self._create_export_task(coro)
269
+ else:
270
+ logger.warning("Event %s is not compatible with input type %s", event, self.input_type)
271
+
272
@abstractmethod
async def export_processed(self, item: PipelineOutputT | list[PipelineOutputT]) -> None:
    """Export the processed item.

    This method must be implemented by concrete exporters to handle
    the actual export logic after the item has been processed through
    the pipeline.

    Args:
        item: The processed item to export — either a single PipelineOutputT
            or a batch (list) of them when a batching processor is in the
            pipeline.
    """
    pass
283
+
284
+ def _create_export_task(self, coro: Coroutine):
285
+ """Create task with minimal overhead but proper tracking."""
286
+ if not self._running:
287
+ logger.warning("%s: Attempted to create export task while not running", self.name)
288
+ return
289
+
290
+ try:
291
+ task = asyncio.create_task(coro)
292
+ self._tasks.add(task)
293
+ task.add_done_callback(self._tasks.discard)
294
+
295
+ except Exception as e:
296
+ logger.error("%s: Failed to create task: %s", self.name, e, exc_info=True)
297
+ raise
298
+
299
@override
async def _cleanup(self):
    """Shut down every shutdown-aware processor, then defer to the parent.

    Each processor owns its own teardown — including flushing any final
    batches through the remaining pipeline via its done callbacks — so
    this only invokes ``shutdown()`` where available and gathers results.
    """
    pending = []
    for proc in getattr(self, '_processors', []):
        shutdown = getattr(proc, 'shutdown', None)
        if shutdown:
            logger.debug("Shutting down processor: %s", proc.__class__.__name__)
            pending.append(shutdown())

    if pending:
        try:
            # return_exceptions=True: one failing processor must not block the rest.
            await asyncio.gather(*pending, return_exceptions=True)
            logger.debug("Successfully shut down %d processors", len(pending))
        except Exception as exc:
            logger.error("Error shutting down processors: %s", exc, exc_info=True)

    # Let the base exporter finish its own teardown.
    await super()._cleanup()
@@ -0,0 +1,52 @@
1
+ # SPDX-FileCopyrightText: Copyright (c) 2025, NVIDIA CORPORATION & AFFILIATES. All rights reserved.
2
+ # SPDX-License-Identifier: Apache-2.0
3
+ #
4
+ # Licensed under the Apache License, Version 2.0 (the "License");
5
+ # you may not use this file except in compliance with the License.
6
+ # You may obtain a copy of the License at
7
+ #
8
+ # http://www.apache.org/licenses/LICENSE-2.0
9
+ #
10
+ # Unless required by applicable law or agreed to in writing, software
11
+ # distributed under the License is distributed on an "AS IS" BASIS,
12
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13
+ # See the License for the specific language governing permissions and
14
+ # limitations under the License.
15
+
16
+ import logging
17
+ from abc import abstractmethod
18
+ from typing import TypeVar
19
+
20
+ from nat.data_models.intermediate_step import IntermediateStep
21
+ from nat.observability.exporter.processing_exporter import ProcessingExporter
22
+ from nat.utils.type_utils import override
23
+
24
# Module-level logger for this exporter module.
logger = logging.getLogger(__name__)

# InputT: the event type entering the processing pipeline.
# OutputT: the processed type handed to export_processed().
InputT = TypeVar("InputT")
OutputT = TypeVar("OutputT")
28
+
29
+
30
class RawExporter(ProcessingExporter[InputT, OutputT]):
    """Base class for exporters that consume raw intermediate steps.

    Concrete telemetry exporters subclass this when they want to work
    directly with IntermediateStep objects, optionally transforming them
    through the inherited processing pipeline before export.

    The flow is: IntermediateStep -> [Processing Pipeline] -> OutputT -> Export

    Args:
        context_state (ContextState, optional): The context state to use for the exporter. Defaults to None.
    """

    @abstractmethod
    async def export_processed(self, item: OutputT):
        pass

    @override
    def export(self, event: IntermediateStep):
        # Only genuine IntermediateStep events enter the pipeline;
        # anything else is ignored silently.
        if isinstance(event, IntermediateStep):
            self._create_export_task(self._export_with_processing(event))  # type: ignore
@@ -0,0 +1,288 @@
1
+ # SPDX-FileCopyrightText: Copyright (c) 2024-2025, NVIDIA CORPORATION & AFFILIATES. All rights reserved.
2
+ # SPDX-License-Identifier: Apache-2.0
3
+ #
4
+ # Licensed under the Apache License, Version 2.0 (the "License");
5
+ # you may not use this file except in compliance with the License.
6
+ # You may obtain a copy of the License at
7
+ #
8
+ # http://www.apache.org/licenses/LICENSE-2.0
9
+ #
10
+ # Unless required by applicable law or agreed to in writing, software
11
+ # distributed under the License is distributed on an "AS IS" BASIS,
12
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13
+ # See the License for the specific language governing permissions and
14
+ # limitations under the License.
15
+
16
+ import logging
17
+ import os
18
+ import re
19
+ import typing
20
+ from abc import abstractmethod
21
+ from typing import TypeVar
22
+
23
+ from nat.data_models.intermediate_step import IntermediateStep
24
+ from nat.data_models.intermediate_step import IntermediateStepState
25
+ from nat.data_models.intermediate_step import TraceMetadata
26
+ from nat.data_models.span import MimeTypes
27
+ from nat.data_models.span import Span
28
+ from nat.data_models.span import SpanAttributes
29
+ from nat.data_models.span import SpanContext
30
+ from nat.data_models.span import event_type_to_span_kind
31
+ from nat.observability.exporter.base_exporter import IsolatedAttribute
32
+ from nat.observability.exporter.processing_exporter import ProcessingExporter
33
+ from nat.observability.mixin.serialize_mixin import SerializeMixin
34
+ from nat.observability.utils.dict_utils import merge_dicts
35
+ from nat.observability.utils.time_utils import ns_timestamp
36
+ from nat.utils.type_utils import override
37
+
38
+ if typing.TYPE_CHECKING:
39
+ from nat.builder.context import ContextState
40
+
41
# Module-level logger for span export diagnostics.
logger = logging.getLogger(__name__)

# InputSpanT: span type entering the pipeline (typically Span).
# OutputSpanT: span type after pipeline processing (e.g. an OTel span).
InputSpanT = TypeVar("InputSpanT")
OutputSpanT = TypeVar("OutputSpanT")
45
+
46
+
47
class SpanExporter(ProcessingExporter[InputSpanT, OutputSpanT], SerializeMixin):
    """Abstract base class for span exporters with processing pipeline support.

    This class specializes ProcessingExporter for span-based telemetry export. It converts
    IntermediateStep events into Span objects and supports processing pipelines for
    span transformation before export.

    The generic types work as follows:
    - InputSpanT: The type of spans that enter the processing pipeline (typically Span)
    - OutputSpanT: The type of spans after processing through the pipeline (e.g., OtelSpan)

    Key Features:
    - Automatic span creation from IntermediateStep events
    - Span lifecycle management (start/end event tracking)
    - Processing pipeline support via ProcessingExporter
    - Metadata and attribute handling
    - Usage information tracking
    - Automatic isolation of mutable state for concurrent execution using descriptors

    Inheritance Hierarchy:
    - BaseExporter: Core event subscription and lifecycle management + DescriptorIsolationMixin
    - ProcessingExporter: Adds processor pipeline functionality
    - SpanExporter: Specializes for span creation and export

    Event Processing Flow:
    1. IntermediateStep (START) → Create Span → Add to tracking
    2. IntermediateStep (END) → Complete Span → Process through pipeline → Export

    Parameters
    ----------
    context_state: `ContextState`, optional
        The context state to use for the exporter. Defaults to None.
    span_prefix: `str`, optional
        The prefix name to use for span attributes. If `None` the value of the `NAT_SPAN_PREFIX` environment
        variable is used. Defaults to `"nat"` if neither are defined.
    """

    # Use descriptors for automatic isolation of span-specific state
    _outstanding_spans: IsolatedAttribute[dict] = IsolatedAttribute(dict)
    _span_stack: IsolatedAttribute[dict] = IsolatedAttribute(dict)
    _metadata_stack: IsolatedAttribute[dict] = IsolatedAttribute(dict)

    def __init__(self, context_state: "ContextState | None" = None, span_prefix: str | None = None):
        super().__init__(context_state=context_state)
        if span_prefix is None:
            # Fall back to the env var, then to "nat"; strip() guards against a
            # whitespace-only environment value.
            span_prefix = os.getenv("NAT_SPAN_PREFIX", "nat").strip() or "nat"

        self._span_prefix = span_prefix

    @abstractmethod
    async def export_processed(self, item: OutputSpanT) -> None:
        """Export the processed span.

        Args:
            item (OutputSpanT): The processed span to export.
        """
        pass

    @override
    def export(self, event: IntermediateStep) -> None:
        """The main logic that reacts to each IntermediateStep.

        START events open a span; END events close it and trigger export.

        Args:
            event (IntermediateStep): The event to process.
        """
        if not isinstance(event, IntermediateStep):
            return

        if (event.event_state == IntermediateStepState.START):
            self._process_start_event(event)
        elif (event.event_state == IntermediateStepState.END):
            self._process_end_event(event)

    def _process_start_event(self, event: IntermediateStep):
        """Process the start event of an intermediate step.

        Creates a new Span (parented under the matching outstanding span, if
        any), records its input payload and metadata, and registers it in the
        tracking dicts until the END event arrives.

        Args:
            event (IntermediateStep): The event to process.
        """

        parent_span = None
        span_ctx = None

        # Look up the parent span to establish hierarchy
        # event.parent_id is the UUID of the last START step with a different UUID from current step
        # This maintains proper parent-child relationships in the span tree
        # Skip lookup if parent_id is "root" (indicates this is a top-level span)
        if len(self._span_stack) > 0 and event.parent_id and event.parent_id != "root":

            parent_span = self._span_stack.get(event.parent_id, None)
            if parent_span is None:
                logger.warning("No parent span found for step %s", event.UUID)
                return

            # Copy so the child holds a stable snapshot; share the trace id so
            # parent and child land in the same trace.
            parent_span = parent_span.model_copy() if isinstance(parent_span, Span) else None
            if parent_span and parent_span.context:
                span_ctx = SpanContext(trace_id=parent_span.context.trace_id)

        # Extract start/end times from the step
        # By convention, `span_event_timestamp` is the time we started, `event_timestamp` is the time we ended.
        # If span_event_timestamp is missing, we default to event_timestamp (meaning zero-length).
        s_ts = event.payload.span_event_timestamp or event.payload.event_timestamp
        start_ns = ns_timestamp(s_ts)

        # Optional: embed the LLM/tool name if present
        if event.payload.name:
            sub_span_name = f"{event.payload.name}"
        else:
            sub_span_name = f"{event.payload.event_type}"

        sub_span = Span(name=sub_span_name,
                        parent=parent_span,
                        context=span_ctx,
                        attributes={
                            f"{self._span_prefix}.event_type":
                                event.payload.event_type.value,
                            f"{self._span_prefix}.function.id":
                                event.function_ancestry.function_id if event.function_ancestry else "unknown",
                            f"{self._span_prefix}.function.name":
                                event.function_ancestry.function_name if event.function_ancestry else "unknown",
                            f"{self._span_prefix}.subspan.name":
                                event.payload.name or "",
                            f"{self._span_prefix}.event_timestamp":
                                event.event_timestamp,
                            f"{self._span_prefix}.framework":
                                event.payload.framework.value if event.payload.framework else "unknown",
                        },
                        start_time=start_ns)

        span_kind = event_type_to_span_kind(event.event_type)
        sub_span.set_attribute(f"{self._span_prefix}.span.kind", span_kind.value)

        if event.payload.data and event.payload.data.input:
            # Surface a bare human question when the input embeds a chat transcript;
            # otherwise serialize the whole input payload.
            match = re.search(r"Human:\s*Question:\s*(.*)", str(event.payload.data.input))
            if match:
                human_question = match.group(1).strip()
                sub_span.set_attribute(SpanAttributes.INPUT_VALUE.value, human_question)
            else:
                serialized_input, is_json = self._serialize_payload(event.payload.data.input)
                sub_span.set_attribute(SpanAttributes.INPUT_VALUE.value, serialized_input)
                sub_span.set_attribute(SpanAttributes.INPUT_MIME_TYPE.value,
                                       MimeTypes.JSON.value if is_json else MimeTypes.TEXT.value)

        # Add metadata to the metadata stack
        start_metadata = event.payload.metadata or {}

        if isinstance(start_metadata, dict):
            self._metadata_stack[event.UUID] = start_metadata  # type: ignore
        elif isinstance(start_metadata, TraceMetadata):
            self._metadata_stack[event.UUID] = start_metadata.model_dump()  # type: ignore
        else:
            # Bail before registering the span so the END event warns and skips it too.
            logger.warning("Invalid metadata type for step %s", event.UUID)
            return

        self._span_stack[event.UUID] = sub_span  # type: ignore
        self._outstanding_spans[event.UUID] = sub_span  # type: ignore

        logger.debug(
            "Added span to tracking (outstanding: %d, stack: %d, event_id: %s)",
            len(self._outstanding_spans),  # type: ignore
            len(self._span_stack),  # type: ignore
            event.UUID)

    def _process_end_event(self, event: IntermediateStep):
        """Process the end event of an intermediate step.

        Completes the matching outstanding span: attaches usage/output
        attributes, merges START and END metadata, ends the span, and
        schedules it for export through the processing pipeline.

        Args:
            event (IntermediateStep): The event to process.
        """

        # Find the subspan that was created in the start event
        sub_span: Span | None = self._outstanding_spans.pop(event.UUID, None)  # type: ignore

        if sub_span is None:
            logger.warning("No subspan found for step %s", event.UUID)
            return

        self._span_stack.pop(event.UUID, None)  # type: ignore

        # Optionally add more attributes from usage_info or data
        usage_info = event.payload.usage_info
        if usage_info:
            sub_span.set_attribute(SpanAttributes.NAT_USAGE_NUM_LLM_CALLS.value,
                                   usage_info.num_llm_calls if usage_info.num_llm_calls else 0)
            sub_span.set_attribute(SpanAttributes.NAT_USAGE_SECONDS_BETWEEN_CALLS.value,
                                   usage_info.seconds_between_calls if usage_info.seconds_between_calls else 0)
            sub_span.set_attribute(SpanAttributes.LLM_TOKEN_COUNT_PROMPT.value,
                                   usage_info.token_usage.prompt_tokens if usage_info.token_usage else 0)
            sub_span.set_attribute(SpanAttributes.LLM_TOKEN_COUNT_COMPLETION.value,
                                   usage_info.token_usage.completion_tokens if usage_info.token_usage else 0)
            sub_span.set_attribute(SpanAttributes.LLM_TOKEN_COUNT_TOTAL.value,
                                   usage_info.token_usage.total_tokens if usage_info.token_usage else 0)

        if event.payload.data and event.payload.data.output is not None:
            serialized_output, is_json = self._serialize_payload(event.payload.data.output)
            sub_span.set_attribute(SpanAttributes.OUTPUT_VALUE.value, serialized_output)
            sub_span.set_attribute(SpanAttributes.OUTPUT_MIME_TYPE.value,
                                   MimeTypes.JSON.value if is_json else MimeTypes.TEXT.value)

        # Merge metadata from start event with end event metadata.
        # BUGFIX: pop with a default — the previous bare pop(event.UUID) raised
        # KeyError on a missing key, making the None-check below unreachable.
        start_metadata = self._metadata_stack.pop(event.UUID, None)  # type: ignore

        if start_metadata is None:
            logger.warning("No metadata found for step %s", event.UUID)
            return

        end_metadata = event.payload.metadata or {}

        if not isinstance(end_metadata, (dict, TraceMetadata)):
            logger.warning("Invalid metadata type for step %s", event.UUID)
            return

        if isinstance(end_metadata, TraceMetadata):
            end_metadata = end_metadata.model_dump()

        merged_metadata = merge_dicts(start_metadata, end_metadata)
        serialized_metadata, is_json = self._serialize_payload(merged_metadata)
        sub_span.set_attribute(f"{self._span_prefix}.metadata", serialized_metadata)
        sub_span.set_attribute(f"{self._span_prefix}.metadata.mime_type",
                               MimeTypes.JSON.value if is_json else MimeTypes.TEXT.value)

        end_ns = ns_timestamp(event.payload.event_timestamp)

        # End the subspan
        sub_span.end(end_time=end_ns)

        # Export the span with processing pipeline
        self._create_export_task(self._export_with_processing(sub_span))  # type: ignore

    @override
    async def _cleanup(self):
        """Clean up any remaining spans.

        Any spans still open at shutdown are force-ended (with a warning) and
        all tracking state is cleared before deferring to the parent cleanup.
        """
        if self._outstanding_spans:  # type: ignore
            logger.warning("Not all spans were closed. Remaining: %s", self._outstanding_spans)  # type: ignore

        for span_info in self._outstanding_spans.values():  # type: ignore
            span_info.end()

        self._outstanding_spans.clear()  # type: ignore
        self._span_stack.clear()  # type: ignore
        self._metadata_stack.clear()  # type: ignore
        await super()._cleanup()