nvidia-nat 1.2.0rc5__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (435)
  1. aiq/agent/__init__.py +0 -0
  2. aiq/agent/base.py +239 -0
  3. aiq/agent/dual_node.py +67 -0
  4. aiq/agent/react_agent/__init__.py +0 -0
  5. aiq/agent/react_agent/agent.py +355 -0
  6. aiq/agent/react_agent/output_parser.py +104 -0
  7. aiq/agent/react_agent/prompt.py +41 -0
  8. aiq/agent/react_agent/register.py +149 -0
  9. aiq/agent/reasoning_agent/__init__.py +0 -0
  10. aiq/agent/reasoning_agent/reasoning_agent.py +225 -0
  11. aiq/agent/register.py +23 -0
  12. aiq/agent/rewoo_agent/__init__.py +0 -0
  13. aiq/agent/rewoo_agent/agent.py +411 -0
  14. aiq/agent/rewoo_agent/prompt.py +108 -0
  15. aiq/agent/rewoo_agent/register.py +158 -0
  16. aiq/agent/tool_calling_agent/__init__.py +0 -0
  17. aiq/agent/tool_calling_agent/agent.py +119 -0
  18. aiq/agent/tool_calling_agent/register.py +106 -0
  19. aiq/authentication/__init__.py +14 -0
  20. aiq/authentication/api_key/__init__.py +14 -0
  21. aiq/authentication/api_key/api_key_auth_provider.py +96 -0
  22. aiq/authentication/api_key/api_key_auth_provider_config.py +124 -0
  23. aiq/authentication/api_key/register.py +26 -0
  24. aiq/authentication/exceptions/__init__.py +14 -0
  25. aiq/authentication/exceptions/api_key_exceptions.py +38 -0
  26. aiq/authentication/http_basic_auth/__init__.py +0 -0
  27. aiq/authentication/http_basic_auth/http_basic_auth_provider.py +81 -0
  28. aiq/authentication/http_basic_auth/register.py +30 -0
  29. aiq/authentication/interfaces.py +93 -0
  30. aiq/authentication/oauth2/__init__.py +14 -0
  31. aiq/authentication/oauth2/oauth2_auth_code_flow_provider.py +107 -0
  32. aiq/authentication/oauth2/oauth2_auth_code_flow_provider_config.py +39 -0
  33. aiq/authentication/oauth2/register.py +25 -0
  34. aiq/authentication/register.py +21 -0
  35. aiq/builder/__init__.py +0 -0
  36. aiq/builder/builder.py +285 -0
  37. aiq/builder/component_utils.py +316 -0
  38. aiq/builder/context.py +264 -0
  39. aiq/builder/embedder.py +24 -0
  40. aiq/builder/eval_builder.py +161 -0
  41. aiq/builder/evaluator.py +29 -0
  42. aiq/builder/framework_enum.py +24 -0
  43. aiq/builder/front_end.py +73 -0
  44. aiq/builder/function.py +344 -0
  45. aiq/builder/function_base.py +380 -0
  46. aiq/builder/function_info.py +627 -0
  47. aiq/builder/intermediate_step_manager.py +174 -0
  48. aiq/builder/llm.py +25 -0
  49. aiq/builder/retriever.py +25 -0
  50. aiq/builder/user_interaction_manager.py +74 -0
  51. aiq/builder/workflow.py +148 -0
  52. aiq/builder/workflow_builder.py +1117 -0
  53. aiq/cli/__init__.py +14 -0
  54. aiq/cli/cli_utils/__init__.py +0 -0
  55. aiq/cli/cli_utils/config_override.py +231 -0
  56. aiq/cli/cli_utils/validation.py +37 -0
  57. aiq/cli/commands/__init__.py +0 -0
  58. aiq/cli/commands/configure/__init__.py +0 -0
  59. aiq/cli/commands/configure/channel/__init__.py +0 -0
  60. aiq/cli/commands/configure/channel/add.py +28 -0
  61. aiq/cli/commands/configure/channel/channel.py +36 -0
  62. aiq/cli/commands/configure/channel/remove.py +30 -0
  63. aiq/cli/commands/configure/channel/update.py +30 -0
  64. aiq/cli/commands/configure/configure.py +33 -0
  65. aiq/cli/commands/evaluate.py +139 -0
  66. aiq/cli/commands/info/__init__.py +14 -0
  67. aiq/cli/commands/info/info.py +39 -0
  68. aiq/cli/commands/info/list_channels.py +32 -0
  69. aiq/cli/commands/info/list_components.py +129 -0
  70. aiq/cli/commands/info/list_mcp.py +213 -0
  71. aiq/cli/commands/registry/__init__.py +14 -0
  72. aiq/cli/commands/registry/publish.py +88 -0
  73. aiq/cli/commands/registry/pull.py +118 -0
  74. aiq/cli/commands/registry/registry.py +38 -0
  75. aiq/cli/commands/registry/remove.py +108 -0
  76. aiq/cli/commands/registry/search.py +155 -0
  77. aiq/cli/commands/sizing/__init__.py +14 -0
  78. aiq/cli/commands/sizing/calc.py +297 -0
  79. aiq/cli/commands/sizing/sizing.py +27 -0
  80. aiq/cli/commands/start.py +246 -0
  81. aiq/cli/commands/uninstall.py +81 -0
  82. aiq/cli/commands/validate.py +47 -0
  83. aiq/cli/commands/workflow/__init__.py +14 -0
  84. aiq/cli/commands/workflow/templates/__init__.py.j2 +0 -0
  85. aiq/cli/commands/workflow/templates/config.yml.j2 +16 -0
  86. aiq/cli/commands/workflow/templates/pyproject.toml.j2 +22 -0
  87. aiq/cli/commands/workflow/templates/register.py.j2 +5 -0
  88. aiq/cli/commands/workflow/templates/workflow.py.j2 +36 -0
  89. aiq/cli/commands/workflow/workflow.py +37 -0
  90. aiq/cli/commands/workflow/workflow_commands.py +313 -0
  91. aiq/cli/entrypoint.py +135 -0
  92. aiq/cli/main.py +44 -0
  93. aiq/cli/register_workflow.py +488 -0
  94. aiq/cli/type_registry.py +1000 -0
  95. aiq/data_models/__init__.py +14 -0
  96. aiq/data_models/api_server.py +694 -0
  97. aiq/data_models/authentication.py +231 -0
  98. aiq/data_models/common.py +171 -0
  99. aiq/data_models/component.py +54 -0
  100. aiq/data_models/component_ref.py +168 -0
  101. aiq/data_models/config.py +406 -0
  102. aiq/data_models/dataset_handler.py +123 -0
  103. aiq/data_models/discovery_metadata.py +335 -0
  104. aiq/data_models/embedder.py +27 -0
  105. aiq/data_models/evaluate.py +127 -0
  106. aiq/data_models/evaluator.py +26 -0
  107. aiq/data_models/front_end.py +26 -0
  108. aiq/data_models/function.py +30 -0
  109. aiq/data_models/function_dependencies.py +72 -0
  110. aiq/data_models/interactive.py +246 -0
  111. aiq/data_models/intermediate_step.py +302 -0
  112. aiq/data_models/invocation_node.py +38 -0
  113. aiq/data_models/llm.py +27 -0
  114. aiq/data_models/logging.py +26 -0
  115. aiq/data_models/memory.py +27 -0
  116. aiq/data_models/object_store.py +44 -0
  117. aiq/data_models/profiler.py +54 -0
  118. aiq/data_models/registry_handler.py +26 -0
  119. aiq/data_models/retriever.py +30 -0
  120. aiq/data_models/retry_mixin.py +35 -0
  121. aiq/data_models/span.py +187 -0
  122. aiq/data_models/step_adaptor.py +64 -0
  123. aiq/data_models/streaming.py +33 -0
  124. aiq/data_models/swe_bench_model.py +54 -0
  125. aiq/data_models/telemetry_exporter.py +26 -0
  126. aiq/data_models/ttc_strategy.py +30 -0
  127. aiq/embedder/__init__.py +0 -0
  128. aiq/embedder/langchain_client.py +41 -0
  129. aiq/embedder/nim_embedder.py +59 -0
  130. aiq/embedder/openai_embedder.py +43 -0
  131. aiq/embedder/register.py +24 -0
  132. aiq/eval/__init__.py +14 -0
  133. aiq/eval/config.py +60 -0
  134. aiq/eval/dataset_handler/__init__.py +0 -0
  135. aiq/eval/dataset_handler/dataset_downloader.py +106 -0
  136. aiq/eval/dataset_handler/dataset_filter.py +52 -0
  137. aiq/eval/dataset_handler/dataset_handler.py +254 -0
  138. aiq/eval/evaluate.py +506 -0
  139. aiq/eval/evaluator/__init__.py +14 -0
  140. aiq/eval/evaluator/base_evaluator.py +73 -0
  141. aiq/eval/evaluator/evaluator_model.py +45 -0
  142. aiq/eval/intermediate_step_adapter.py +99 -0
  143. aiq/eval/rag_evaluator/__init__.py +0 -0
  144. aiq/eval/rag_evaluator/evaluate.py +178 -0
  145. aiq/eval/rag_evaluator/register.py +143 -0
  146. aiq/eval/register.py +23 -0
  147. aiq/eval/remote_workflow.py +133 -0
  148. aiq/eval/runners/__init__.py +14 -0
  149. aiq/eval/runners/config.py +39 -0
  150. aiq/eval/runners/multi_eval_runner.py +54 -0
  151. aiq/eval/runtime_event_subscriber.py +52 -0
  152. aiq/eval/swe_bench_evaluator/__init__.py +0 -0
  153. aiq/eval/swe_bench_evaluator/evaluate.py +215 -0
  154. aiq/eval/swe_bench_evaluator/register.py +36 -0
  155. aiq/eval/trajectory_evaluator/__init__.py +0 -0
  156. aiq/eval/trajectory_evaluator/evaluate.py +75 -0
  157. aiq/eval/trajectory_evaluator/register.py +40 -0
  158. aiq/eval/tunable_rag_evaluator/__init__.py +0 -0
  159. aiq/eval/tunable_rag_evaluator/evaluate.py +245 -0
  160. aiq/eval/tunable_rag_evaluator/register.py +52 -0
  161. aiq/eval/usage_stats.py +41 -0
  162. aiq/eval/utils/__init__.py +0 -0
  163. aiq/eval/utils/output_uploader.py +140 -0
  164. aiq/eval/utils/tqdm_position_registry.py +40 -0
  165. aiq/eval/utils/weave_eval.py +184 -0
  166. aiq/experimental/__init__.py +0 -0
  167. aiq/experimental/decorators/__init__.py +0 -0
  168. aiq/experimental/decorators/experimental_warning_decorator.py +130 -0
  169. aiq/experimental/test_time_compute/__init__.py +0 -0
  170. aiq/experimental/test_time_compute/editing/__init__.py +0 -0
  171. aiq/experimental/test_time_compute/editing/iterative_plan_refinement_editor.py +147 -0
  172. aiq/experimental/test_time_compute/editing/llm_as_a_judge_editor.py +204 -0
  173. aiq/experimental/test_time_compute/editing/motivation_aware_summarization.py +107 -0
  174. aiq/experimental/test_time_compute/functions/__init__.py +0 -0
  175. aiq/experimental/test_time_compute/functions/execute_score_select_function.py +105 -0
  176. aiq/experimental/test_time_compute/functions/its_tool_orchestration_function.py +205 -0
  177. aiq/experimental/test_time_compute/functions/its_tool_wrapper_function.py +146 -0
  178. aiq/experimental/test_time_compute/functions/plan_select_execute_function.py +224 -0
  179. aiq/experimental/test_time_compute/models/__init__.py +0 -0
  180. aiq/experimental/test_time_compute/models/editor_config.py +132 -0
  181. aiq/experimental/test_time_compute/models/scoring_config.py +112 -0
  182. aiq/experimental/test_time_compute/models/search_config.py +120 -0
  183. aiq/experimental/test_time_compute/models/selection_config.py +154 -0
  184. aiq/experimental/test_time_compute/models/stage_enums.py +43 -0
  185. aiq/experimental/test_time_compute/models/strategy_base.py +66 -0
  186. aiq/experimental/test_time_compute/models/tool_use_config.py +41 -0
  187. aiq/experimental/test_time_compute/models/ttc_item.py +48 -0
  188. aiq/experimental/test_time_compute/register.py +36 -0
  189. aiq/experimental/test_time_compute/scoring/__init__.py +0 -0
  190. aiq/experimental/test_time_compute/scoring/llm_based_agent_scorer.py +168 -0
  191. aiq/experimental/test_time_compute/scoring/llm_based_plan_scorer.py +168 -0
  192. aiq/experimental/test_time_compute/scoring/motivation_aware_scorer.py +111 -0
  193. aiq/experimental/test_time_compute/search/__init__.py +0 -0
  194. aiq/experimental/test_time_compute/search/multi_llm_planner.py +128 -0
  195. aiq/experimental/test_time_compute/search/multi_query_retrieval_search.py +122 -0
  196. aiq/experimental/test_time_compute/search/single_shot_multi_plan_planner.py +128 -0
  197. aiq/experimental/test_time_compute/selection/__init__.py +0 -0
  198. aiq/experimental/test_time_compute/selection/best_of_n_selector.py +63 -0
  199. aiq/experimental/test_time_compute/selection/llm_based_agent_output_selector.py +131 -0
  200. aiq/experimental/test_time_compute/selection/llm_based_output_merging_selector.py +159 -0
  201. aiq/experimental/test_time_compute/selection/llm_based_plan_selector.py +128 -0
  202. aiq/experimental/test_time_compute/selection/threshold_selector.py +58 -0
  203. aiq/front_ends/__init__.py +14 -0
  204. aiq/front_ends/console/__init__.py +14 -0
  205. aiq/front_ends/console/authentication_flow_handler.py +233 -0
  206. aiq/front_ends/console/console_front_end_config.py +32 -0
  207. aiq/front_ends/console/console_front_end_plugin.py +96 -0
  208. aiq/front_ends/console/register.py +25 -0
  209. aiq/front_ends/cron/__init__.py +14 -0
  210. aiq/front_ends/fastapi/__init__.py +14 -0
  211. aiq/front_ends/fastapi/auth_flow_handlers/__init__.py +0 -0
  212. aiq/front_ends/fastapi/auth_flow_handlers/http_flow_handler.py +27 -0
  213. aiq/front_ends/fastapi/auth_flow_handlers/websocket_flow_handler.py +107 -0
  214. aiq/front_ends/fastapi/fastapi_front_end_config.py +234 -0
  215. aiq/front_ends/fastapi/fastapi_front_end_controller.py +68 -0
  216. aiq/front_ends/fastapi/fastapi_front_end_plugin.py +116 -0
  217. aiq/front_ends/fastapi/fastapi_front_end_plugin_worker.py +1092 -0
  218. aiq/front_ends/fastapi/html_snippets/__init__.py +14 -0
  219. aiq/front_ends/fastapi/html_snippets/auth_code_grant_success.py +35 -0
  220. aiq/front_ends/fastapi/intermediate_steps_subscriber.py +80 -0
  221. aiq/front_ends/fastapi/job_store.py +183 -0
  222. aiq/front_ends/fastapi/main.py +72 -0
  223. aiq/front_ends/fastapi/message_handler.py +298 -0
  224. aiq/front_ends/fastapi/message_validator.py +345 -0
  225. aiq/front_ends/fastapi/register.py +25 -0
  226. aiq/front_ends/fastapi/response_helpers.py +195 -0
  227. aiq/front_ends/fastapi/step_adaptor.py +321 -0
  228. aiq/front_ends/mcp/__init__.py +14 -0
  229. aiq/front_ends/mcp/mcp_front_end_config.py +32 -0
  230. aiq/front_ends/mcp/mcp_front_end_plugin.py +93 -0
  231. aiq/front_ends/mcp/register.py +27 -0
  232. aiq/front_ends/mcp/tool_converter.py +242 -0
  233. aiq/front_ends/register.py +22 -0
  234. aiq/front_ends/simple_base/__init__.py +14 -0
  235. aiq/front_ends/simple_base/simple_front_end_plugin_base.py +54 -0
  236. aiq/llm/__init__.py +0 -0
  237. aiq/llm/aws_bedrock_llm.py +57 -0
  238. aiq/llm/nim_llm.py +46 -0
  239. aiq/llm/openai_llm.py +46 -0
  240. aiq/llm/register.py +23 -0
  241. aiq/llm/utils/__init__.py +14 -0
  242. aiq/llm/utils/env_config_value.py +94 -0
  243. aiq/llm/utils/error.py +17 -0
  244. aiq/memory/__init__.py +20 -0
  245. aiq/memory/interfaces.py +183 -0
  246. aiq/memory/models.py +112 -0
  247. aiq/meta/module_to_distro.json +3 -0
  248. aiq/meta/pypi.md +58 -0
  249. aiq/object_store/__init__.py +20 -0
  250. aiq/object_store/in_memory_object_store.py +76 -0
  251. aiq/object_store/interfaces.py +84 -0
  252. aiq/object_store/models.py +36 -0
  253. aiq/object_store/register.py +20 -0
  254. aiq/observability/__init__.py +14 -0
  255. aiq/observability/exporter/__init__.py +14 -0
  256. aiq/observability/exporter/base_exporter.py +449 -0
  257. aiq/observability/exporter/exporter.py +78 -0
  258. aiq/observability/exporter/file_exporter.py +33 -0
  259. aiq/observability/exporter/processing_exporter.py +322 -0
  260. aiq/observability/exporter/raw_exporter.py +52 -0
  261. aiq/observability/exporter/span_exporter.py +265 -0
  262. aiq/observability/exporter_manager.py +335 -0
  263. aiq/observability/mixin/__init__.py +14 -0
  264. aiq/observability/mixin/batch_config_mixin.py +26 -0
  265. aiq/observability/mixin/collector_config_mixin.py +23 -0
  266. aiq/observability/mixin/file_mixin.py +288 -0
  267. aiq/observability/mixin/file_mode.py +23 -0
  268. aiq/observability/mixin/resource_conflict_mixin.py +134 -0
  269. aiq/observability/mixin/serialize_mixin.py +61 -0
  270. aiq/observability/mixin/type_introspection_mixin.py +183 -0
  271. aiq/observability/processor/__init__.py +14 -0
  272. aiq/observability/processor/batching_processor.py +310 -0
  273. aiq/observability/processor/callback_processor.py +42 -0
  274. aiq/observability/processor/intermediate_step_serializer.py +28 -0
  275. aiq/observability/processor/processor.py +71 -0
  276. aiq/observability/register.py +96 -0
  277. aiq/observability/utils/__init__.py +14 -0
  278. aiq/observability/utils/dict_utils.py +236 -0
  279. aiq/observability/utils/time_utils.py +31 -0
  280. aiq/plugins/.namespace +1 -0
  281. aiq/profiler/__init__.py +0 -0
  282. aiq/profiler/calc/__init__.py +14 -0
  283. aiq/profiler/calc/calc_runner.py +627 -0
  284. aiq/profiler/calc/calculations.py +288 -0
  285. aiq/profiler/calc/data_models.py +188 -0
  286. aiq/profiler/calc/plot.py +345 -0
  287. aiq/profiler/callbacks/__init__.py +0 -0
  288. aiq/profiler/callbacks/agno_callback_handler.py +295 -0
  289. aiq/profiler/callbacks/base_callback_class.py +20 -0
  290. aiq/profiler/callbacks/langchain_callback_handler.py +290 -0
  291. aiq/profiler/callbacks/llama_index_callback_handler.py +205 -0
  292. aiq/profiler/callbacks/semantic_kernel_callback_handler.py +238 -0
  293. aiq/profiler/callbacks/token_usage_base_model.py +27 -0
  294. aiq/profiler/data_frame_row.py +51 -0
  295. aiq/profiler/data_models.py +24 -0
  296. aiq/profiler/decorators/__init__.py +0 -0
  297. aiq/profiler/decorators/framework_wrapper.py +131 -0
  298. aiq/profiler/decorators/function_tracking.py +254 -0
  299. aiq/profiler/forecasting/__init__.py +0 -0
  300. aiq/profiler/forecasting/config.py +18 -0
  301. aiq/profiler/forecasting/model_trainer.py +75 -0
  302. aiq/profiler/forecasting/models/__init__.py +22 -0
  303. aiq/profiler/forecasting/models/forecasting_base_model.py +40 -0
  304. aiq/profiler/forecasting/models/linear_model.py +196 -0
  305. aiq/profiler/forecasting/models/random_forest_regressor.py +268 -0
  306. aiq/profiler/inference_metrics_model.py +28 -0
  307. aiq/profiler/inference_optimization/__init__.py +0 -0
  308. aiq/profiler/inference_optimization/bottleneck_analysis/__init__.py +0 -0
  309. aiq/profiler/inference_optimization/bottleneck_analysis/nested_stack_analysis.py +460 -0
  310. aiq/profiler/inference_optimization/bottleneck_analysis/simple_stack_analysis.py +258 -0
  311. aiq/profiler/inference_optimization/data_models.py +386 -0
  312. aiq/profiler/inference_optimization/experimental/__init__.py +0 -0
  313. aiq/profiler/inference_optimization/experimental/concurrency_spike_analysis.py +468 -0
  314. aiq/profiler/inference_optimization/experimental/prefix_span_analysis.py +405 -0
  315. aiq/profiler/inference_optimization/llm_metrics.py +212 -0
  316. aiq/profiler/inference_optimization/prompt_caching.py +163 -0
  317. aiq/profiler/inference_optimization/token_uniqueness.py +107 -0
  318. aiq/profiler/inference_optimization/workflow_runtimes.py +72 -0
  319. aiq/profiler/intermediate_property_adapter.py +102 -0
  320. aiq/profiler/profile_runner.py +473 -0
  321. aiq/profiler/utils.py +184 -0
  322. aiq/registry_handlers/__init__.py +0 -0
  323. aiq/registry_handlers/local/__init__.py +0 -0
  324. aiq/registry_handlers/local/local_handler.py +176 -0
  325. aiq/registry_handlers/local/register_local.py +37 -0
  326. aiq/registry_handlers/metadata_factory.py +60 -0
  327. aiq/registry_handlers/package_utils.py +567 -0
  328. aiq/registry_handlers/pypi/__init__.py +0 -0
  329. aiq/registry_handlers/pypi/pypi_handler.py +251 -0
  330. aiq/registry_handlers/pypi/register_pypi.py +40 -0
  331. aiq/registry_handlers/register.py +21 -0
  332. aiq/registry_handlers/registry_handler_base.py +157 -0
  333. aiq/registry_handlers/rest/__init__.py +0 -0
  334. aiq/registry_handlers/rest/register_rest.py +56 -0
  335. aiq/registry_handlers/rest/rest_handler.py +237 -0
  336. aiq/registry_handlers/schemas/__init__.py +0 -0
  337. aiq/registry_handlers/schemas/headers.py +42 -0
  338. aiq/registry_handlers/schemas/package.py +68 -0
  339. aiq/registry_handlers/schemas/publish.py +63 -0
  340. aiq/registry_handlers/schemas/pull.py +82 -0
  341. aiq/registry_handlers/schemas/remove.py +36 -0
  342. aiq/registry_handlers/schemas/search.py +91 -0
  343. aiq/registry_handlers/schemas/status.py +47 -0
  344. aiq/retriever/__init__.py +0 -0
  345. aiq/retriever/interface.py +37 -0
  346. aiq/retriever/milvus/__init__.py +14 -0
  347. aiq/retriever/milvus/register.py +81 -0
  348. aiq/retriever/milvus/retriever.py +228 -0
  349. aiq/retriever/models.py +74 -0
  350. aiq/retriever/nemo_retriever/__init__.py +14 -0
  351. aiq/retriever/nemo_retriever/register.py +60 -0
  352. aiq/retriever/nemo_retriever/retriever.py +190 -0
  353. aiq/retriever/register.py +22 -0
  354. aiq/runtime/__init__.py +14 -0
  355. aiq/runtime/loader.py +215 -0
  356. aiq/runtime/runner.py +190 -0
  357. aiq/runtime/session.py +158 -0
  358. aiq/runtime/user_metadata.py +130 -0
  359. aiq/settings/__init__.py +0 -0
  360. aiq/settings/global_settings.py +318 -0
  361. aiq/test/.namespace +1 -0
  362. aiq/tool/__init__.py +0 -0
  363. aiq/tool/chat_completion.py +74 -0
  364. aiq/tool/code_execution/README.md +151 -0
  365. aiq/tool/code_execution/__init__.py +0 -0
  366. aiq/tool/code_execution/code_sandbox.py +267 -0
  367. aiq/tool/code_execution/local_sandbox/.gitignore +1 -0
  368. aiq/tool/code_execution/local_sandbox/Dockerfile.sandbox +60 -0
  369. aiq/tool/code_execution/local_sandbox/__init__.py +13 -0
  370. aiq/tool/code_execution/local_sandbox/local_sandbox_server.py +198 -0
  371. aiq/tool/code_execution/local_sandbox/sandbox.requirements.txt +6 -0
  372. aiq/tool/code_execution/local_sandbox/start_local_sandbox.sh +50 -0
  373. aiq/tool/code_execution/register.py +74 -0
  374. aiq/tool/code_execution/test_code_execution_sandbox.py +414 -0
  375. aiq/tool/code_execution/utils.py +100 -0
  376. aiq/tool/datetime_tools.py +42 -0
  377. aiq/tool/document_search.py +141 -0
  378. aiq/tool/github_tools/__init__.py +0 -0
  379. aiq/tool/github_tools/create_github_commit.py +133 -0
  380. aiq/tool/github_tools/create_github_issue.py +87 -0
  381. aiq/tool/github_tools/create_github_pr.py +106 -0
  382. aiq/tool/github_tools/get_github_file.py +106 -0
  383. aiq/tool/github_tools/get_github_issue.py +166 -0
  384. aiq/tool/github_tools/get_github_pr.py +256 -0
  385. aiq/tool/github_tools/update_github_issue.py +100 -0
  386. aiq/tool/mcp/__init__.py +14 -0
  387. aiq/tool/mcp/exceptions.py +142 -0
  388. aiq/tool/mcp/mcp_client.py +255 -0
  389. aiq/tool/mcp/mcp_tool.py +96 -0
  390. aiq/tool/memory_tools/__init__.py +0 -0
  391. aiq/tool/memory_tools/add_memory_tool.py +79 -0
  392. aiq/tool/memory_tools/delete_memory_tool.py +67 -0
  393. aiq/tool/memory_tools/get_memory_tool.py +72 -0
  394. aiq/tool/nvidia_rag.py +95 -0
  395. aiq/tool/register.py +38 -0
  396. aiq/tool/retriever.py +89 -0
  397. aiq/tool/server_tools.py +66 -0
  398. aiq/utils/__init__.py +0 -0
  399. aiq/utils/data_models/__init__.py +0 -0
  400. aiq/utils/data_models/schema_validator.py +58 -0
  401. aiq/utils/debugging_utils.py +43 -0
  402. aiq/utils/dump_distro_mapping.py +32 -0
  403. aiq/utils/exception_handlers/__init__.py +0 -0
  404. aiq/utils/exception_handlers/automatic_retries.py +289 -0
  405. aiq/utils/exception_handlers/mcp.py +211 -0
  406. aiq/utils/exception_handlers/schemas.py +114 -0
  407. aiq/utils/io/__init__.py +0 -0
  408. aiq/utils/io/model_processing.py +28 -0
  409. aiq/utils/io/yaml_tools.py +119 -0
  410. aiq/utils/log_utils.py +37 -0
  411. aiq/utils/metadata_utils.py +74 -0
  412. aiq/utils/optional_imports.py +142 -0
  413. aiq/utils/producer_consumer_queue.py +178 -0
  414. aiq/utils/reactive/__init__.py +0 -0
  415. aiq/utils/reactive/base/__init__.py +0 -0
  416. aiq/utils/reactive/base/observable_base.py +65 -0
  417. aiq/utils/reactive/base/observer_base.py +55 -0
  418. aiq/utils/reactive/base/subject_base.py +79 -0
  419. aiq/utils/reactive/observable.py +59 -0
  420. aiq/utils/reactive/observer.py +76 -0
  421. aiq/utils/reactive/subject.py +131 -0
  422. aiq/utils/reactive/subscription.py +49 -0
  423. aiq/utils/settings/__init__.py +0 -0
  424. aiq/utils/settings/global_settings.py +197 -0
  425. aiq/utils/string_utils.py +38 -0
  426. aiq/utils/type_converter.py +290 -0
  427. aiq/utils/type_utils.py +484 -0
  428. aiq/utils/url_utils.py +27 -0
  429. nvidia_nat-1.2.0rc5.dist-info/METADATA +363 -0
  430. nvidia_nat-1.2.0rc5.dist-info/RECORD +435 -0
  431. nvidia_nat-1.2.0rc5.dist-info/WHEEL +5 -0
  432. nvidia_nat-1.2.0rc5.dist-info/entry_points.txt +20 -0
  433. nvidia_nat-1.2.0rc5.dist-info/licenses/LICENSE-3rd-party.txt +3686 -0
  434. nvidia_nat-1.2.0rc5.dist-info/licenses/LICENSE.md +201 -0
  435. nvidia_nat-1.2.0rc5.dist-info/top_level.txt +1 -0
@@ -0,0 +1,322 @@
1
+ # SPDX-FileCopyrightText: Copyright (c) 2025, NVIDIA CORPORATION & AFFILIATES. All rights reserved.
2
+ # SPDX-License-Identifier: Apache-2.0
3
+ #
4
+ # Licensed under the Apache License, Version 2.0 (the "License");
5
+ # you may not use this file except in compliance with the License.
6
+ # You may obtain a copy of the License at
7
+ #
8
+ # http://www.apache.org/licenses/LICENSE-2.0
9
+ #
10
+ # Unless required by applicable law or agreed to in writing, software
11
+ # distributed under the License is distributed on an "AS IS" BASIS,
12
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13
+ # See the License for the specific language governing permissions and
14
+ # limitations under the License.
15
+
16
+ import asyncio
17
+ import logging
18
+ from abc import abstractmethod
19
+ from collections.abc import Coroutine
20
+ from typing import Any
21
+ from typing import Generic
22
+ from typing import TypeVar
23
+
24
+ from aiq.builder.context import AIQContextState
25
+ from aiq.data_models.intermediate_step import IntermediateStep
26
+ from aiq.observability.exporter.base_exporter import BaseExporter
27
+ from aiq.observability.mixin.type_introspection_mixin import TypeIntrospectionMixin
28
+ from aiq.observability.processor.callback_processor import CallbackProcessor
29
+ from aiq.observability.processor.processor import Processor
30
+ from aiq.utils.type_utils import DecomposedType
31
+ from aiq.utils.type_utils import override
32
+
33
+ PipelineInputT = TypeVar("PipelineInputT")
34
+ PipelineOutputT = TypeVar("PipelineOutputT")
35
+
36
+ logger = logging.getLogger(__name__)
37
+
38
+
39
+ class ProcessingExporter(Generic[PipelineInputT, PipelineOutputT], BaseExporter, TypeIntrospectionMixin):
40
+ """A base class for telemetry exporters with processing pipeline support.
41
+
42
+ This class extends BaseExporter to add processor pipeline functionality.
43
+ It manages a chain of processors that can transform items before export.
44
+
45
+ The generic types work as follows:
46
+ - PipelineInputT: The type of items that enter the processing pipeline (e.g., Span)
47
+ - PipelineOutputT: The type of items after processing through the pipeline (e.g., converted format)
48
+
49
+ Key Features:
50
+ - Processor pipeline management (add, remove, clear)
51
+ - Type compatibility validation between processors
52
+ - Pipeline processing with error handling
53
+ - Automatic type validation before export
54
+ """
55
+
56
def __init__(self, context_state: AIQContextState | None = None):
    """Create a processing exporter with an initially empty pipeline.

    Args:
        context_state: Optional context state forwarded to the base exporter.
    """
    super().__init__(context_state)
    # Ordered chain of processors; each implements process(item) -> item.
    self._processors: list[Processor] = []
64
+
65
def add_processor(self, processor: Processor) -> None:
    """Append a processor to the end of the processing pipeline.

    Processors run in insertion order and may transform between arbitrary
    types (T -> U). Where possible, the new processor's input type is
    validated against the output type of the current pipeline tail.

    Args:
        processor: The processor to append to the pipeline.
    """
    if self._processors:
        tail = self._processors[-1]
        try:
            if not issubclass(processor.input_class, tail.output_class):
                raise ValueError(f"Processor {processor.__class__.__name__} input type {processor.input_type} "
                                 f"is not compatible with the {tail.__class__.__name__} "
                                 f"output type {tail.output_type}")
        except TypeError:
            # issubclass() rejects some generic types; in that case we cannot
            # verify compatibility, so log it and continue.
            logger.warning(
                "Cannot use issubclass() for type compatibility check between "
                "%s (%s) and %s (%s). Skipping compatibility check.",
                processor.__class__.__name__,
                processor.input_type,
                tail.__class__.__name__,
                tail.output_type)

    self._processors.append(processor)

    # Processors that emit items asynchronously (e.g. batchers) receive a
    # callback that re-enters the pipeline downstream of themselves.
    if isinstance(processor, CallbackProcessor):

        async def _resume(item):
            await self._continue_pipeline_after(processor, item)

        processor.set_done_callback(_resume)
101
+
102
def remove_processor(self, processor: Processor) -> None:
    """Remove a processor from the processing pipeline.

    Removing a processor that is not in the pipeline is a no-op.

    Args:
        processor: The processor to remove from the pipeline.
    """
    try:
        self._processors.remove(processor)
    except ValueError:
        # Processor was not in the pipeline; nothing to do.
        pass
110
+
111
def clear_processors(self) -> None:
    """Remove every processor from the pipeline, leaving it empty."""
    # In-place deletion so existing references to the list stay valid.
    del self._processors[:]
114
+
115
async def _pre_start(self) -> None:
    """Validate the pipeline's boundary types before the exporter starts.

    Checks that the first processor accepts the exporter's input type and
    that the last processor produces the exporter's output type. Checks
    that cannot be performed (e.g. on generic types that issubclass()
    rejects) are logged and skipped rather than treated as failures.
    """
    if not self._processors:
        return

    head = self._processors[0]
    tail = self._processors[-1]

    # validate that the first processor's input type is compatible with the exporter's input type
    try:
        if not issubclass(head.input_class, self.input_class):
            raise ValueError(f"Processor {head.__class__.__name__} input type "
                             f"{head.input_type} is not compatible with the "
                             f"{self.input_type} input type")
    except TypeError as e:
        # Handle cases where classes are generic types that can't be used with issubclass
        logger.warning(
            "Cannot validate type compatibility between %s (%s) "
            "and exporter (%s): %s. Skipping validation.",
            head.__class__.__name__,
            head.input_type,
            self.input_type,
            e)

    # Validate that the last processor's output type is compatible with the exporter's output type
    try:
        if not DecomposedType.is_type_compatible(tail.output_type, self.output_type):
            raise ValueError(f"Processor {tail.__class__.__name__} output type "
                             f"{tail.output_type} is not compatible with the "
                             f"{self.output_type} output type")
    except TypeError as e:
        # Handle cases where classes are generic types that can't be used with issubclass
        logger.warning(
            "Cannot validate type compatibility between %s (%s) "
            "and exporter (%s): %s. Skipping validation.",
            tail.__class__.__name__,
            tail.output_type,
            self.output_type,
            e)
151
+
152
+ async def _process_pipeline(self, item: PipelineInputT) -> PipelineOutputT:
153
+ """Process item through all registered processors.
154
+
155
+ Args:
156
+ item (PipelineInputT): The item to process (starts as PipelineInputT, can transform to PipelineOutputT)
157
+
158
+ Returns:
159
+ PipelineOutputT: The processed item after running through all processors
160
+ """
161
+ return await self._process_through_processors(self._processors, item) # type: ignore
162
+
163
+ async def _process_through_processors(self, processors: list[Processor], item: Any) -> Any:
164
+ """Process an item through a list of processors.
165
+
166
+ Args:
167
+ processors (list[Processor]): List of processors to run the item through
168
+ item (Any): The item to process
169
+
170
+ Returns:
171
+ The processed item after running through all processors
172
+ """
173
+ processed_item = item
174
+ for processor in processors:
175
+ try:
176
+ processed_item = await processor.process(processed_item)
177
+ except Exception as e:
178
+ logger.error("Error in processor %s: %s", processor.__class__.__name__, e, exc_info=True)
179
+ # Continue with unprocessed item rather than failing
180
+ return processed_item
181
+
182
async def _export_final_item(self, processed_item: Any, raise_on_invalid: bool = False) -> None:
    """Export a processed item, validating its type first.

    Non-empty lists and instances of ``self.output_class`` are exported;
    empty batches are silently skipped.

    Args:
        processed_item (Any): The item (or batch) to export.
        raise_on_invalid (bool): If True, raise ValueError for invalid types
            instead of logging a warning.

    Raises:
        ValueError: If ``raise_on_invalid`` is True and the item is neither a
            list nor an instance of the expected output class.
    """
    if isinstance(processed_item, list):
        if not processed_item:
            logger.debug("Skipping export of empty batch")
            return
        await self.export_processed(processed_item)
        return

    if isinstance(processed_item, self.output_class):
        await self.export_processed(processed_item)
        return

    if raise_on_invalid:
        raise ValueError(f"Processed item {processed_item} is not a valid output type. "
                         f"Expected {self.output_class} or list[{self.output_class}]")
    logger.warning("Processed item %s is not a valid output type for export", processed_item)
201
+
202
async def _continue_pipeline_after(self, source_processor: Processor, item: Any) -> None:
    """Resume pipeline processing for an item injected after a given processor.

    Used when processors (like BatchingProcessor) emit items asynchronously
    and need them to flow through the remaining downstream processors and
    then be exported.

    Args:
        source_processor (Processor): The processor that generated the item.
        item (Any): The item to push through the rest of the pipeline.
    """
    try:
        try:
            start = self._processors.index(source_processor)
        except ValueError:
            logger.error("Source processor %s not found in pipeline", source_processor.__class__.__name__)
            return

        # Only the processors downstream of the source should run again.
        downstream = self._processors[start + 1:]
        result = await self._process_through_processors(downstream, item)

        await self._export_final_item(result)
    except Exception as e:
        logger.error("Failed to continue pipeline processing after %s: %s",
                     source_processor.__class__.__name__,
                     e,
                     exc_info=True)
232
+
233
async def _export_with_processing(self, item: PipelineInputT) -> None:
    """Process an item through the pipeline, then export the final result.

    Args:
        item: The item to process and export.

    Raises:
        Exception: Any failure during processing or export is logged and
            re-raised to the caller.
    """
    try:
        processed: PipelineOutputT = await self._process_pipeline(item)

        # Batch processors may legitimately produce an empty list; there is
        # nothing to export in that case.
        if isinstance(processed, list) and not processed:
            logger.debug("Skipping export of empty batch from processor pipeline")
            return

        await self._export_final_item(processed, raise_on_invalid=True)
    except Exception as e:
        logger.error("Failed to export item '%s': %s", item, e, exc_info=True)
        raise
253
+
254
@override
def export(self, event: IntermediateStep) -> None:
    """Export an IntermediateStep event through the processing pipeline.

    The event is treated as the pipeline's input type, run through the
    processors asynchronously, and the final result is exported.

    Args:
        event (IntermediateStep): The event to be exported.
    """
    if not isinstance(event, self.input_class):
        logger.warning("Event %s is not compatible with input type %s", event, self.input_type)
        return

    item: PipelineInputT = event  # type: ignore
    self._create_export_task(self._export_with_processing(item))
271
+
272
@abstractmethod
async def export_processed(self, item: PipelineOutputT | list[PipelineOutputT]) -> None:
    """Export the processed item.

    This method must be implemented by concrete exporters to handle
    the actual export logic after the item has been processed through the
    pipeline. Implementations receive either a single item or a batch
    (list), depending on whether a batching processor is registered.

    Args:
        item: The processed item, or a batch of items, to export
            (PipelineOutputT type).
    """
    pass
283
+
284
def _create_export_task(self, coro: Coroutine):
    """Schedule ``coro`` as a tracked asyncio task.

    Tasks are recorded in ``self._tasks`` until completion so cleanup can
    find them; the done-callback removes finished tasks automatically.

    Args:
        coro (Coroutine): The export coroutine to run.

    Raises:
        Exception: Re-raised if the event loop rejects task creation.
    """
    if not self._running:
        logger.warning("%s: Attempted to create export task while not running", self.name)
        # BUGFIX: close the dropped coroutine so Python does not emit a
        # "coroutine was never awaited" RuntimeWarning and any resources it
        # holds are released promptly.
        coro.close()
        return

    try:
        task = asyncio.create_task(coro)
        self._tasks.add(task)
        task.add_done_callback(self._tasks.discard)
    except Exception as e:
        logger.error("%s: Failed to create task: %s", self.name, e, exc_info=True)
        raise
298
+
299
@override
async def _cleanup(self):
    """Enhanced cleanup that shuts down all shutdown-aware processors.

    Each processor is responsible for its own cleanup, including routing
    any final batches through the remaining pipeline via their done
    callbacks. Parent cleanup always runs afterwards.
    """
    # Collect shutdown coroutines (and their owners, for error reporting)
    # from every processor that exposes a `shutdown` method.
    shutdown_coros = []
    shutdown_names = []
    for processor in getattr(self, '_processors', []):
        shutdown_method = getattr(processor, 'shutdown', None)
        if shutdown_method:
            logger.debug("Shutting down processor: %s", processor.__class__.__name__)
            shutdown_coros.append(shutdown_method())
            shutdown_names.append(processor.__class__.__name__)

    if shutdown_coros:
        # BUGFIX: gather(..., return_exceptions=True) never raises — failures
        # come back as exception objects in the result list. The previous
        # try/except around it was dead code and silently discarded shutdown
        # errors; inspect the results explicitly instead.
        results = await asyncio.gather(*shutdown_coros, return_exceptions=True)
        failures = 0
        for name, result in zip(shutdown_names, results):
            if isinstance(result, BaseException):
                failures += 1
                logger.error("Error shutting down processor %s: %s", name, result, exc_info=result)
        logger.debug("Successfully shut down %d processors", len(results) - failures)

    # Call parent cleanup
    await super()._cleanup()
@@ -0,0 +1,52 @@
1
+ # SPDX-FileCopyrightText: Copyright (c) 2025, NVIDIA CORPORATION & AFFILIATES. All rights reserved.
2
+ # SPDX-License-Identifier: Apache-2.0
3
+ #
4
+ # Licensed under the Apache License, Version 2.0 (the "License");
5
+ # you may not use this file except in compliance with the License.
6
+ # You may obtain a copy of the License at
7
+ #
8
+ # http://www.apache.org/licenses/LICENSE-2.0
9
+ #
10
+ # Unless required by applicable law or agreed to in writing, software
11
+ # distributed under the License is distributed on an "AS IS" BASIS,
12
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13
+ # See the License for the specific language governing permissions and
14
+ # limitations under the License.
15
+
16
+ import logging
17
+ from abc import abstractmethod
18
+ from typing import TypeVar
19
+
20
+ from aiq.data_models.intermediate_step import IntermediateStep
21
+ from aiq.observability.exporter.processing_exporter import ProcessingExporter
22
+ from aiq.utils.type_utils import override
23
+
24
# Module-level logger for this exporter module.
logger = logging.getLogger(__name__)

# Type of items entering the processing pipeline (typically IntermediateStep).
InputT = TypeVar("InputT")
# Type of items produced by the pipeline and handed to export_processed().
OutputT = TypeVar("OutputT")
28
+
29
+
30
class RawExporter(ProcessingExporter[InputT, OutputT]):
    """A base class for exporting raw intermediate steps.

    Telemetry exporters deriving from this class receive IntermediateStep
    objects directly and may optionally transform them through a processing
    pipeline before export.

    The flow is: IntermediateStep -> [Processing Pipeline] -> OutputT -> Export

    Args:
        context_state (AIQContextState, optional): The context state to use for the exporter. Defaults to None.
    """

    @abstractmethod
    async def export_processed(self, item: OutputT):
        """Export one processed item; implemented by concrete exporters."""
        pass

    @override
    def export(self, event: IntermediateStep):
        """Queue an IntermediateStep for processing and export.

        Events that are not IntermediateStep instances are ignored.
        """
        if isinstance(event, IntermediateStep):
            self._create_export_task(self._export_with_processing(event))  # type: ignore
@@ -0,0 +1,265 @@
1
+ # SPDX-FileCopyrightText: Copyright (c) 2024-2025, NVIDIA CORPORATION & AFFILIATES. All rights reserved.
2
+ # SPDX-License-Identifier: Apache-2.0
3
+ #
4
+ # Licensed under the Apache License, Version 2.0 (the "License");
5
+ # you may not use this file except in compliance with the License.
6
+ # You may obtain a copy of the License at
7
+ #
8
+ # http://www.apache.org/licenses/LICENSE-2.0
9
+ #
10
+ # Unless required by applicable law or agreed to in writing, software
11
+ # distributed under the License is distributed on an "AS IS" BASIS,
12
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13
+ # See the License for the specific language governing permissions and
14
+ # limitations under the License.
15
+
16
+ import logging
17
+ import re
18
+ from abc import abstractmethod
19
+ from typing import TypeVar
20
+
21
+ from aiq.data_models.intermediate_step import IntermediateStep
22
+ from aiq.data_models.intermediate_step import IntermediateStepState
23
+ from aiq.data_models.intermediate_step import TraceMetadata
24
+ from aiq.data_models.span import MimeTypes
25
+ from aiq.data_models.span import Span
26
+ from aiq.data_models.span import SpanAttributes
27
+ from aiq.data_models.span import SpanContext
28
+ from aiq.data_models.span import event_type_to_span_kind
29
+ from aiq.observability.exporter.base_exporter import IsolatedAttribute
30
+ from aiq.observability.exporter.processing_exporter import ProcessingExporter
31
+ from aiq.observability.mixin.serialize_mixin import SerializeMixin
32
+ from aiq.observability.utils.dict_utils import merge_dicts
33
+ from aiq.observability.utils.time_utils import ns_timestamp
34
+ from aiq.utils.type_utils import override
35
+
36
+ logger = logging.getLogger(__name__)
37
+
38
+ InputSpanT = TypeVar("InputSpanT")
39
+ OutputSpanT = TypeVar("OutputSpanT")
40
+
41
+
42
class SpanExporter(ProcessingExporter[InputSpanT, OutputSpanT], SerializeMixin):
    """Abstract base class for span exporters with processing pipeline support.

    This class specializes ProcessingExporter for span-based telemetry export. It converts
    IntermediateStep events into Span objects and supports processing pipelines for
    span transformation before export.

    The generic types work as follows:
    - InputSpanT: The type of spans that enter the processing pipeline (typically Span)
    - OutputSpanT: The type of spans after processing through the pipeline (e.g., OtelSpan)

    Key Features:
    - Automatic span creation from IntermediateStep events
    - Span lifecycle management (start/end event tracking)
    - Processing pipeline support via ProcessingExporter
    - Metadata and attribute handling
    - Usage information tracking
    - Automatic isolation of mutable state for concurrent execution using descriptors

    Inheritance Hierarchy:
    - BaseExporter: Core event subscription and lifecycle management + DescriptorIsolationMixin
    - ProcessingExporter: Adds processor pipeline functionality
    - SpanExporter: Specializes for span creation and export

    Event Processing Flow:
    1. IntermediateStep (START) → Create Span → Add to tracking
    2. IntermediateStep (END) → Complete Span → Process through pipeline → Export

    Args:
        context_state (AIQContextState, optional): The context state to use for the exporter. Defaults to None.
    """

    # Use descriptors for automatic isolation of span-specific state.
    # Each dict maps an IntermediateStep UUID to per-step state:
    #   _outstanding_spans: spans opened by a START event, awaiting their END event
    #   _span_stack: spans available for parent lookup by child START events
    #   _metadata_stack: metadata captured at START, merged with END metadata on close
    _outstanding_spans: IsolatedAttribute[dict] = IsolatedAttribute(dict)
    _span_stack: IsolatedAttribute[dict] = IsolatedAttribute(dict)
    _metadata_stack: IsolatedAttribute[dict] = IsolatedAttribute(dict)
78
+
79
@abstractmethod
async def export_processed(self, item: OutputSpanT) -> None:
    """Export a span that has already been processed by the pipeline.

    Implemented by concrete exporters to perform the actual export.

    Args:
        item (OutputSpanT): The processed span to export.
    """
    pass
87
+
88
@override
def export(self, event: IntermediateStep) -> None:
    """React to an IntermediateStep: open a span on START, close it on END.

    Non-IntermediateStep events and other event states are ignored.

    Args:
        event (IntermediateStep): The event to process.
    """
    if not isinstance(event, IntermediateStep):
        return

    state = event.event_state
    if state == IntermediateStepState.START:
        self._process_start_event(event)
    elif state == IntermediateStepState.END:
        self._process_end_event(event)
102
+
103
def _process_start_event(self, event: IntermediateStep):
    """Open a new Span for the START event of an intermediate step.

    Builds the span (with parent linkage, timing, and input attributes) and
    registers it in the tracking dicts so the matching END event can
    complete and export it.

    Args:
        event (IntermediateStep): The START event to process.
    """

    parent_span = None
    span_ctx = None

    # Look up the parent span to establish hierarchy
    # event.parent_id is the UUID of the last START step with a different UUID from current step
    # This maintains proper parent-child relationships in the span tree
    # Skip lookup if parent_id is "root" (indicates this is a top-level span)
    if len(self._span_stack) > 0 and event.parent_id and event.parent_id != "root":

        parent_span = self._span_stack.get(event.parent_id, None)
        if parent_span is None:
            # NOTE(review): returning here drops the child span entirely when
            # the parent is unknown; the later END event only logs a warning.
            logger.warning("No parent span found for step %s", event.UUID)
            return

        # Copy the parent so the child's reference is independent of the
        # tracked instance; reuse the parent's trace_id so the child stays
        # in the same trace.
        parent_span = parent_span.model_copy() if isinstance(parent_span, Span) else None
        if parent_span and parent_span.context:
            span_ctx = SpanContext(trace_id=parent_span.context.trace_id)

    # Extract start/end times from the step
    # By convention, `span_event_timestamp` is the time we started, `event_timestamp` is the time we ended.
    # If span_event_timestamp is missing, we default to event_timestamp (meaning zero-length).
    s_ts = event.payload.span_event_timestamp or event.payload.event_timestamp
    start_ns = ns_timestamp(s_ts)

    # Optional: embed the LLM/tool name if present
    if event.payload.name:
        sub_span_name = f"{event.payload.name}"
    else:
        sub_span_name = f"{event.payload.event_type}"

    sub_span = Span(
        name=sub_span_name,
        parent=parent_span,
        context=span_ctx,
        attributes={
            "aiq.event_type": event.payload.event_type.value,
            "aiq.function.id": event.function_ancestry.function_id if event.function_ancestry else "unknown",
            "aiq.function.name": event.function_ancestry.function_name if event.function_ancestry else "unknown",
            "aiq.subspan.name": event.payload.name or "",
            "aiq.event_timestamp": event.event_timestamp,
            "aiq.framework": event.payload.framework.value if event.payload.framework else "unknown",
        },
        start_time=start_ns)

    span_kind = event_type_to_span_kind(event.event_type)
    sub_span.set_attribute("aiq.span.kind", span_kind.value)

    if event.payload.data and event.payload.data.input:
        # Special-case chat transcripts: surface only the human question when
        # the input matches "Human: Question: ..."; otherwise serialize the
        # whole input payload and record its mime type.
        match = re.search(r"Human:\s*Question:\s*(.*)", str(event.payload.data.input))
        if match:
            human_question = match.group(1).strip()
            sub_span.set_attribute(SpanAttributes.INPUT_VALUE.value, human_question)
        else:
            serialized_input, is_json = self._serialize_payload(event.payload.data.input)
            sub_span.set_attribute(SpanAttributes.INPUT_VALUE.value, serialized_input)
            sub_span.set_attribute(SpanAttributes.INPUT_MIME_TYPE.value,
                                   MimeTypes.JSON.value if is_json else MimeTypes.TEXT.value)

    # Add metadata to the metadata stack
    start_metadata = event.payload.metadata or {}

    if isinstance(start_metadata, dict):
        self._metadata_stack[event.UUID] = start_metadata  # type: ignore
    elif isinstance(start_metadata, TraceMetadata):
        self._metadata_stack[event.UUID] = start_metadata.model_dump()  # type: ignore
    else:
        # Invalid metadata aborts tracking: the span is never registered
        # below, so the END event for this UUID will be ignored with a warning.
        logger.warning("Invalid metadata type for step %s", event.UUID)
        return

    self._span_stack[event.UUID] = sub_span  # type: ignore
    self._outstanding_spans[event.UUID] = sub_span  # type: ignore

    logger.debug(
        "Added span to tracking (outstanding: %d, stack: %d, event_id: %s)",
        len(self._outstanding_spans),  # type: ignore
        len(self._span_stack),  # type: ignore
        event.UUID)
187
+
188
def _process_end_event(self, event: IntermediateStep):
    """Process the end event of an intermediate step.

    Completes the span opened by the matching START event: attaches usage,
    output and merged metadata attributes, ends the span, and schedules it
    for export through the processing pipeline.

    Args:
        event (IntermediateStep): The event to process.
    """

    # Find the subspan that was created in the start event
    sub_span: Span | None = self._outstanding_spans.pop(event.UUID, None)  # type: ignore

    if sub_span is None:
        logger.warning("No subspan found for step %s", event.UUID)
        return

    self._span_stack.pop(event.UUID, None)  # type: ignore

    # Optionally add more attributes from usage_info or data
    usage_info = event.payload.usage_info
    if usage_info:
        sub_span.set_attribute(SpanAttributes.AIQ_USAGE_NUM_LLM_CALLS.value,
                               usage_info.num_llm_calls if usage_info.num_llm_calls else 0)
        sub_span.set_attribute(SpanAttributes.AIQ_USAGE_SECONDS_BETWEEN_CALLS.value,
                               usage_info.seconds_between_calls if usage_info.seconds_between_calls else 0)
        sub_span.set_attribute(SpanAttributes.LLM_TOKEN_COUNT_PROMPT.value,
                               usage_info.token_usage.prompt_tokens if usage_info.token_usage else 0)
        sub_span.set_attribute(SpanAttributes.LLM_TOKEN_COUNT_COMPLETION.value,
                               usage_info.token_usage.completion_tokens if usage_info.token_usage else 0)
        sub_span.set_attribute(SpanAttributes.LLM_TOKEN_COUNT_TOTAL.value,
                               usage_info.token_usage.total_tokens if usage_info.token_usage else 0)

    if event.payload.data and event.payload.data.output is not None:
        serialized_output, is_json = self._serialize_payload(event.payload.data.output)
        sub_span.set_attribute(SpanAttributes.OUTPUT_VALUE.value, serialized_output)
        sub_span.set_attribute(SpanAttributes.OUTPUT_MIME_TYPE.value,
                               MimeTypes.JSON.value if is_json else MimeTypes.TEXT.value)

    # Merge metadata from start event with end event metadata.
    # BUGFIX: pop with a default of None — without the default, an unknown
    # UUID raised KeyError here and the None-check below was unreachable.
    start_metadata = self._metadata_stack.pop(event.UUID, None)  # type: ignore

    if start_metadata is None:
        logger.warning("No metadata found for step %s", event.UUID)
        return

    end_metadata = event.payload.metadata or {}

    if not isinstance(end_metadata, (dict, TraceMetadata)):
        logger.warning("Invalid metadata type for step %s", event.UUID)
        return

    if isinstance(end_metadata, TraceMetadata):
        end_metadata = end_metadata.model_dump()

    merged_metadata = merge_dicts(start_metadata, end_metadata)
    serialized_metadata, is_json = self._serialize_payload(merged_metadata)
    sub_span.set_attribute("aiq.metadata", serialized_metadata)
    sub_span.set_attribute("aiq.metadata.mime_type", MimeTypes.JSON.value if is_json else MimeTypes.TEXT.value)

    end_ns = ns_timestamp(event.payload.event_timestamp)

    # End the subspan
    sub_span.end(end_time=end_ns)

    # Export the span with processing pipeline
    self._create_export_task(self._export_with_processing(sub_span))  # type: ignore
252
+
253
@override
async def _cleanup(self):
    """End and discard any spans still open, then run parent cleanup."""
    remaining = self._outstanding_spans  # type: ignore
    if remaining:
        logger.warning("Not all spans were closed. Remaining: %s", remaining)
        # Close every leftover span so downstream systems see an end time.
        for open_span in remaining.values():
            open_span.end()

    remaining.clear()
    self._span_stack.clear()  # type: ignore
    self._metadata_stack.clear()  # type: ignore
    await super()._cleanup()