nvidia-nat 1.3.0.dev2__py3-none-any.whl → 1.3.0rc2__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (250)
  1. aiq/__init__.py +2 -2
  2. nat/agent/base.py +24 -15
  3. nat/agent/dual_node.py +9 -4
  4. nat/agent/prompt_optimizer/prompt.py +68 -0
  5. nat/agent/prompt_optimizer/register.py +149 -0
  6. nat/agent/react_agent/agent.py +79 -47
  7. nat/agent/react_agent/register.py +50 -22
  8. nat/agent/reasoning_agent/reasoning_agent.py +11 -9
  9. nat/agent/register.py +1 -1
  10. nat/agent/rewoo_agent/agent.py +326 -148
  11. nat/agent/rewoo_agent/prompt.py +19 -22
  12. nat/agent/rewoo_agent/register.py +54 -27
  13. nat/agent/tool_calling_agent/agent.py +84 -28
  14. nat/agent/tool_calling_agent/register.py +51 -28
  15. nat/authentication/api_key/api_key_auth_provider.py +2 -2
  16. nat/authentication/credential_validator/bearer_token_validator.py +557 -0
  17. nat/authentication/http_basic_auth/http_basic_auth_provider.py +1 -1
  18. nat/authentication/interfaces.py +5 -2
  19. nat/authentication/oauth2/oauth2_auth_code_flow_provider.py +69 -36
  20. nat/authentication/oauth2/oauth2_resource_server_config.py +124 -0
  21. nat/authentication/register.py +0 -1
  22. nat/builder/builder.py +56 -24
  23. nat/builder/component_utils.py +9 -5
  24. nat/builder/context.py +68 -17
  25. nat/builder/eval_builder.py +16 -11
  26. nat/builder/framework_enum.py +1 -0
  27. nat/builder/front_end.py +1 -1
  28. nat/builder/function.py +378 -8
  29. nat/builder/function_base.py +3 -3
  30. nat/builder/function_info.py +6 -8
  31. nat/builder/user_interaction_manager.py +2 -2
  32. nat/builder/workflow.py +13 -1
  33. nat/builder/workflow_builder.py +281 -76
  34. nat/cli/cli_utils/config_override.py +2 -2
  35. nat/cli/commands/evaluate.py +1 -1
  36. nat/cli/commands/info/info.py +16 -6
  37. nat/cli/commands/info/list_channels.py +1 -1
  38. nat/cli/commands/info/list_components.py +7 -8
  39. nat/cli/commands/mcp/__init__.py +14 -0
  40. nat/cli/commands/mcp/mcp.py +986 -0
  41. nat/cli/commands/object_store/__init__.py +14 -0
  42. nat/cli/commands/object_store/object_store.py +227 -0
  43. nat/cli/commands/optimize.py +90 -0
  44. nat/cli/commands/registry/publish.py +2 -2
  45. nat/cli/commands/registry/pull.py +2 -2
  46. nat/cli/commands/registry/remove.py +2 -2
  47. nat/cli/commands/registry/search.py +15 -17
  48. nat/cli/commands/start.py +16 -5
  49. nat/cli/commands/uninstall.py +1 -1
  50. nat/cli/commands/workflow/templates/config.yml.j2 +14 -13
  51. nat/cli/commands/workflow/templates/pyproject.toml.j2 +4 -1
  52. nat/cli/commands/workflow/templates/register.py.j2 +2 -3
  53. nat/cli/commands/workflow/templates/workflow.py.j2 +35 -21
  54. nat/cli/commands/workflow/workflow_commands.py +62 -22
  55. nat/cli/entrypoint.py +8 -10
  56. nat/cli/main.py +3 -0
  57. nat/cli/register_workflow.py +38 -4
  58. nat/cli/type_registry.py +75 -6
  59. nat/control_flow/__init__.py +0 -0
  60. nat/control_flow/register.py +20 -0
  61. nat/control_flow/router_agent/__init__.py +0 -0
  62. nat/control_flow/router_agent/agent.py +329 -0
  63. nat/control_flow/router_agent/prompt.py +48 -0
  64. nat/control_flow/router_agent/register.py +91 -0
  65. nat/control_flow/sequential_executor.py +166 -0
  66. nat/data_models/agent.py +34 -0
  67. nat/data_models/api_server.py +74 -66
  68. nat/data_models/authentication.py +23 -9
  69. nat/data_models/common.py +1 -1
  70. nat/data_models/component.py +2 -0
  71. nat/data_models/component_ref.py +11 -0
  72. nat/data_models/config.py +41 -17
  73. nat/data_models/dataset_handler.py +1 -1
  74. nat/data_models/discovery_metadata.py +4 -4
  75. nat/data_models/evaluate.py +4 -1
  76. nat/data_models/function.py +34 -0
  77. nat/data_models/function_dependencies.py +14 -6
  78. nat/data_models/gated_field_mixin.py +242 -0
  79. nat/data_models/intermediate_step.py +3 -3
  80. nat/data_models/optimizable.py +119 -0
  81. nat/data_models/optimizer.py +149 -0
  82. nat/data_models/span.py +41 -3
  83. nat/data_models/swe_bench_model.py +1 -1
  84. nat/data_models/temperature_mixin.py +44 -0
  85. nat/data_models/thinking_mixin.py +86 -0
  86. nat/data_models/top_p_mixin.py +44 -0
  87. nat/embedder/nim_embedder.py +1 -1
  88. nat/embedder/openai_embedder.py +1 -1
  89. nat/embedder/register.py +0 -1
  90. nat/eval/config.py +3 -1
  91. nat/eval/dataset_handler/dataset_handler.py +71 -7
  92. nat/eval/evaluate.py +86 -31
  93. nat/eval/evaluator/base_evaluator.py +1 -1
  94. nat/eval/evaluator/evaluator_model.py +13 -0
  95. nat/eval/intermediate_step_adapter.py +1 -1
  96. nat/eval/rag_evaluator/evaluate.py +2 -2
  97. nat/eval/rag_evaluator/register.py +3 -3
  98. nat/eval/register.py +4 -1
  99. nat/eval/remote_workflow.py +3 -3
  100. nat/eval/runtime_evaluator/__init__.py +14 -0
  101. nat/eval/runtime_evaluator/evaluate.py +123 -0
  102. nat/eval/runtime_evaluator/register.py +100 -0
  103. nat/eval/swe_bench_evaluator/evaluate.py +6 -6
  104. nat/eval/trajectory_evaluator/evaluate.py +1 -1
  105. nat/eval/trajectory_evaluator/register.py +1 -1
  106. nat/eval/tunable_rag_evaluator/evaluate.py +4 -7
  107. nat/eval/utils/eval_trace_ctx.py +89 -0
  108. nat/eval/utils/weave_eval.py +18 -9
  109. nat/experimental/decorators/experimental_warning_decorator.py +27 -7
  110. nat/experimental/test_time_compute/functions/plan_select_execute_function.py +7 -3
  111. nat/experimental/test_time_compute/functions/ttc_tool_orchestration_function.py +3 -3
  112. nat/experimental/test_time_compute/functions/ttc_tool_wrapper_function.py +1 -1
  113. nat/experimental/test_time_compute/models/strategy_base.py +5 -4
  114. nat/experimental/test_time_compute/register.py +0 -1
  115. nat/experimental/test_time_compute/selection/llm_based_output_merging_selector.py +1 -3
  116. nat/front_ends/console/authentication_flow_handler.py +82 -30
  117. nat/front_ends/console/console_front_end_plugin.py +8 -5
  118. nat/front_ends/fastapi/auth_flow_handlers/websocket_flow_handler.py +52 -17
  119. nat/front_ends/fastapi/dask_client_mixin.py +65 -0
  120. nat/front_ends/fastapi/fastapi_front_end_config.py +36 -5
  121. nat/front_ends/fastapi/fastapi_front_end_controller.py +4 -4
  122. nat/front_ends/fastapi/fastapi_front_end_plugin.py +135 -4
  123. nat/front_ends/fastapi/fastapi_front_end_plugin_worker.py +452 -282
  124. nat/front_ends/fastapi/job_store.py +518 -99
  125. nat/front_ends/fastapi/main.py +11 -19
  126. nat/front_ends/fastapi/message_handler.py +13 -14
  127. nat/front_ends/fastapi/message_validator.py +19 -19
  128. nat/front_ends/fastapi/response_helpers.py +4 -4
  129. nat/front_ends/fastapi/step_adaptor.py +2 -2
  130. nat/front_ends/fastapi/utils.py +57 -0
  131. nat/front_ends/mcp/introspection_token_verifier.py +73 -0
  132. nat/front_ends/mcp/mcp_front_end_config.py +10 -1
  133. nat/front_ends/mcp/mcp_front_end_plugin.py +45 -13
  134. nat/front_ends/mcp/mcp_front_end_plugin_worker.py +116 -8
  135. nat/front_ends/mcp/tool_converter.py +44 -14
  136. nat/front_ends/register.py +0 -1
  137. nat/front_ends/simple_base/simple_front_end_plugin_base.py +3 -1
  138. nat/llm/aws_bedrock_llm.py +24 -12
  139. nat/llm/azure_openai_llm.py +13 -6
  140. nat/llm/litellm_llm.py +69 -0
  141. nat/llm/nim_llm.py +20 -8
  142. nat/llm/openai_llm.py +14 -6
  143. nat/llm/register.py +4 -1
  144. nat/llm/utils/env_config_value.py +2 -3
  145. nat/llm/utils/thinking.py +215 -0
  146. nat/meta/pypi.md +9 -9
  147. nat/object_store/register.py +0 -1
  148. nat/observability/exporter/base_exporter.py +3 -3
  149. nat/observability/exporter/file_exporter.py +1 -1
  150. nat/observability/exporter/processing_exporter.py +309 -81
  151. nat/observability/exporter/span_exporter.py +35 -15
  152. nat/observability/exporter_manager.py +7 -7
  153. nat/observability/mixin/file_mixin.py +7 -7
  154. nat/observability/mixin/redaction_config_mixin.py +42 -0
  155. nat/observability/mixin/tagging_config_mixin.py +62 -0
  156. nat/observability/mixin/type_introspection_mixin.py +420 -107
  157. nat/observability/processor/batching_processor.py +5 -7
  158. nat/observability/processor/falsy_batch_filter_processor.py +55 -0
  159. nat/observability/processor/processor.py +3 -0
  160. nat/observability/processor/processor_factory.py +70 -0
  161. nat/observability/processor/redaction/__init__.py +24 -0
  162. nat/observability/processor/redaction/contextual_redaction_processor.py +125 -0
  163. nat/observability/processor/redaction/contextual_span_redaction_processor.py +66 -0
  164. nat/observability/processor/redaction/redaction_processor.py +177 -0
  165. nat/observability/processor/redaction/span_header_redaction_processor.py +92 -0
  166. nat/observability/processor/span_tagging_processor.py +68 -0
  167. nat/observability/register.py +6 -4
  168. nat/profiler/calc/calc_runner.py +3 -4
  169. nat/profiler/callbacks/agno_callback_handler.py +1 -1
  170. nat/profiler/callbacks/langchain_callback_handler.py +6 -6
  171. nat/profiler/callbacks/llama_index_callback_handler.py +3 -3
  172. nat/profiler/callbacks/semantic_kernel_callback_handler.py +3 -3
  173. nat/profiler/data_frame_row.py +1 -1
  174. nat/profiler/decorators/framework_wrapper.py +62 -13
  175. nat/profiler/decorators/function_tracking.py +160 -3
  176. nat/profiler/forecasting/models/forecasting_base_model.py +3 -1
  177. nat/profiler/forecasting/models/linear_model.py +1 -1
  178. nat/profiler/forecasting/models/random_forest_regressor.py +1 -1
  179. nat/profiler/inference_optimization/bottleneck_analysis/nested_stack_analysis.py +1 -1
  180. nat/profiler/inference_optimization/bottleneck_analysis/simple_stack_analysis.py +1 -1
  181. nat/profiler/inference_optimization/data_models.py +3 -3
  182. nat/profiler/inference_optimization/experimental/prefix_span_analysis.py +8 -9
  183. nat/profiler/inference_optimization/token_uniqueness.py +1 -1
  184. nat/profiler/parameter_optimization/__init__.py +0 -0
  185. nat/profiler/parameter_optimization/optimizable_utils.py +93 -0
  186. nat/profiler/parameter_optimization/optimizer_runtime.py +67 -0
  187. nat/profiler/parameter_optimization/parameter_optimizer.py +153 -0
  188. nat/profiler/parameter_optimization/parameter_selection.py +107 -0
  189. nat/profiler/parameter_optimization/pareto_visualizer.py +380 -0
  190. nat/profiler/parameter_optimization/prompt_optimizer.py +384 -0
  191. nat/profiler/parameter_optimization/update_helpers.py +66 -0
  192. nat/profiler/profile_runner.py +14 -9
  193. nat/profiler/utils.py +4 -2
  194. nat/registry_handlers/local/local_handler.py +2 -2
  195. nat/registry_handlers/package_utils.py +1 -2
  196. nat/registry_handlers/pypi/pypi_handler.py +23 -26
  197. nat/registry_handlers/register.py +3 -4
  198. nat/registry_handlers/rest/rest_handler.py +12 -13
  199. nat/retriever/milvus/retriever.py +2 -2
  200. nat/retriever/nemo_retriever/retriever.py +1 -1
  201. nat/retriever/register.py +0 -1
  202. nat/runtime/loader.py +2 -2
  203. nat/runtime/runner.py +106 -8
  204. nat/runtime/session.py +69 -8
  205. nat/settings/global_settings.py +16 -5
  206. nat/tool/chat_completion.py +5 -2
  207. nat/tool/code_execution/local_sandbox/local_sandbox_server.py +3 -3
  208. nat/tool/datetime_tools.py +49 -9
  209. nat/tool/document_search.py +2 -2
  210. nat/tool/github_tools.py +450 -0
  211. nat/tool/memory_tools/get_memory_tool.py +1 -1
  212. nat/tool/nvidia_rag.py +1 -1
  213. nat/tool/register.py +2 -9
  214. nat/tool/retriever.py +3 -2
  215. nat/utils/callable_utils.py +70 -0
  216. nat/utils/data_models/schema_validator.py +3 -3
  217. nat/utils/decorators.py +210 -0
  218. nat/utils/exception_handlers/automatic_retries.py +104 -51
  219. nat/utils/exception_handlers/schemas.py +1 -1
  220. nat/utils/io/yaml_tools.py +2 -2
  221. nat/utils/log_levels.py +25 -0
  222. nat/utils/reactive/base/observable_base.py +2 -2
  223. nat/utils/reactive/base/observer_base.py +1 -1
  224. nat/utils/reactive/observable.py +2 -2
  225. nat/utils/reactive/observer.py +4 -4
  226. nat/utils/reactive/subscription.py +1 -1
  227. nat/utils/settings/global_settings.py +6 -8
  228. nat/utils/type_converter.py +4 -3
  229. nat/utils/type_utils.py +9 -5
  230. {nvidia_nat-1.3.0.dev2.dist-info → nvidia_nat-1.3.0rc2.dist-info}/METADATA +42 -18
  231. {nvidia_nat-1.3.0.dev2.dist-info → nvidia_nat-1.3.0rc2.dist-info}/RECORD +238 -196
  232. {nvidia_nat-1.3.0.dev2.dist-info → nvidia_nat-1.3.0rc2.dist-info}/entry_points.txt +1 -0
  233. nat/cli/commands/info/list_mcp.py +0 -304
  234. nat/tool/github_tools/create_github_commit.py +0 -133
  235. nat/tool/github_tools/create_github_issue.py +0 -87
  236. nat/tool/github_tools/create_github_pr.py +0 -106
  237. nat/tool/github_tools/get_github_file.py +0 -106
  238. nat/tool/github_tools/get_github_issue.py +0 -166
  239. nat/tool/github_tools/get_github_pr.py +0 -256
  240. nat/tool/github_tools/update_github_issue.py +0 -100
  241. nat/tool/mcp/exceptions.py +0 -142
  242. nat/tool/mcp/mcp_client.py +0 -255
  243. nat/tool/mcp/mcp_tool.py +0 -96
  244. nat/utils/exception_handlers/mcp.py +0 -211
  245. /nat/{tool/github_tools → agent/prompt_optimizer}/__init__.py +0 -0
  246. /nat/{tool/mcp → authentication/credential_validator}/__init__.py +0 -0
  247. {nvidia_nat-1.3.0.dev2.dist-info → nvidia_nat-1.3.0rc2.dist-info}/WHEEL +0 -0
  248. {nvidia_nat-1.3.0.dev2.dist-info → nvidia_nat-1.3.0rc2.dist-info}/licenses/LICENSE-3rd-party.txt +0 -0
  249. {nvidia_nat-1.3.0.dev2.dist-info → nvidia_nat-1.3.0rc2.dist-info}/licenses/LICENSE.md +0 -0
  250. {nvidia_nat-1.3.0.dev2.dist-info → nvidia_nat-1.3.0rc2.dist-info}/top_level.txt +0 -0

nat/observability/exporter/processing_exporter.py

@@ -50,46 +50,76 @@ class ProcessingExporter(Generic[PipelineInputT, PipelineOutputT], BaseExporter,
     - Processor pipeline management (add, remove, clear)
     - Type compatibility validation between processors
     - Pipeline processing with error handling
+    - Configurable None filtering: processors returning None can drop items from pipeline
     - Automatic type validation before export
     """
+    # All ProcessingExporter instances automatically use this for signature checking
+    _signature_method = '_process_pipeline'
 
-    def __init__(self, context_state: ContextState | None = None):
+    def __init__(self, context_state: ContextState | None = None, drop_nones: bool = True):
         """Initialize the processing exporter.
 
         Args:
-            context_state: The context state to use for the exporter.
+            context_state (ContextState | None): The context state to use for the exporter.
+            drop_nones (bool): Whether to drop items when processors return None (default: True).
         """
         super().__init__(context_state)
         self._processors: list[Processor] = []  # List of processors that implement process(item) -> item
-
-    def add_processor(self, processor: Processor) -> None:
+        self._processor_names: dict[str, int] = {}  # Maps processor names to their positions
+        self._pipeline_locked: bool = False  # Prevents modifications after startup
+        self._drop_nones: bool = drop_nones  # Whether to drop None values between processors
+
+    def add_processor(self,
+                      processor: Processor,
+                      name: str | None = None,
+                      position: int | None = None,
+                      before: str | None = None,
+                      after: str | None = None) -> None:
         """Add a processor to the processing pipeline.
 
-        Processors are executed in the order they are added.
-        Processors can transform between any types (T -> U).
+        Processors are executed in the order they are added. Processors can transform between any types (T -> U).
+        Supports flexible positioning using names, positions, or relative placement.
 
         Args:
-            processor: The processor to add to the pipeline
+            processor (Processor): The processor to add to the pipeline
+            name (str | None): Name for the processor (for later reference). Must be unique.
+            position (int | None): Specific position to insert at (0-based index, -1 for append)
+            before (str | None): Insert before the named processor
+            after (str | None): Insert after the named processor
+
+        Raises:
+            RuntimeError: If pipeline is locked (after startup)
+            ValueError: If positioning arguments conflict or named processor not found
         """
+        self._check_pipeline_locked()
 
-        # Check if the processor is compatible with the last processor in the pipeline
-        if len(self._processors) > 0:
-            try:
-                if not issubclass(processor.input_class, self._processors[-1].output_class):
-                    raise ValueError(f"Processor {processor.__class__.__name__} input type {processor.input_type} "
-                                     f"is not compatible with the {self._processors[-1].__class__.__name__} "
-                                     f"output type {self._processors[-1].output_type}")
-            except TypeError:
-                # Handle cases where input_class or output_class are generic types that can't be used with issubclass
-                # Fall back to type comparison for generic types
-                logger.warning(
-                    "Cannot use issubclass() for type compatibility check between "
-                    "%s (%s) and %s (%s). Skipping compatibility check.",
-                    processor.__class__.__name__,
-                    processor.input_type,
-                    self._processors[-1].__class__.__name__,
-                    self._processors[-1].output_type)
-        self._processors.append(processor)
+        # Determine insertion position
+        insert_position = self._calculate_insertion_position(position, before, after)
+
+        # Validate type compatibility at insertion point
+        self._validate_insertion_compatibility(processor, insert_position)
+
+        # Pre-validate name (no side effects yet)
+        if name is not None:
+            if not isinstance(name, str):
+                raise TypeError(f"Processor name must be a string, got {type(name).__name__}")
+            if name in self._processor_names:
+                raise ValueError(f"Processor name '{name}' already exists")
+
+        # Shift existing name positions (do this before list mutation)
+        for proc_name, pos in list(self._processor_names.items()):
+            if pos >= insert_position:
+                self._processor_names[proc_name] = pos + 1
+
+        # Insert the processor
+        if insert_position == len(self._processors):
+            self._processors.append(processor)
+        else:
+            self._processors.insert(insert_position, processor)
+
+        # Record the new processor name, if provided
+        if name is not None:
+            self._processor_names[name] = insert_position
 
         # Set up pipeline continuation callback for processors that support it
         if isinstance(processor, CallbackProcessor):
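
The placement keywords make pipeline order explicit instead of depending purely on registration order. A minimal usage sketch in Python: MySpanExporter, RedactionProcessor, and TaggingProcessor are hypothetical names, and only add_processor() and its keyword arguments come from the diff above.

    # Hypothetical exporter and processors; illustrates the positioning API only.
    exporter = MySpanExporter()

    exporter.add_processor(RedactionProcessor(), name="redact")
    exporter.add_processor(TaggingProcessor(), name="tag", after="redact")
    exporter.add_processor(BatchingProcessor(), position=-1)  # -1 appends at the end

    # Only one of position/before/after may be given; this would raise ValueError:
    # exporter.add_processor(TaggingProcessor(), position=0, before="redact")
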
@@ -99,64 +129,240 @@ class ProcessingExporter(Generic[PipelineInputT, PipelineOutputT], BaseExporter,
 
             processor.set_done_callback(pipeline_callback)
 
-    def remove_processor(self, processor: Processor) -> None:
+    def remove_processor(self, processor: Processor | str | int) -> None:
         """Remove a processor from the processing pipeline.
 
         Args:
-            processor: The processor to remove from the pipeline
+            processor (Processor | str | int): The processor to remove (by name, position, or object).
+
+        Raises:
+            RuntimeError: If pipeline is locked (after startup)
+            ValueError: If named processor or position not found
+            TypeError: If processor argument has invalid type
         """
-        if processor in self._processors:
-            self._processors.remove(processor)
+        self._check_pipeline_locked()
+
+        # Determine processor and position to remove
+        if isinstance(processor, str):
+            # Remove by name
+            if processor not in self._processor_names:
+                raise ValueError(f"Processor '{processor}' not found in pipeline")
+            position = self._processor_names[processor]
+            processor_obj = self._processors[position]
+        elif isinstance(processor, int):
+            # Remove by position
+            if not (0 <= processor < len(self._processors)):
+                raise ValueError(f"Position {processor} is out of range [0, {len(self._processors) - 1}]")
+            position = processor
+            processor_obj = self._processors[position]
+        elif isinstance(processor, Processor):
+            # Remove by object (existing behavior)
+            if processor not in self._processors:
+                return  # Silently ignore if not found (existing behavior)
+            position = self._processors.index(processor)
+            processor_obj = processor
+        else:
+            raise TypeError(f"Processor must be a Processor object, string name, or int position, "
+                            f"got {type(processor).__name__}")
+
+        # Remove the processor
+        self._processors.remove(processor_obj)
+
+        # Remove from name mapping and update positions
+        name_to_remove = None
+        for name, pos in self._processor_names.items():
+            if pos == position:
+                name_to_remove = name
+                break
+
+        if name_to_remove:
+            del self._processor_names[name_to_remove]
+
+        # Update positions for processors that shifted
+        for name, pos in self._processor_names.items():
+            if pos > position:
+                self._processor_names[name] = pos - 1
 
     def clear_processors(self) -> None:
         """Clear all processors from the pipeline."""
+        self._check_pipeline_locked()
         self._processors.clear()
+        self._processor_names.clear()
+
+    def reset_pipeline(self) -> None:
+        """Reset the pipeline to allow modifications.
+
+        This unlocks the pipeline and clears all processors, allowing
+        the pipeline to be reconfigured. Can only be called when the
+        exporter is stopped.
+
+        Raises:
+            RuntimeError: If exporter is currently running
+        """
+        if self._running:
+            raise RuntimeError("Cannot reset pipeline while exporter is running. "
+                               "Call stop() first, then reset_pipeline().")
+
+        self._pipeline_locked = False
+        self._processors.clear()
+        self._processor_names.clear()
+        logger.debug("Pipeline reset - unlocked and cleared all processors")
+
+    def get_processor_by_name(self, name: str) -> Processor | None:
+        """Get a processor by its name.
+
+        Args:
+            name (str): The name of the processor to retrieve
+
+        Returns:
+            Processor | None: The processor with the given name, or None if not found
+        """
+        if not isinstance(name, str):
+            raise TypeError(f"Processor name must be a string, got {type(name).__name__}")
+        if name in self._processor_names:
+            position = self._processor_names[name]
+            return self._processors[position]
+        logger.debug("Processor '%s' not found in pipeline", name)
+        return None
+
+    def _check_pipeline_locked(self) -> None:
+        """Check if pipeline is locked and raise error if it is."""
+        if self._pipeline_locked:
+            raise RuntimeError("Cannot modify processor pipeline after exporter has started. "
+                               "Pipeline must be fully configured before calling start().")
+
+    def _calculate_insertion_position(self, position: int | None, before: str | None, after: str | None) -> int:
+        """Calculate the insertion position based on provided arguments.
+
+        Args:
+            position (int | None): Explicit position (0-based index, -1 for append)
+            before (str | None): Insert before this named processor
+            after (str | None): Insert after this named processor
+
+        Returns:
+            int: The calculated insertion position
+
+        Raises:
+            ValueError: If arguments conflict or named processor not found
+        """
+        # Check for conflicting arguments
+        args_provided = sum(x is not None for x in [position, before, after])
+        if args_provided > 1:
+            raise ValueError("Only one of position, before, or after can be specified")
+
+        # Default to append
+        if args_provided == 0:
+            return len(self._processors)
+
+        # Handle explicit position
+        if position is not None:
+            if position == -1:
+                return len(self._processors)
+            if 0 <= position <= len(self._processors):
+                return position
+            raise ValueError(f"Position {position} is out of range [0, {len(self._processors)}]")
+
+        # Handle before/after named processors
+        if before is not None:
+            if not isinstance(before, str):
+                raise TypeError(f"'before' parameter must be a string, got {type(before).__name__}")
+            if before not in self._processor_names:
+                raise ValueError(f"Processor '{before}' not found in pipeline")
+            return self._processor_names[before]
+
+        if after is not None:
+            if not isinstance(after, str):
+                raise TypeError(f"'after' parameter must be a string, got {type(after).__name__}")
+            if after not in self._processor_names:
+                raise ValueError(f"Processor '{after}' not found in pipeline")
+            return self._processor_names[after] + 1
+
+        # Should never reach here
+        return len(self._processors)
+
+    def _validate_insertion_compatibility(self, processor: Processor, position: int) -> None:
+        """Validate type compatibility for processor insertion.
+
+        Args:
+            processor (Processor): The processor to insert
+            position (int): The position where it will be inserted
+
+        Raises:
+            ValueError: If processor is not compatible with neighbors
+        """
+        # Check compatibility with neighbors
+        if position > 0:
+            predecessor = self._processors[position - 1]
+            self._check_processor_compatibility(predecessor,
+                                                processor,
+                                                "predecessor",
+                                                str(predecessor.output_type),
+                                                str(processor.input_type))
+
+        if position < len(self._processors):
+            successor = self._processors[position]
+            self._check_processor_compatibility(processor,
+                                                successor,
+                                                "successor",
+                                                str(processor.output_type),
+                                                str(successor.input_type))
+
+    def _check_processor_compatibility(self,
+                                       source_processor: Processor,
+                                       target_processor: Processor,
+                                       relationship: str,
+                                       source_type: str,
+                                       target_type: str) -> None:
+        """Check type compatibility between two processors using Pydantic validation.
+
+        Args:
+            source_processor (Processor): The processor providing output
+            target_processor (Processor): The processor receiving input
+            relationship (str): Description of relationship ("predecessor" or "successor")
+            source_type (str): String representation of source type
+            target_type (str): String representation of target type
+        """
+        # Use Pydantic-based type compatibility checking
+        if not source_processor.is_output_compatible_with(target_processor.input_type):
+            raise ValueError(f"Processor {target_processor.__class__.__name__} input type {target_type} "
+                             f"is not compatible with {relationship} {source_processor.__class__.__name__} "
+                             f"output type {source_type}")
 
     async def _pre_start(self) -> None:
+
+        # Validate that the pipeline is compatible with the exporter
         if len(self._processors) > 0:
             first_processor = self._processors[0]
             last_processor = self._processors[-1]
 
             # validate that the first processor's input type is compatible with the exporter's input type
-            try:
-                if not issubclass(first_processor.input_class, self.input_class):
-                    raise ValueError(f"Processor {first_processor.__class__.__name__} input type "
-                                     f"{first_processor.input_type} is not compatible with the "
-                                     f"{self.input_type} input type")
-            except TypeError as e:
-                # Handle cases where classes are generic types that can't be used with issubclass
-                logger.warning(
-                    "Cannot validate type compatibility between %s (%s) "
-                    "and exporter (%s): %s. Skipping validation.",
-                    first_processor.__class__.__name__,
-                    first_processor.input_type,
-                    self.input_type,
-                    e)
-
+            if not first_processor.is_compatible_with_input(self.input_type):
+                logger.error("First processor %s input=%s incompatible with exporter input=%s",
+                             first_processor.__class__.__name__,
+                             first_processor.input_type,
+                             self.input_type)
+                raise ValueError("First processor incompatible with exporter input")
             # Validate that the last processor's output type is compatible with the exporter's output type
-            try:
-                if not DecomposedType.is_type_compatible(last_processor.output_type, self.output_type):
-                    raise ValueError(f"Processor {last_processor.__class__.__name__} output type "
-                                     f"{last_processor.output_type} is not compatible with the "
-                                     f"{self.output_type} output type")
-            except TypeError as e:
-                # Handle cases where classes are generic types that can't be used with issubclass
-                logger.warning(
-                    "Cannot validate type compatibility between %s (%s) "
-                    "and exporter (%s): %s. Skipping validation.",
-                    last_processor.__class__.__name__,
-                    last_processor.output_type,
-                    self.output_type,
-                    e)
-
-    async def _process_pipeline(self, item: PipelineInputT) -> PipelineOutputT:
+            # Use DecomposedType.is_type_compatible for the final export stage to allow batch compatibility
+            # This enables BatchingProcessor[T] -> Exporter[T] patterns where the exporter handles both T and list[T]
+            if not DecomposedType.is_type_compatible(last_processor.output_type, self.output_type):
+                logger.error("Last processor %s output=%s incompatible with exporter output=%s",
+                             last_processor.__class__.__name__,
+                             last_processor.output_type,
+                             self.output_type)
+                raise ValueError("Last processor incompatible with exporter output")
+
+        # Lock the pipeline to prevent further modifications
+        self._pipeline_locked = True
+
+    async def _process_pipeline(self, item: PipelineInputT) -> PipelineOutputT | None:
         """Process item through all registered processors.
 
         Args:
             item (PipelineInputT): The item to process (starts as PipelineInputT, can transform to PipelineOutputT)
 
         Returns:
-            PipelineOutputT: The processed item after running through all processors
+            PipelineOutputT | None: The processed item after running through all processors
         """
         return await self._process_through_processors(self._processors, item)  # type: ignore
 
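
Removal accepts the same three lookup forms, and the lock set in _pre_start means a started exporter must be stopped before the pipeline can change. A sketch continuing the hypothetical exporter above; stop() is implied by the "Call stop() first, then reset_pipeline()" error message, not shown in this diff:

    exporter.remove_processor("tag")      # by registered name
    exporter.remove_processor(0)          # by 0-based position
    exporter.remove_processor(proc_obj)   # by object; silently ignored if absent

    await exporter.stop()                 # pipeline locks once start() has run
    exporter.reset_pipeline()             # unlock, clear processors and the name map
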
@@ -168,14 +374,20 @@ class ProcessingExporter(Generic[PipelineInputT, PipelineOutputT], BaseExporter,
             item (Any): The item to process
 
         Returns:
-            The processed item after running through all processors
+            Any: The processed item after running through all processors, or None if
+                drop_nones is True and any processor returned None
         """
         processed_item = item
         for processor in processors:
             try:
                 processed_item = await processor.process(processed_item)
+                # Drop None values between processors if configured to do so
+                if self._drop_nones and processed_item is None:
+                    logger.debug("Processor %s returned None, dropping item from pipeline",
+                                 processor.__class__.__name__)
+                    return None
             except Exception as e:
-                logger.error("Error in processor %s: %s", processor.__class__.__name__, e, exc_info=True)
+                logger.exception("Error in processor %s: %s", processor.__class__.__name__, e)
                 # Continue with unprocessed item rather than failing
         return processed_item
 
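
With drop_nones=True (the default), any processor in the chain can veto an item by returning None, and the exporter then skips export entirely. A sketch of a filtering processor under that contract; the import paths and the generic Processor signature are assumptions inferred from the file list and the process(item) -> item convention noted in __init__:

    from nat.data_models.span import Span                        # assumed location
    from nat.observability.processor.processor import Processor  # assumed location

    class DropHealthCheckSpans(Processor[Span, Span]):  # hypothetical processor
        """Drop spans whose name marks them as health-check noise."""

        async def process(self, item: Span) -> Span | None:
            if item.name.startswith("health"):
                return None  # pipeline short-circuits; nothing is exported
            return item
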
@@ -191,12 +403,15 @@ class ProcessingExporter(Generic[PipelineInputT, PipelineOutputT], BaseExporter,
                 await self.export_processed(processed_item)
             else:
                 logger.debug("Skipping export of empty batch")
-        elif isinstance(processed_item, self.output_class):
+        elif self.validate_output_type(processed_item):
             await self.export_processed(processed_item)
         else:
             if raise_on_invalid:
-                raise ValueError(f"Processed item {processed_item} is not a valid output type. "
-                                 f"Expected {self.output_class} or list[{self.output_class}]")
+                logger.error("Invalid processed item type for export: %s (expected %s or list[%s])",
+                             type(processed_item),
+                             self.output_type,
+                             self.output_type)
+                raise ValueError("Invalid processed item type for export")
             logger.warning("Processed item %s is not a valid output type for export", processed_item)
 
     async def _continue_pipeline_after(self, source_processor: Processor, item: Any) -> None:
@@ -214,31 +429,40 @@ class ProcessingExporter(Generic[PipelineInputT, PipelineOutputT], BaseExporter,
             try:
                 source_index = self._processors.index(source_processor)
             except ValueError:
-                logger.error("Source processor %s not found in pipeline", source_processor.__class__.__name__)
+                logger.exception("Source processor %s not found in pipeline", source_processor.__class__.__name__)
                 return
 
             # Process through remaining processors (skip the source processor)
             remaining_processors = self._processors[source_index + 1:]
             processed_item = await self._process_through_processors(remaining_processors, item)
 
+            # Skip export if remaining pipeline dropped the item (returned None)
+            if processed_item is None:
+                logger.debug("Item was dropped by remaining processor pipeline, skipping export")
+                return
+
             # Export the final result
             await self._export_final_item(processed_item)
 
         except Exception as e:
-            logger.error("Failed to continue pipeline processing after %s: %s",
-                         source_processor.__class__.__name__,
-                         e,
-                         exc_info=True)
+            logger.exception("Failed to continue pipeline processing after %s: %s",
+                             source_processor.__class__.__name__,
+                             e)
 
     async def _export_with_processing(self, item: PipelineInputT) -> None:
         """Export an item after processing it through the pipeline.
 
         Args:
-            item: The item to export
+            item (PipelineInputT): The item to export
         """
         try:
             # Then, run through the processor pipeline
-            final_item: PipelineOutputT = await self._process_pipeline(item)
+            final_item: PipelineOutputT | None = await self._process_pipeline(item)
+
+            # Skip export if pipeline dropped the item (returned None)
+            if final_item is None:
+                logger.debug("Item was dropped by processor pipeline, skipping export")
+                return
 
             # Handle different output types from batch processors
             if isinstance(final_item, list) and len(final_item) == 0:
@@ -248,7 +472,7 @@ class ProcessingExporter(Generic[PipelineInputT, PipelineOutputT], BaseExporter,
                 await self._export_final_item(final_item, raise_on_invalid=True)
 
         except Exception as e:
-            logger.error("Failed to export item '%s': %s", item, e, exc_info=True)
+            logger.error("Failed to export item '%s': %s", item, e)
             raise
 
     @override
@@ -262,7 +486,7 @@ class ProcessingExporter(Generic[PipelineInputT, PipelineOutputT], BaseExporter,
         event (IntermediateStep): The event to be exported.
         """
         # Convert IntermediateStep to PipelineInputT and create export task
-        if isinstance(event, self.input_class):
+        if self.validate_input_type(event):
             input_item: PipelineInputT = event  # type: ignore
             coro = self._export_with_processing(input_item)
             self._create_export_task(coro)
@@ -277,12 +501,16 @@ class ProcessingExporter(Generic[PipelineInputT, PipelineOutputT], BaseExporter,
         the actual export logic after the item has been processed through the pipeline.
 
         Args:
-            item: The processed item to export (PipelineOutputT type)
+            item (PipelineOutputT | list[PipelineOutputT]): The processed item to export (PipelineOutputT type)
         """
         pass
 
-    def _create_export_task(self, coro: Coroutine):
-        """Create task with minimal overhead but proper tracking."""
+    def _create_export_task(self, coro: Coroutine) -> None:
+        """Create task with minimal overhead but proper tracking.
+
+        Args:
+            coro: The coroutine to create a task for
+        """
         if not self._running:
             logger.warning("%s: Attempted to create export task while not running", self.name)
             return
@@ -293,11 +521,11 @@ class ProcessingExporter(Generic[PipelineInputT, PipelineOutputT], BaseExporter,
             task.add_done_callback(self._tasks.discard)
 
         except Exception as e:
-            logger.error("%s: Failed to create task: %s", self.name, e, exc_info=True)
+            logger.error("%s: Failed to create task: %s", self.name, e)
             raise
 
     @override
-    async def _cleanup(self):
+    async def _cleanup(self) -> None:
         """Enhanced cleanup that shuts down all shutdown-aware processors.
 
         Each processor is responsible for its own cleanup, including routing
@@ -316,7 +544,7 @@ class ProcessingExporter(Generic[PipelineInputT, PipelineOutputT], BaseExporter,
             await asyncio.gather(*shutdown_tasks, return_exceptions=True)
             logger.debug("Successfully shut down %d processors", len(shutdown_tasks))
         except Exception as e:
-            logger.error("Error shutting down processors: %s", e, exc_info=True)
+            logger.exception("Error shutting down processors: %s", e)
 
         # Call parent cleanup
         await super()._cleanup()
nat/observability/exporter/span_exporter.py

@@ -126,6 +126,7 @@ class SpanExporter(ProcessingExporter[InputSpanT, OutputSpanT], SerializeMixin):
 
         parent_span = None
         span_ctx = None
+        workflow_trace_id = self._context_state.workflow_trace_id.get()
 
         # Look up the parent span to establish hierarchy
         # event.parent_id is the UUID of the last START step with a different UUID from current step
@@ -141,6 +142,9 @@ class SpanExporter(ProcessingExporter[InputSpanT, OutputSpanT], SerializeMixin):
             parent_span = parent_span.model_copy() if isinstance(parent_span, Span) else None
             if parent_span and parent_span.context:
                 span_ctx = SpanContext(trace_id=parent_span.context.trace_id)
+        # No parent: adopt workflow trace id if available to keep all spans in the same trace
+        if span_ctx is None and workflow_trace_id:
+            span_ctx = SpanContext(trace_id=workflow_trace_id)
 
         # Extract start/end times from the step
         # By convention, `span_event_timestamp` is the time we started, `event_timestamp` is the time we ended.
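
Together with the workflow_trace_id lookup added above, this fallback gives a clear precedence for the trace a new span joins: the parent span's context wins, then the workflow-level trace id, and only a span with neither starts a fresh trace. A condensed restatement of that order (not package code):

    workflow_trace_id = context_state.workflow_trace_id.get()
    if parent_span and parent_span.context:
        span_ctx = SpanContext(trace_id=parent_span.context.trace_id)
    elif workflow_trace_id:
        span_ctx = SpanContext(trace_id=workflow_trace_id)
    else:
        span_ctx = None  # root span; presumably a fresh trace id is generated downstream
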
@@ -154,23 +158,39 @@ class SpanExporter(ProcessingExporter[InputSpanT, OutputSpanT], SerializeMixin):
         else:
             sub_span_name = f"{event.payload.event_type}"
 
+        # Prefer parent/context trace id for attribute, else workflow trace id
+        _attr_trace_id = None
+        if span_ctx is not None:
+            _attr_trace_id = span_ctx.trace_id
+        elif parent_span and parent_span.context:
+            _attr_trace_id = parent_span.context.trace_id
+        elif workflow_trace_id:
+            _attr_trace_id = workflow_trace_id
+
+        attributes = {
+            f"{self._span_prefix}.event_type":
+                event.payload.event_type.value,
+            f"{self._span_prefix}.function.id":
+                event.function_ancestry.function_id if event.function_ancestry else "unknown",
+            f"{self._span_prefix}.function.name":
+                event.function_ancestry.function_name if event.function_ancestry else "unknown",
+            f"{self._span_prefix}.subspan.name":
+                event.payload.name or "",
+            f"{self._span_prefix}.event_timestamp":
+                event.event_timestamp,
+            f"{self._span_prefix}.framework":
+                event.payload.framework.value if event.payload.framework else "unknown",
+            f"{self._span_prefix}.conversation.id":
+                self._context_state.conversation_id.get() or "unknown",
+            f"{self._span_prefix}.workflow.run_id":
+                self._context_state.workflow_run_id.get() or "unknown",
+            f"{self._span_prefix}.workflow.trace_id": (f"{_attr_trace_id:032x}" if _attr_trace_id else "unknown"),
+        }
+
         sub_span = Span(name=sub_span_name,
                         parent=parent_span,
                         context=span_ctx,
-                        attributes={
-                            f"{self._span_prefix}.event_type":
-                                event.payload.event_type.value,
-                            f"{self._span_prefix}.function.id":
-                                event.function_ancestry.function_id if event.function_ancestry else "unknown",
-                            f"{self._span_prefix}.function.name":
-                                event.function_ancestry.function_name if event.function_ancestry else "unknown",
-                            f"{self._span_prefix}.subspan.name":
-                                event.payload.name or "",
-                            f"{self._span_prefix}.event_timestamp":
-                                event.event_timestamp,
-                            f"{self._span_prefix}.framework":
-                                event.payload.framework.value if event.payload.framework else "unknown",
-                        },
+                        attributes=attributes,
                         start_time=start_ns)
 
         span_kind = event_type_to_span_kind(event.event_type)
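
The attribute block now also records the conversation id, the workflow run id, and a 32-hex-digit trace id (rendered via f"{_attr_trace_id:032x}"). An illustrative payload, assuming _span_prefix is "nat" (the prefix value is not shown in this diff) and sample ids:

    {
        "nat.event_type": "LLM_END",                 # sample event type value
        "nat.function.name": "my_tool",              # hypothetical function name
        "nat.conversation.id": "unknown",            # no conversation in context
        "nat.workflow.run_id": "run-42",
        "nat.workflow.trace_id": "0123456789abcdef0123456789abcdef",
    }
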
@@ -252,7 +272,7 @@ class SpanExporter(ProcessingExporter[InputSpanT, OutputSpanT], SerializeMixin):
 
         end_metadata = event.payload.metadata or {}
 
-        if not isinstance(end_metadata, (dict, TraceMetadata)):
+        if not isinstance(end_metadata, dict | TraceMetadata):
             logger.warning("Invalid metadata type for step %s", event.UUID)
             return