nvidia-nat: nvidia_nat-1.3.0.dev2-py3-none-any.whl → nvidia_nat-1.3.0rc1-py3-none-any.whl

This diff reflects the contents of two publicly released versions of the package, as published to a supported registry, and is provided for informational purposes only.
Files changed (242)
  1. aiq/__init__.py +2 -2
  2. nat/agent/base.py +24 -15
  3. nat/agent/dual_node.py +9 -4
  4. nat/agent/prompt_optimizer/prompt.py +68 -0
  5. nat/agent/prompt_optimizer/register.py +149 -0
  6. nat/agent/react_agent/agent.py +79 -47
  7. nat/agent/react_agent/register.py +41 -21
  8. nat/agent/reasoning_agent/reasoning_agent.py +11 -9
  9. nat/agent/register.py +1 -1
  10. nat/agent/rewoo_agent/agent.py +326 -148
  11. nat/agent/rewoo_agent/prompt.py +19 -22
  12. nat/agent/rewoo_agent/register.py +46 -26
  13. nat/agent/tool_calling_agent/agent.py +84 -28
  14. nat/agent/tool_calling_agent/register.py +51 -28
  15. nat/authentication/api_key/api_key_auth_provider.py +2 -2
  16. nat/authentication/credential_validator/bearer_token_validator.py +557 -0
  17. nat/authentication/http_basic_auth/http_basic_auth_provider.py +1 -1
  18. nat/authentication/interfaces.py +5 -2
  19. nat/authentication/oauth2/oauth2_auth_code_flow_provider.py +40 -20
  20. nat/authentication/oauth2/oauth2_resource_server_config.py +124 -0
  21. nat/authentication/register.py +0 -1
  22. nat/builder/builder.py +56 -24
  23. nat/builder/component_utils.py +9 -5
  24. nat/builder/context.py +46 -11
  25. nat/builder/eval_builder.py +16 -11
  26. nat/builder/framework_enum.py +1 -0
  27. nat/builder/front_end.py +1 -1
  28. nat/builder/function.py +378 -8
  29. nat/builder/function_base.py +3 -3
  30. nat/builder/function_info.py +6 -8
  31. nat/builder/user_interaction_manager.py +2 -2
  32. nat/builder/workflow.py +13 -1
  33. nat/builder/workflow_builder.py +281 -76
  34. nat/cli/cli_utils/config_override.py +2 -2
  35. nat/cli/commands/evaluate.py +1 -1
  36. nat/cli/commands/info/info.py +16 -6
  37. nat/cli/commands/info/list_channels.py +1 -1
  38. nat/cli/commands/info/list_components.py +7 -8
  39. nat/cli/commands/mcp/__init__.py +14 -0
  40. nat/cli/commands/mcp/mcp.py +986 -0
  41. nat/cli/commands/object_store/__init__.py +14 -0
  42. nat/cli/commands/object_store/object_store.py +227 -0
  43. nat/cli/commands/optimize.py +90 -0
  44. nat/cli/commands/registry/publish.py +2 -2
  45. nat/cli/commands/registry/pull.py +2 -2
  46. nat/cli/commands/registry/remove.py +2 -2
  47. nat/cli/commands/registry/search.py +15 -17
  48. nat/cli/commands/start.py +16 -5
  49. nat/cli/commands/uninstall.py +1 -1
  50. nat/cli/commands/workflow/templates/config.yml.j2 +0 -1
  51. nat/cli/commands/workflow/templates/pyproject.toml.j2 +4 -1
  52. nat/cli/commands/workflow/templates/register.py.j2 +0 -1
  53. nat/cli/commands/workflow/workflow_commands.py +9 -13
  54. nat/cli/entrypoint.py +8 -10
  55. nat/cli/register_workflow.py +38 -4
  56. nat/cli/type_registry.py +75 -6
  57. nat/control_flow/__init__.py +0 -0
  58. nat/control_flow/register.py +20 -0
  59. nat/control_flow/router_agent/__init__.py +0 -0
  60. nat/control_flow/router_agent/agent.py +329 -0
  61. nat/control_flow/router_agent/prompt.py +48 -0
  62. nat/control_flow/router_agent/register.py +91 -0
  63. nat/control_flow/sequential_executor.py +166 -0
  64. nat/data_models/agent.py +34 -0
  65. nat/data_models/api_server.py +10 -10
  66. nat/data_models/authentication.py +23 -9
  67. nat/data_models/common.py +1 -1
  68. nat/data_models/component.py +2 -0
  69. nat/data_models/component_ref.py +11 -0
  70. nat/data_models/config.py +41 -17
  71. nat/data_models/dataset_handler.py +1 -1
  72. nat/data_models/discovery_metadata.py +4 -4
  73. nat/data_models/evaluate.py +4 -1
  74. nat/data_models/function.py +34 -0
  75. nat/data_models/function_dependencies.py +14 -6
  76. nat/data_models/gated_field_mixin.py +242 -0
  77. nat/data_models/intermediate_step.py +3 -3
  78. nat/data_models/optimizable.py +119 -0
  79. nat/data_models/optimizer.py +149 -0
  80. nat/data_models/swe_bench_model.py +1 -1
  81. nat/data_models/temperature_mixin.py +44 -0
  82. nat/data_models/thinking_mixin.py +86 -0
  83. nat/data_models/top_p_mixin.py +44 -0
  84. nat/embedder/nim_embedder.py +1 -1
  85. nat/embedder/openai_embedder.py +1 -1
  86. nat/embedder/register.py +0 -1
  87. nat/eval/config.py +3 -1
  88. nat/eval/dataset_handler/dataset_handler.py +71 -7
  89. nat/eval/evaluate.py +86 -31
  90. nat/eval/evaluator/base_evaluator.py +1 -1
  91. nat/eval/evaluator/evaluator_model.py +13 -0
  92. nat/eval/intermediate_step_adapter.py +1 -1
  93. nat/eval/rag_evaluator/evaluate.py +2 -2
  94. nat/eval/rag_evaluator/register.py +3 -3
  95. nat/eval/register.py +4 -1
  96. nat/eval/remote_workflow.py +3 -3
  97. nat/eval/runtime_evaluator/__init__.py +14 -0
  98. nat/eval/runtime_evaluator/evaluate.py +123 -0
  99. nat/eval/runtime_evaluator/register.py +100 -0
  100. nat/eval/swe_bench_evaluator/evaluate.py +6 -6
  101. nat/eval/trajectory_evaluator/evaluate.py +1 -1
  102. nat/eval/trajectory_evaluator/register.py +1 -1
  103. nat/eval/tunable_rag_evaluator/evaluate.py +4 -7
  104. nat/eval/utils/eval_trace_ctx.py +89 -0
  105. nat/eval/utils/weave_eval.py +18 -9
  106. nat/experimental/decorators/experimental_warning_decorator.py +27 -7
  107. nat/experimental/test_time_compute/functions/plan_select_execute_function.py +7 -3
  108. nat/experimental/test_time_compute/functions/ttc_tool_orchestration_function.py +3 -3
  109. nat/experimental/test_time_compute/functions/ttc_tool_wrapper_function.py +1 -1
  110. nat/experimental/test_time_compute/models/strategy_base.py +5 -4
  111. nat/experimental/test_time_compute/register.py +0 -1
  112. nat/experimental/test_time_compute/selection/llm_based_output_merging_selector.py +1 -3
  113. nat/front_ends/console/authentication_flow_handler.py +82 -30
  114. nat/front_ends/console/console_front_end_plugin.py +8 -5
  115. nat/front_ends/fastapi/auth_flow_handlers/websocket_flow_handler.py +52 -17
  116. nat/front_ends/fastapi/dask_client_mixin.py +65 -0
  117. nat/front_ends/fastapi/fastapi_front_end_config.py +36 -5
  118. nat/front_ends/fastapi/fastapi_front_end_controller.py +4 -4
  119. nat/front_ends/fastapi/fastapi_front_end_plugin.py +135 -4
  120. nat/front_ends/fastapi/fastapi_front_end_plugin_worker.py +481 -281
  121. nat/front_ends/fastapi/job_store.py +518 -99
  122. nat/front_ends/fastapi/main.py +11 -19
  123. nat/front_ends/fastapi/message_handler.py +13 -14
  124. nat/front_ends/fastapi/message_validator.py +17 -19
  125. nat/front_ends/fastapi/response_helpers.py +4 -4
  126. nat/front_ends/fastapi/step_adaptor.py +2 -2
  127. nat/front_ends/fastapi/utils.py +57 -0
  128. nat/front_ends/mcp/introspection_token_verifier.py +73 -0
  129. nat/front_ends/mcp/mcp_front_end_config.py +10 -1
  130. nat/front_ends/mcp/mcp_front_end_plugin.py +45 -13
  131. nat/front_ends/mcp/mcp_front_end_plugin_worker.py +116 -8
  132. nat/front_ends/mcp/tool_converter.py +44 -14
  133. nat/front_ends/register.py +0 -1
  134. nat/front_ends/simple_base/simple_front_end_plugin_base.py +3 -1
  135. nat/llm/aws_bedrock_llm.py +24 -12
  136. nat/llm/azure_openai_llm.py +13 -6
  137. nat/llm/litellm_llm.py +69 -0
  138. nat/llm/nim_llm.py +20 -8
  139. nat/llm/openai_llm.py +14 -6
  140. nat/llm/register.py +4 -1
  141. nat/llm/utils/env_config_value.py +2 -3
  142. nat/llm/utils/thinking.py +215 -0
  143. nat/meta/pypi.md +9 -9
  144. nat/object_store/register.py +0 -1
  145. nat/observability/exporter/base_exporter.py +3 -3
  146. nat/observability/exporter/file_exporter.py +1 -1
  147. nat/observability/exporter/processing_exporter.py +309 -81
  148. nat/observability/exporter/span_exporter.py +1 -1
  149. nat/observability/exporter_manager.py +7 -7
  150. nat/observability/mixin/file_mixin.py +7 -7
  151. nat/observability/mixin/redaction_config_mixin.py +42 -0
  152. nat/observability/mixin/tagging_config_mixin.py +62 -0
  153. nat/observability/mixin/type_introspection_mixin.py +420 -107
  154. nat/observability/processor/batching_processor.py +5 -7
  155. nat/observability/processor/falsy_batch_filter_processor.py +55 -0
  156. nat/observability/processor/processor.py +3 -0
  157. nat/observability/processor/processor_factory.py +70 -0
  158. nat/observability/processor/redaction/__init__.py +24 -0
  159. nat/observability/processor/redaction/contextual_redaction_processor.py +125 -0
  160. nat/observability/processor/redaction/contextual_span_redaction_processor.py +66 -0
  161. nat/observability/processor/redaction/redaction_processor.py +177 -0
  162. nat/observability/processor/redaction/span_header_redaction_processor.py +92 -0
  163. nat/observability/processor/span_tagging_processor.py +68 -0
  164. nat/observability/register.py +6 -4
  165. nat/profiler/calc/calc_runner.py +3 -4
  166. nat/profiler/callbacks/agno_callback_handler.py +1 -1
  167. nat/profiler/callbacks/langchain_callback_handler.py +6 -6
  168. nat/profiler/callbacks/llama_index_callback_handler.py +3 -3
  169. nat/profiler/callbacks/semantic_kernel_callback_handler.py +3 -3
  170. nat/profiler/data_frame_row.py +1 -1
  171. nat/profiler/decorators/framework_wrapper.py +62 -13
  172. nat/profiler/decorators/function_tracking.py +160 -3
  173. nat/profiler/forecasting/models/forecasting_base_model.py +3 -1
  174. nat/profiler/inference_optimization/bottleneck_analysis/simple_stack_analysis.py +1 -1
  175. nat/profiler/inference_optimization/data_models.py +3 -3
  176. nat/profiler/inference_optimization/experimental/prefix_span_analysis.py +7 -8
  177. nat/profiler/inference_optimization/token_uniqueness.py +1 -1
  178. nat/profiler/parameter_optimization/__init__.py +0 -0
  179. nat/profiler/parameter_optimization/optimizable_utils.py +93 -0
  180. nat/profiler/parameter_optimization/optimizer_runtime.py +67 -0
  181. nat/profiler/parameter_optimization/parameter_optimizer.py +153 -0
  182. nat/profiler/parameter_optimization/parameter_selection.py +107 -0
  183. nat/profiler/parameter_optimization/pareto_visualizer.py +380 -0
  184. nat/profiler/parameter_optimization/prompt_optimizer.py +384 -0
  185. nat/profiler/parameter_optimization/update_helpers.py +66 -0
  186. nat/profiler/profile_runner.py +14 -9
  187. nat/profiler/utils.py +4 -2
  188. nat/registry_handlers/local/local_handler.py +2 -2
  189. nat/registry_handlers/package_utils.py +1 -2
  190. nat/registry_handlers/pypi/pypi_handler.py +23 -26
  191. nat/registry_handlers/register.py +3 -4
  192. nat/registry_handlers/rest/rest_handler.py +12 -13
  193. nat/retriever/milvus/retriever.py +2 -2
  194. nat/retriever/nemo_retriever/retriever.py +1 -1
  195. nat/retriever/register.py +0 -1
  196. nat/runtime/loader.py +2 -2
  197. nat/runtime/runner.py +3 -2
  198. nat/runtime/session.py +43 -8
  199. nat/settings/global_settings.py +16 -5
  200. nat/tool/chat_completion.py +5 -2
  201. nat/tool/code_execution/local_sandbox/local_sandbox_server.py +3 -3
  202. nat/tool/datetime_tools.py +49 -9
  203. nat/tool/document_search.py +2 -2
  204. nat/tool/github_tools.py +450 -0
  205. nat/tool/nvidia_rag.py +1 -1
  206. nat/tool/register.py +2 -9
  207. nat/tool/retriever.py +3 -2
  208. nat/utils/callable_utils.py +70 -0
  209. nat/utils/data_models/schema_validator.py +3 -3
  210. nat/utils/exception_handlers/automatic_retries.py +104 -51
  211. nat/utils/exception_handlers/schemas.py +1 -1
  212. nat/utils/io/yaml_tools.py +2 -2
  213. nat/utils/log_levels.py +25 -0
  214. nat/utils/reactive/base/observable_base.py +2 -2
  215. nat/utils/reactive/base/observer_base.py +1 -1
  216. nat/utils/reactive/observable.py +2 -2
  217. nat/utils/reactive/observer.py +4 -4
  218. nat/utils/reactive/subscription.py +1 -1
  219. nat/utils/settings/global_settings.py +6 -8
  220. nat/utils/type_converter.py +4 -3
  221. nat/utils/type_utils.py +9 -5
  222. {nvidia_nat-1.3.0.dev2.dist-info → nvidia_nat-1.3.0rc1.dist-info}/METADATA +42 -16
  223. {nvidia_nat-1.3.0.dev2.dist-info → nvidia_nat-1.3.0rc1.dist-info}/RECORD +230 -189
  224. {nvidia_nat-1.3.0.dev2.dist-info → nvidia_nat-1.3.0rc1.dist-info}/entry_points.txt +1 -0
  225. nat/cli/commands/info/list_mcp.py +0 -304
  226. nat/tool/github_tools/create_github_commit.py +0 -133
  227. nat/tool/github_tools/create_github_issue.py +0 -87
  228. nat/tool/github_tools/create_github_pr.py +0 -106
  229. nat/tool/github_tools/get_github_file.py +0 -106
  230. nat/tool/github_tools/get_github_issue.py +0 -166
  231. nat/tool/github_tools/get_github_pr.py +0 -256
  232. nat/tool/github_tools/update_github_issue.py +0 -100
  233. nat/tool/mcp/exceptions.py +0 -142
  234. nat/tool/mcp/mcp_client.py +0 -255
  235. nat/tool/mcp/mcp_tool.py +0 -96
  236. nat/utils/exception_handlers/mcp.py +0 -211
  237. /nat/{tool/github_tools → agent/prompt_optimizer}/__init__.py +0 -0
  238. /nat/{tool/mcp → authentication/credential_validator}/__init__.py +0 -0
  239. {nvidia_nat-1.3.0.dev2.dist-info → nvidia_nat-1.3.0rc1.dist-info}/WHEEL +0 -0
  240. {nvidia_nat-1.3.0.dev2.dist-info → nvidia_nat-1.3.0rc1.dist-info}/licenses/LICENSE-3rd-party.txt +0 -0
  241. {nvidia_nat-1.3.0.dev2.dist-info → nvidia_nat-1.3.0rc1.dist-info}/licenses/LICENSE.md +0 -0
  242. {nvidia_nat-1.3.0.dev2.dist-info → nvidia_nat-1.3.0rc1.dist-info}/top_level.txt +0 -0
nat/observability/exporter/processing_exporter.py

@@ -50,46 +50,76 @@ class ProcessingExporter(Generic[PipelineInputT, PipelineOutputT], BaseExporter,
     - Processor pipeline management (add, remove, clear)
     - Type compatibility validation between processors
     - Pipeline processing with error handling
+    - Configurable None filtering: processors returning None can drop items from pipeline
     - Automatic type validation before export
     """
+    # All ProcessingExporter instances automatically use this for signature checking
+    _signature_method = '_process_pipeline'
 
-    def __init__(self, context_state: ContextState | None = None):
+    def __init__(self, context_state: ContextState | None = None, drop_nones: bool = True):
         """Initialize the processing exporter.
 
         Args:
-            context_state: The context state to use for the exporter.
+            context_state (ContextState | None): The context state to use for the exporter.
+            drop_nones (bool): Whether to drop items when processors return None (default: True).
         """
         super().__init__(context_state)
         self._processors: list[Processor] = []  # List of processors that implement process(item) -> item
-
-    def add_processor(self, processor: Processor) -> None:
+        self._processor_names: dict[str, int] = {}  # Maps processor names to their positions
+        self._pipeline_locked: bool = False  # Prevents modifications after startup
+        self._drop_nones: bool = drop_nones  # Whether to drop None values between processors
+
+    def add_processor(self,
+                      processor: Processor,
+                      name: str | None = None,
+                      position: int | None = None,
+                      before: str | None = None,
+                      after: str | None = None) -> None:
         """Add a processor to the processing pipeline.
 
-        Processors are executed in the order they are added.
-        Processors can transform between any types (T -> U).
+        Processors are executed in the order they are added. Processors can transform between any types (T -> U).
+        Supports flexible positioning using names, positions, or relative placement.
 
         Args:
-            processor: The processor to add to the pipeline
+            processor (Processor): The processor to add to the pipeline
+            name (str | None): Name for the processor (for later reference). Must be unique.
+            position (int | None): Specific position to insert at (0-based index, -1 for append)
+            before (str | None): Insert before the named processor
+            after (str | None): Insert after the named processor
+
+        Raises:
+            RuntimeError: If pipeline is locked (after startup)
+            ValueError: If positioning arguments conflict or named processor not found
         """
+        self._check_pipeline_locked()
 
-        # Check if the processor is compatible with the last processor in the pipeline
-        if len(self._processors) > 0:
-            try:
-                if not issubclass(processor.input_class, self._processors[-1].output_class):
-                    raise ValueError(f"Processor {processor.__class__.__name__} input type {processor.input_type} "
-                                     f"is not compatible with the {self._processors[-1].__class__.__name__} "
-                                     f"output type {self._processors[-1].output_type}")
-            except TypeError:
-                # Handle cases where input_class or output_class are generic types that can't be used with issubclass
-                # Fall back to type comparison for generic types
-                logger.warning(
-                    "Cannot use issubclass() for type compatibility check between "
-                    "%s (%s) and %s (%s). Skipping compatibility check.",
-                    processor.__class__.__name__,
-                    processor.input_type,
-                    self._processors[-1].__class__.__name__,
-                    self._processors[-1].output_type)
-        self._processors.append(processor)
+        # Determine insertion position
+        insert_position = self._calculate_insertion_position(position, before, after)
+
+        # Validate type compatibility at insertion point
+        self._validate_insertion_compatibility(processor, insert_position)
+
+        # Pre-validate name (no side effects yet)
+        if name is not None:
+            if not isinstance(name, str):
+                raise TypeError(f"Processor name must be a string, got {type(name).__name__}")
+            if name in self._processor_names:
+                raise ValueError(f"Processor name '{name}' already exists")
+
+        # Shift existing name positions (do this before list mutation)
+        for proc_name, pos in list(self._processor_names.items()):
+            if pos >= insert_position:
+                self._processor_names[proc_name] = pos + 1
+
+        # Insert the processor
+        if insert_position == len(self._processors):
+            self._processors.append(processor)
+        else:
+            self._processors.insert(insert_position, processor)
+
+        # Record the new processor name, if provided
+        if name is not None:
+            self._processor_names[name] = insert_position
 
         # Set up pipeline continuation callback for processors that support it
         if isinstance(processor, CallbackProcessor):
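
Taken together, the new keyword arguments give add_processor insert-anywhere semantics: append by default, or target a spot by index or by a previously registered name. A minimal usage sketch against the signature above; the exporter and the processor instances (redact, tag, batch, probe) are hypothetical stand-ins for concrete Processor subclasses:

    # Hypothetical pipeline assembly on a ProcessingExporter subclass instance.
    exporter.add_processor(redact, name="redact")            # appended at the end
    exporter.add_processor(batch, name="batch")              # appended after "redact"
    exporter.add_processor(tag, name="tag", after="redact")  # lands between "redact" and "batch"
    exporter.add_processor(probe, position=0)                # explicit 0-based insert at the front

Only one of position, before, or after may be given per call, and each insertion is type-checked against both neighbors before the pipeline mutates.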
@@ -99,64 +129,240 @@ class ProcessingExporter(Generic[PipelineInputT, PipelineOutputT], BaseExporter,
 
             processor.set_done_callback(pipeline_callback)
 
-    def remove_processor(self, processor: Processor) -> None:
+    def remove_processor(self, processor: Processor | str | int) -> None:
         """Remove a processor from the processing pipeline.
 
         Args:
-            processor: The processor to remove from the pipeline
+            processor (Processor | str | int): The processor to remove (by name, position, or object).
+
+        Raises:
+            RuntimeError: If pipeline is locked (after startup)
+            ValueError: If named processor or position not found
+            TypeError: If processor argument has invalid type
         """
-        if processor in self._processors:
-            self._processors.remove(processor)
+        self._check_pipeline_locked()
+
+        # Determine processor and position to remove
+        if isinstance(processor, str):
+            # Remove by name
+            if processor not in self._processor_names:
+                raise ValueError(f"Processor '{processor}' not found in pipeline")
+            position = self._processor_names[processor]
+            processor_obj = self._processors[position]
+        elif isinstance(processor, int):
+            # Remove by position
+            if not (0 <= processor < len(self._processors)):
+                raise ValueError(f"Position {processor} is out of range [0, {len(self._processors) - 1}]")
+            position = processor
+            processor_obj = self._processors[position]
+        elif isinstance(processor, Processor):
+            # Remove by object (existing behavior)
+            if processor not in self._processors:
+                return  # Silently ignore if not found (existing behavior)
+            position = self._processors.index(processor)
+            processor_obj = processor
+        else:
+            raise TypeError(f"Processor must be a Processor object, string name, or int position, "
+                            f"got {type(processor).__name__}")
+
+        # Remove the processor
+        self._processors.remove(processor_obj)
+
+        # Remove from name mapping and update positions
+        name_to_remove = None
+        for name, pos in self._processor_names.items():
+            if pos == position:
+                name_to_remove = name
+                break
+
+        if name_to_remove:
+            del self._processor_names[name_to_remove]
+
+        # Update positions for processors that shifted
+        for name, pos in self._processor_names.items():
+            if pos > position:
+                self._processor_names[name] = pos - 1
 
     def clear_processors(self) -> None:
         """Clear all processors from the pipeline."""
+        self._check_pipeline_locked()
         self._processors.clear()
+        self._processor_names.clear()
+
+    def reset_pipeline(self) -> None:
+        """Reset the pipeline to allow modifications.
+
+        This unlocks the pipeline and clears all processors, allowing
+        the pipeline to be reconfigured. Can only be called when the
+        exporter is stopped.
+
+        Raises:
+            RuntimeError: If exporter is currently running
+        """
+        if self._running:
+            raise RuntimeError("Cannot reset pipeline while exporter is running. "
+                               "Call stop() first, then reset_pipeline().")
+
+        self._pipeline_locked = False
+        self._processors.clear()
+        self._processor_names.clear()
+        logger.debug("Pipeline reset - unlocked and cleared all processors")
+
+    def get_processor_by_name(self, name: str) -> Processor | None:
+        """Get a processor by its name.
+
+        Args:
+            name (str): The name of the processor to retrieve
+
+        Returns:
+            Processor | None: The processor with the given name, or None if not found
+        """
+        if not isinstance(name, str):
+            raise TypeError(f"Processor name must be a string, got {type(name).__name__}")
+        if name in self._processor_names:
+            position = self._processor_names[name]
+            return self._processors[position]
+        logger.debug("Processor '%s' not found in pipeline", name)
+        return None
+
+    def _check_pipeline_locked(self) -> None:
+        """Check if pipeline is locked and raise error if it is."""
+        if self._pipeline_locked:
+            raise RuntimeError("Cannot modify processor pipeline after exporter has started. "
+                               "Pipeline must be fully configured before calling start().")
+
+    def _calculate_insertion_position(self, position: int | None, before: str | None, after: str | None) -> int:
+        """Calculate the insertion position based on provided arguments.
+
+        Args:
+            position (int | None): Explicit position (0-based index, -1 for append)
+            before (str | None): Insert before this named processor
+            after (str | None): Insert after this named processor
+
+        Returns:
+            int: The calculated insertion position
+
+        Raises:
+            ValueError: If arguments conflict or named processor not found
+        """
+        # Check for conflicting arguments
+        args_provided = sum(x is not None for x in [position, before, after])
+        if args_provided > 1:
+            raise ValueError("Only one of position, before, or after can be specified")
+
+        # Default to append
+        if args_provided == 0:
+            return len(self._processors)
+
+        # Handle explicit position
+        if position is not None:
+            if position == -1:
+                return len(self._processors)
+            if 0 <= position <= len(self._processors):
+                return position
+            raise ValueError(f"Position {position} is out of range [0, {len(self._processors)}]")
+
+        # Handle before/after named processors
+        if before is not None:
+            if not isinstance(before, str):
+                raise TypeError(f"'before' parameter must be a string, got {type(before).__name__}")
+            if before not in self._processor_names:
+                raise ValueError(f"Processor '{before}' not found in pipeline")
+            return self._processor_names[before]
+
+        if after is not None:
+            if not isinstance(after, str):
+                raise TypeError(f"'after' parameter must be a string, got {type(after).__name__}")
+            if after not in self._processor_names:
+                raise ValueError(f"Processor '{after}' not found in pipeline")
+            return self._processor_names[after] + 1
+
+        # Should never reach here
+        return len(self._processors)
+
+    def _validate_insertion_compatibility(self, processor: Processor, position: int) -> None:
+        """Validate type compatibility for processor insertion.
+
+        Args:
+            processor (Processor): The processor to insert
+            position (int): The position where it will be inserted
+
+        Raises:
+            ValueError: If processor is not compatible with neighbors
+        """
+        # Check compatibility with neighbors
+        if position > 0:
+            predecessor = self._processors[position - 1]
+            self._check_processor_compatibility(predecessor,
+                                                processor,
+                                                "predecessor",
+                                                str(predecessor.output_type),
+                                                str(processor.input_type))
+
+        if position < len(self._processors):
+            successor = self._processors[position]
+            self._check_processor_compatibility(processor,
+                                                successor,
+                                                "successor",
+                                                str(processor.output_type),
+                                                str(successor.input_type))
+
+    def _check_processor_compatibility(self,
+                                       source_processor: Processor,
+                                       target_processor: Processor,
+                                       relationship: str,
+                                       source_type: str,
+                                       target_type: str) -> None:
+        """Check type compatibility between two processors using Pydantic validation.
+
+        Args:
+            source_processor (Processor): The processor providing output
+            target_processor (Processor): The processor receiving input
+            relationship (str): Description of relationship ("predecessor" or "successor")
+            source_type (str): String representation of source type
+            target_type (str): String representation of target type
+        """
+        # Use Pydantic-based type compatibility checking
+        if not source_processor.is_output_compatible_with(target_processor.input_type):
+            raise ValueError(f"Processor {target_processor.__class__.__name__} input type {target_type} "
+                             f"is not compatible with {relationship} {source_processor.__class__.__name__} "
+                             f"output type {source_type}")
 
     async def _pre_start(self) -> None:
+
+        # Validate that the pipeline is compatible with the exporter
         if len(self._processors) > 0:
             first_processor = self._processors[0]
             last_processor = self._processors[-1]
 
             # validate that the first processor's input type is compatible with the exporter's input type
-            try:
-                if not issubclass(first_processor.input_class, self.input_class):
-                    raise ValueError(f"Processor {first_processor.__class__.__name__} input type "
-                                     f"{first_processor.input_type} is not compatible with the "
-                                     f"{self.input_type} input type")
-            except TypeError as e:
-                # Handle cases where classes are generic types that can't be used with issubclass
-                logger.warning(
-                    "Cannot validate type compatibility between %s (%s) "
-                    "and exporter (%s): %s. Skipping validation.",
-                    first_processor.__class__.__name__,
-                    first_processor.input_type,
-                    self.input_type,
-                    e)
-
+            if not first_processor.is_compatible_with_input(self.input_type):
+                logger.error("First processor %s input=%s incompatible with exporter input=%s",
+                             first_processor.__class__.__name__,
+                             first_processor.input_type,
+                             self.input_type)
+                raise ValueError("First processor incompatible with exporter input")
             # Validate that the last processor's output type is compatible with the exporter's output type
-            try:
-                if not DecomposedType.is_type_compatible(last_processor.output_type, self.output_type):
-                    raise ValueError(f"Processor {last_processor.__class__.__name__} output type "
-                                     f"{last_processor.output_type} is not compatible with the "
-                                     f"{self.output_type} output type")
-            except TypeError as e:
-                # Handle cases where classes are generic types that can't be used with issubclass
-                logger.warning(
-                    "Cannot validate type compatibility between %s (%s) "
-                    "and exporter (%s): %s. Skipping validation.",
-                    last_processor.__class__.__name__,
-                    last_processor.output_type,
-                    self.output_type,
-                    e)
-
-    async def _process_pipeline(self, item: PipelineInputT) -> PipelineOutputT:
+            # Use DecomposedType.is_type_compatible for the final export stage to allow batch compatibility
+            # This enables BatchingProcessor[T] -> Exporter[T] patterns where the exporter handles both T and list[T]
+            if not DecomposedType.is_type_compatible(last_processor.output_type, self.output_type):
+                logger.error("Last processor %s output=%s incompatible with exporter output=%s",
+                             last_processor.__class__.__name__,
+                             last_processor.output_type,
+                             self.output_type)
+                raise ValueError("Last processor incompatible with exporter output")
+
+        # Lock the pipeline to prevent further modifications
+        self._pipeline_locked = True
+
+    async def _process_pipeline(self, item: PipelineInputT) -> PipelineOutputT | None:
         """Process item through all registered processors.
 
         Args:
             item (PipelineInputT): The item to process (starts as PipelineInputT, can transform to PipelineOutputT)
 
         Returns:
-            PipelineOutputT: The processed item after running through all processors
+            PipelineOutputT | None: The processed item after running through all processors
         """
         return await self._process_through_processors(self._processors, item)  # type: ignore
 
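The management methods above pair with named registration. A brief sketch, reusing the hypothetical names from the earlier example:

    exporter.remove_processor("tag")    # by name
    exporter.remove_processor(0)        # by 0-based position
    exporter.remove_processor(batch)    # by object; silently ignored if absent

    redact = exporter.get_processor_by_name("redact")  # None if no such name

    # _pre_start() locks the pipeline when the exporter starts; to reconfigure,
    # stop the exporter first and then reset.
    await exporter.stop()
    exporter.reset_pipeline()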
@@ -168,14 +374,20 @@ class ProcessingExporter(Generic[PipelineInputT, PipelineOutputT], BaseExporter,
             item (Any): The item to process
 
         Returns:
-            The processed item after running through all processors
+            Any: The processed item after running through all processors, or None if
+                drop_nones is True and any processor returned None
         """
         processed_item = item
         for processor in processors:
             try:
                 processed_item = await processor.process(processed_item)
+                # Drop None values between processors if configured to do so
+                if self._drop_nones and processed_item is None:
+                    logger.debug("Processor %s returned None, dropping item from pipeline",
+                                 processor.__class__.__name__)
+                    return None
             except Exception as e:
-                logger.error("Error in processor %s: %s", processor.__class__.__name__, e, exc_info=True)
+                logger.exception("Error in processor %s: %s", processor.__class__.__name__, e)
                 # Continue with unprocessed item rather than failing
         return processed_item
 
@@ -191,12 +403,15 @@ class ProcessingExporter(Generic[PipelineInputT, PipelineOutputT], BaseExporter,
                 await self.export_processed(processed_item)
             else:
                 logger.debug("Skipping export of empty batch")
-        elif isinstance(processed_item, self.output_class):
+        elif self.validate_output_type(processed_item):
             await self.export_processed(processed_item)
         else:
             if raise_on_invalid:
-                raise ValueError(f"Processed item {processed_item} is not a valid output type. "
-                                 f"Expected {self.output_class} or list[{self.output_class}]")
+                logger.error("Invalid processed item type for export: %s (expected %s or list[%s])",
+                             type(processed_item),
+                             self.output_type,
+                             self.output_type)
+                raise ValueError("Invalid processed item type for export")
             logger.warning("Processed item %s is not a valid output type for export", processed_item)
 
     async def _continue_pipeline_after(self, source_processor: Processor, item: Any) -> None:
@@ -214,31 +429,40 @@ class ProcessingExporter(Generic[PipelineInputT, PipelineOutputT], BaseExporter,
             try:
                 source_index = self._processors.index(source_processor)
             except ValueError:
-                logger.error("Source processor %s not found in pipeline", source_processor.__class__.__name__)
+                logger.exception("Source processor %s not found in pipeline", source_processor.__class__.__name__)
                 return
 
             # Process through remaining processors (skip the source processor)
             remaining_processors = self._processors[source_index + 1:]
             processed_item = await self._process_through_processors(remaining_processors, item)
 
+            # Skip export if remaining pipeline dropped the item (returned None)
+            if processed_item is None:
+                logger.debug("Item was dropped by remaining processor pipeline, skipping export")
+                return
+
             # Export the final result
             await self._export_final_item(processed_item)
 
         except Exception as e:
-            logger.error("Failed to continue pipeline processing after %s: %s",
-                         source_processor.__class__.__name__,
-                         e,
-                         exc_info=True)
+            logger.exception("Failed to continue pipeline processing after %s: %s",
+                             source_processor.__class__.__name__,
+                             e)
 
     async def _export_with_processing(self, item: PipelineInputT) -> None:
         """Export an item after processing it through the pipeline.
 
         Args:
-            item: The item to export
+            item (PipelineInputT): The item to export
         """
         try:
             # Then, run through the processor pipeline
-            final_item: PipelineOutputT = await self._process_pipeline(item)
+            final_item: PipelineOutputT | None = await self._process_pipeline(item)
+
+            # Skip export if pipeline dropped the item (returned None)
+            if final_item is None:
+                logger.debug("Item was dropped by processor pipeline, skipping export")
+                return
 
             # Handle different output types from batch processors
             if isinstance(final_item, list) and len(final_item) == 0:
@@ -248,7 +472,7 @@ class ProcessingExporter(Generic[PipelineInputT, PipelineOutputT], BaseExporter,
                 await self._export_final_item(final_item, raise_on_invalid=True)
 
         except Exception as e:
-            logger.error("Failed to export item '%s': %s", item, e, exc_info=True)
+            logger.error("Failed to export item '%s': %s", item, e)
             raise
 
     @override
@@ -262,7 +486,7 @@ class ProcessingExporter(Generic[PipelineInputT, PipelineOutputT], BaseExporter,
             event (IntermediateStep): The event to be exported.
         """
         # Convert IntermediateStep to PipelineInputT and create export task
-        if isinstance(event, self.input_class):
+        if self.validate_input_type(event):
             input_item: PipelineInputT = event  # type: ignore
             coro = self._export_with_processing(input_item)
             self._create_export_task(coro)
@@ -277,12 +501,16 @@ class ProcessingExporter(Generic[PipelineInputT, PipelineOutputT], BaseExporter,
         the actual export logic after the item has been processed through the pipeline.
 
         Args:
-            item: The processed item to export (PipelineOutputT type)
+            item (PipelineOutputT | list[PipelineOutputT]): The processed item to export (PipelineOutputT type)
         """
         pass
 
-    def _create_export_task(self, coro: Coroutine):
-        """Create task with minimal overhead but proper tracking."""
+    def _create_export_task(self, coro: Coroutine) -> None:
+        """Create task with minimal overhead but proper tracking.
+
+        Args:
+            coro: The coroutine to create a task for
+        """
         if not self._running:
             logger.warning("%s: Attempted to create export task while not running", self.name)
             return
@@ -293,11 +521,11 @@ class ProcessingExporter(Generic[PipelineInputT, PipelineOutputT], BaseExporter,
             task.add_done_callback(self._tasks.discard)
 
         except Exception as e:
-            logger.error("%s: Failed to create task: %s", self.name, e, exc_info=True)
+            logger.error("%s: Failed to create task: %s", self.name, e)
             raise
 
     @override
-    async def _cleanup(self):
+    async def _cleanup(self) -> None:
         """Enhanced cleanup that shuts down all shutdown-aware processors.
 
         Each processor is responsible for its own cleanup, including routing
@@ -316,7 +544,7 @@ class ProcessingExporter(Generic[PipelineInputT, PipelineOutputT], BaseExporter,
                 await asyncio.gather(*shutdown_tasks, return_exceptions=True)
                 logger.debug("Successfully shut down %d processors", len(shutdown_tasks))
             except Exception as e:
-                logger.error("Error shutting down processors: %s", e, exc_info=True)
+                logger.exception("Error shutting down processors: %s", e)
 
         # Call parent cleanup
         await super()._cleanup()
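
A recurring change in this diff (here and in the exporter manager and file mixin below) replaces logger.error(..., exc_info=True) with logger.exception(...). Inside an except block the two are equivalent, an ERROR-level record plus the active traceback; logger.exception is simply the idiomatic spelling:

    import logging

    logging.basicConfig()
    logger = logging.getLogger(__name__)

    try:
        1 / 0
    except ZeroDivisionError as e:
        # Both calls emit the same record: ERROR level plus traceback.
        logger.error("Error in processor %s: %s", "Demo", e, exc_info=True)
        logger.exception("Error in processor %s: %s", "Demo", e)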
nat/observability/exporter/span_exporter.py

@@ -252,7 +252,7 @@ class SpanExporter(ProcessingExporter[InputSpanT, OutputSpanT], SerializeMixin):
 
         end_metadata = event.payload.metadata or {}
 
-        if not isinstance(end_metadata, (dict, TraceMetadata)):
+        if not isinstance(end_metadata, dict | TraceMetadata):
             logger.warning("Invalid metadata type for step %s", event.UUID)
             return
 
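The rewritten check relies on isinstance accepting PEP 604 union types (X | Y), supported since Python 3.10, and behaves identically to the tuple form. A quick self-contained check with a stand-in for TraceMetadata:

    class TraceMetadata:  # stand-in for the real model
        pass

    for value in ({"k": "v"}, TraceMetadata(), 42):
        assert isinstance(value, dict | TraceMetadata) == isinstance(value, (dict, TraceMetadata))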
nat/observability/exporter_manager.py

@@ -177,14 +177,14 @@ class ExporterManager:
                 else:
                     logger.debug("Skipping cleanup for non-isolated exporter '%s'", name)
             except Exception as e:
-                logger.error("Error preparing cleanup for isolated exporter '%s': %s", name, e)
+                logger.exception("Error preparing cleanup for isolated exporter '%s': %s", name, e)
 
         if cleanup_tasks:
             # Run cleanup tasks concurrently with timeout
             try:
                 await asyncio.wait_for(asyncio.gather(*cleanup_tasks, return_exceptions=True),
                                        timeout=self._shutdown_timeout)
-            except asyncio.TimeoutError:
+            except TimeoutError:
                 logger.warning("Some isolated exporters did not clean up within timeout")
 
         self._active_isolated_exporters.clear()
@@ -195,7 +195,7 @@ class ExporterManager:
             logger.debug("Stopping isolated exporter '%s'", name)
             await exporter.stop()
         except Exception as e:
-            logger.error("Error stopping isolated exporter '%s': %s", name, e)
+            logger.exception("Error stopping isolated exporter '%s': %s", name, e)
 
     @asynccontextmanager
     async def start(self, context_state: ContextState | None = None):
@@ -251,7 +251,7 @@ class ExporterManager:
         try:
             await self._cleanup_isolated_exporters()
         except Exception as e:
-            logger.error("Error during isolated exporter cleanup: %s", e)
+            logger.exception("Error during isolated exporter cleanup: %s", e)
 
         # Then stop the manager tasks
         await self.stop()
@@ -275,7 +275,7 @@ class ExporterManager:
             logger.info("Stopped exporter '%s'", name)
             raise
         except Exception as e:
-            logger.error("Failed to run exporter '%s': %s", name, str(e), exc_info=True)
+            logger.error("Failed to run exporter '%s': %s", name, str(e))
             # Re-raise the exception to ensure it's properly handled
             raise
 
@@ -301,13 +301,13 @@ class ExporterManager:
         try:
             task.cancel()
             await asyncio.wait_for(task, timeout=self._shutdown_timeout)
-        except asyncio.TimeoutError:
+        except TimeoutError:
             logger.warning("Exporter '%s' task did not shut down in time and may be stuck.", name)
             stuck_tasks.append(name)
         except asyncio.CancelledError:
             logger.debug("Exporter '%s' task cancelled", name)
         except Exception as e:
-            logger.error("Failed to stop exporter '%s': %s", name, str(e))
+            logger.exception("Failed to stop exporter '%s': %s", name, str(e))
 
         if stuck_tasks:
             logger.warning("Exporters did not shut down in time: %s", ", ".join(stuck_tasks))
nat/observability/mixin/file_mixin.py

@@ -103,7 +103,7 @@ class FileExportMixin(ResourceConflictMixin):
                 self._current_file_path.unlink()
                 logger.info("Cleaned up existing file: %s", self._current_file_path)
             except OSError as e:
-                logger.error("Error removing existing file %s: %s", self._current_file_path, e)
+                logger.exception("Error removing existing file %s: %s", self._current_file_path, e)
 
     def _get_resource_identifiers(self) -> dict[str, Any]:
         """Return the file resources this instance will use.
@@ -154,10 +154,10 @@ class FileExportMixin(ResourceConflictMixin):
                     old_file.unlink()
                     logger.info("Cleaned up old log file during init: %s", old_file)
                 except OSError as e:
-                    logger.error("Error removing old file %s: %s", old_file, e)
+                    logger.exception("Error removing old file %s: %s", old_file, e)
 
         except Exception as e:
-            logger.error("Error during initialization cleanup: %s", e)
+            logger.exception("Error during initialization cleanup: %s", e)
 
     async def _should_roll_file(self) -> bool:
         """Check if the current file should be rolled based on size."""
@@ -191,7 +191,7 @@ class FileExportMixin(ResourceConflictMixin):
             await self._cleanup_old_files()
 
         except OSError as e:
-            logger.error("Error rolling file %s: %s", self._current_file_path, e)
+            logger.exception("Error rolling file %s: %s", self._current_file_path, e)
 
     async def _cleanup_old_files(self) -> None:
         """Remove old rolled files beyond the maximum count."""
@@ -209,10 +209,10 @@ class FileExportMixin(ResourceConflictMixin):
                     old_file.unlink()
                     logger.info("Cleaned up old log file: %s", old_file)
                 except OSError as e:
-                    logger.error("Error removing old file %s: %s", old_file, e)
+                    logger.exception("Error removing old file %s: %s", old_file, e)
 
         except Exception as e:
-            logger.error("Error during cleanup: %s", e)
+            logger.exception("Error during cleanup: %s", e)
 
     async def export_processed(self, item: str | list[str]) -> None:
         """Export a processed string or list of strings.
@@ -248,7 +248,7 @@ class FileExportMixin(ResourceConflictMixin):
                 await f.write("\n")
 
         except Exception as e:
-            logger.error("Error exporting event: %s", e, exc_info=True)
+            logger.exception("Error exporting event: %s", e)
 
     def get_current_file_path(self) -> Path:
         """Get the current file path being written to.