gllm-pipeline-binary 0.4.21__cp312-cp312-win_amd64.whl → 0.4.23__cp312-cp312-win_amd64.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
gllm_pipeline/pipeline/composer/composer.pyi CHANGED
@@ -154,6 +154,33 @@ class Composer:
  Returns:
  Self: The composer instance with this step appended.
  """
+ def copy(self, input_state: str | list[str], output_state: str | list[str], retry_config: RetryConfig | None = None, error_handler: BaseStepErrorHandler | None = None, cache_store: BaseCache | None = None, cache_config: dict[str, Any] | None = None, name: str | None = None) -> Self:
+ '''Append a copy step to copy input state(s) to output state(s).
+
+ This method creates a step that copies data from input state(s) to output state(s) without transformation.
+ The function handles different scenarios:
+ 1. Single input to single output: Direct copy
+ 2. Single input to multiple outputs: Broadcast the input to all outputs
+ 3. Multiple inputs to single output: Pack all inputs into a list
+ 4. Multiple inputs to multiple outputs: Copy each input to corresponding output (must have same length)
+
+ Args:
+ input_state (str | list[str]): Input state key(s) to copy from.
+ output_state (str | list[str]): Output state key(s) to copy to.
+ retry_config (RetryConfig | None, optional): Configuration for retry behavior using
+ GLLM Core\'s RetryConfig. Defaults to None, in which case no retry config is applied.
+ error_handler (BaseStepErrorHandler | None, optional): Error handler to use for this step.
+ Defaults to None, in which case no error handler is used.
+ cache_store (BaseCache | None, optional): Cache store to be used for caching.
+ Defaults to None, in which case no cache store is used.
+ cache_config (dict[str, Any] | None, optional): Cache configuration to be used for caching.
+ Defaults to None, in which case no cache configuration is used.
+ name (str | None, optional): A unique identifier for this step. If None, a name will be
+ auto-generated with the prefix "copy_". Defaults to None.
+
+ Returns:
+ Self: The composer instance with this step appended.
+ '''
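A minimal sketch of the four scenarios above as builder calls, assuming a `composer` instance and illustrative state keys:

```python
# Sketch of Composer.copy call shapes (state keys are illustrative, not from the package).
composer.copy("raw_query", "query_backup")             # 1. single input to single output: direct copy
composer.copy("raw_query", ["query_a", "query_b"])     # 2. single input broadcast to all outputs
composer.copy(["doc_1", "doc_2"], "all_docs")          # 3. multiple inputs packed into a list
composer.copy(["src_a", "src_b"], ["dst_a", "dst_b"])  # 4. pairwise copy (lists must match in length)
```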
  def transform(self, operation: Callable[[dict[str, Any]], Any], input_map: InputMapSpec, output_state: str | list[str], retry_config: RetryConfig | None = None, error_handler: BaseStepErrorHandler | None = None, name: str | None = None, cache_store: BaseCache | None = None, cache_config: dict[str, Any] | None = None) -> Self:
  '''Append a state operator step.

@@ -241,12 +268,12 @@ class Composer:
  @overload
  def switch(self, condition: Component | Callable[[dict[str, Any]], str], *, input_map: InputMapSpec | None = ..., output_state: str | None = ..., retry_config: RetryConfig | None = ..., error_handler: BaseStepErrorHandler | None = ..., cache_store: BaseCache | None = ..., cache_config: dict[str, Any] | None = ..., name: str | None = ...) -> SwitchComposer: ...
  @overload
- def switch(self, condition: Component | Callable[[dict[str, Any]], str], *, branches: dict[str, BasePipelineStep | list['BasePipelineStep']], input_map: InputMapSpec | None = ..., output_state: str | None = ..., default: BasePipelineStep | list['BasePipelineStep'] | None = ..., retry_config: RetryConfig | None = ..., error_handler: BaseStepErrorHandler | None = ..., cache_store: BaseCache | None = ..., cache_config: dict[str, Any] | None = ..., name: str | None = ...) -> Self: ...
+ def switch(self, condition: Component | Callable[[dict[str, Any]], str], *, branches: dict[str, BasePipelineStep | list[BasePipelineStep]], input_map: InputMapSpec | None = ..., output_state: str | None = ..., default: BasePipelineStep | list[BasePipelineStep] | None = ..., retry_config: RetryConfig | None = ..., error_handler: BaseStepErrorHandler | None = ..., cache_store: BaseCache | None = ..., cache_config: dict[str, Any] | None = ..., name: str | None = ...) -> Self: ...
  @overload
  def parallel(self, *, squash: bool = ..., input_map: InputMapSpec | None = ..., retry_config: RetryConfig | None = ..., error_handler: BaseStepErrorHandler | None = ..., cache_store: BaseCache | None = ..., cache_config: dict[str, Any] | None = ..., name: str | None = ...) -> ParallelComposer: ...
  @overload
- def parallel(self, *, branches: list['PipelineSteps'] | dict[str, 'PipelineSteps'], squash: bool = ..., input_map: InputMapSpec | None = ..., retry_config: RetryConfig | None = ..., error_handler: BaseStepErrorHandler | None = ..., cache_store: BaseCache | None = ..., cache_config: dict[str, Any] | None = ..., name: str | None = ...) -> Self: ...
- def parallel(self, *, branches: list['PipelineSteps'] | dict[str, 'PipelineSteps'] | None = None, squash: bool = True, input_map: InputMapSpec | None = None, retry_config: RetryConfig | None = None, error_handler: BaseStepErrorHandler | None = None, cache_store: BaseCache | None = None, cache_config: dict[str, Any] | None = None, name: str | None = None) -> Self | ParallelComposer:
+ def parallel(self, *, branches: list[PipelineSteps] | dict[str, PipelineSteps], squash: bool = ..., input_map: InputMapSpec | None = ..., retry_config: RetryConfig | None = ..., error_handler: BaseStepErrorHandler | None = ..., cache_store: BaseCache | None = ..., cache_config: dict[str, Any] | None = ..., name: str | None = ...) -> Self: ...
+ def parallel(self, *, branches: list[PipelineSteps] | dict[str, PipelineSteps] | None = None, squash: bool = True, input_map: InputMapSpec | None = None, retry_config: RetryConfig | None = None, error_handler: BaseStepErrorHandler | None = None, cache_store: BaseCache | None = None, cache_config: dict[str, Any] | None = None, name: str | None = None) -> Self | ParallelComposer:
  '''Create a parallel step (builder-style or direct-style).

  This method supports two usage patterns:
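The hunk's context cuts off here, but the overloads above already pin down the two patterns: omitting `branches` returns a `ParallelComposer` (builder style), while supplying `branches` returns the composer itself (direct style). A minimal sketch, assuming a `composer` instance and hypothetical steps:

```python
# Direct style: branches supplied up front; the call returns the composer (Self).
composer.parallel(branches={"summary": summary_step, "keywords": keyword_step})

# Builder style: no branches; the call returns a ParallelComposer for fluent branch
# definition (its fluent API is not shown in this hunk; treat this as a sketch only).
parallel_builder = composer.parallel(squash=True)
```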
gllm_pipeline/pipeline/pipeline.pyi CHANGED
@@ -199,14 +199,17 @@ class Pipeline:
  invalidating any built graph or compiled app. Useful for reusing a pipeline
  instance with different configurations.
  """
- async def invoke(self, initial_state: PipelineState, config: dict[str, Any] | None = None) -> dict[str, Any]:
+ async def invoke(self, initial_state: PipelineState, config: dict[str, Any] | None = None, thread_id: str | None = None) -> dict[str, Any]:
  '''Runs the pipeline asynchronously with the given initial state and configuration.

  Args:
  initial_state (PipelineState): The initial state to start the pipeline with.
  This initial state should comply with the state type of the pipeline.
  config (dict[str, Any], optional): Additional configuration for the pipeline. User-defined config should not
- have "langraph_" prefix as it should be reserved for internal use. Defaults to None.
+ have "langgraph_" prefix as it should be reserved for internal use. Defaults to None.
+ thread_id (str | None, optional): The thread ID for this specific pipeline invocation. This will be passed
+ in the invocation_config.configurable when invoking the pipeline. Useful for checkpointing and
+ tracking related invocations. Defaults to None.

  Returns:
  dict[str, Any]: The final state after the pipeline execution.
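A minimal sketch of the extended signature in use, assuming a built `pipeline` and illustrative state/config keys:

```python
# Sketch of Pipeline.invoke with the new thread_id parameter (keys are illustrative).
final_state = await pipeline.invoke(
    {"input_query": "What is RAG?"},
    config={"model_name": "gpt-4o"},  # user config; the "langgraph_" prefix is reserved
    thread_id="conversation-42",      # forwarded via invocation_config.configurable for checkpointing
)
```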
gllm_pipeline/steps/__init__.pyi CHANGED
@@ -1,4 +1,4 @@
- from gllm_pipeline.steps._func import bundle as bundle, guard as guard, if_else as if_else, log as log, map_reduce as map_reduce, no_op as no_op, parallel as parallel, step as step, subgraph as subgraph, switch as switch, terminate as terminate, toggle as toggle, transform as transform
+ from gllm_pipeline.steps._func import bundle as bundle, copy as copy, guard as guard, if_else as if_else, log as log, map_reduce as map_reduce, no_op as no_op, parallel as parallel, step as step, subgraph as subgraph, switch as switch, terminate as terminate, toggle as toggle, transform as transform
  from gllm_pipeline.steps.component_step import ComponentStep as ComponentStep
  from gllm_pipeline.steps.conditional_step import ConditionalStep as ConditionalStep
  from gllm_pipeline.steps.guard_step import GuardStep as GuardStep
@@ -14,4 +14,4 @@ from gllm_pipeline.steps.step_error_handler.raise_step_error_handler import Rais
  from gllm_pipeline.steps.subgraph_step import SubgraphStep as SubgraphStep
  from gllm_pipeline.steps.terminator_step import TerminatorStep as TerminatorStep

- __all__ = ['ComponentStep', 'ConditionalStep', 'GuardStep', 'LogStep', 'MapReduceStep', 'NoOpStep', 'ParallelStep', 'StateOperatorStep', 'SubgraphStep', 'TerminatorStep', 'EmptyStepErrorHandler', 'FallbackStepErrorHandler', 'KeepStepErrorHandler', 'RaiseStepErrorHandler', 'bundle', 'guard', 'if_else', 'log', 'map_reduce', 'no_op', 'parallel', 'step', 'subgraph', 'switch', 'terminate', 'toggle', 'transform']
+ __all__ = ['ComponentStep', 'ConditionalStep', 'GuardStep', 'LogStep', 'MapReduceStep', 'NoOpStep', 'ParallelStep', 'StateOperatorStep', 'SubgraphStep', 'TerminatorStep', 'EmptyStepErrorHandler', 'FallbackStepErrorHandler', 'KeepStepErrorHandler', 'RaiseStepErrorHandler', 'bundle', 'copy', 'guard', 'if_else', 'log', 'map_reduce', 'no_op', 'parallel', 'step', 'subgraph', 'switch', 'terminate', 'toggle', 'transform']
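With this re-export, the new factory is importable from the public steps module alongside the existing helpers:

```python
# The new copy factory sits next to the existing step helpers in the public API.
from gllm_pipeline.steps import bundle, copy, step, transform
```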
gllm_pipeline/steps/_func.pyi CHANGED
@@ -23,7 +23,7 @@ def step(component: Component, input_state_map: dict[str, str] | None = None, ou
  This function creates a ComponentStep, which wraps a component and manages its inputs and outputs within the
  pipeline.

- Usage example:
+ Examples:
  We can leverage the `input_map` parameter to specify both state/config keys (as strings)
  and fixed values (as any type) in a single dictionary.
  ```python
@@ -44,7 +44,7 @@ def step(component: Component, input_state_map: dict[str, str] | None = None, ou
  3. It will pass the `<fixed_value>` from the `conversation_id` argument of the Retriever.
  4. It will store the `retrieved_data` from the Retriever result in the pipeline state.

- Legacy Approach (will be deprecated in v0.5, please use `input_map` instead):
+ Legacy Approach (will be deprecated in v0.5, please use `input_map` instead):
  ```python
  retriever = Retriever()
  retriever_step = step(retriever, {"query": "input_query"}, "retrieved_data")
@@ -53,7 +53,6 @@ def step(component: Component, input_state_map: dict[str, str] | None = None, ou
  1. It will pass the `input_query` from the pipeline state to the `query` argument of the Retriever.
  2. It will store the `retrieved_data` from the Retriever result in the pipeline state.

-
  Args:
  component (Component): The component to be executed in this step.
  input_state_map (dict[str, str] | None): Mapping of component input arguments to pipeline state keys.
@@ -75,7 +74,7 @@ def step(component: Component, input_state_map: dict[str, str] | None = None, ou
  Defaults to None, in which case no retry config is applied.
  error_handler (BaseStepErrorHandler | None, optional): Strategy to handle errors during execution.
  Defaults to None, in which case the RaiseStepErrorHandler is used.
- cache_store ("BaseCache" | None, optional): Cache store to be used for caching.
+ cache_store (BaseCache | None, optional): Cache store to be used for caching.
  Defaults to None, in which case no cache store is used.
  cache_config (dict[str, Any] | None, optional): Cache configuration to be used for caching.
  Defaults to None, in which case no cache configuration is used.
@@ -93,15 +92,16 @@ def log(message: str, is_template: bool = True, emit_kwargs: dict[str, Any] | No

  The message can be a plain string or a template with placeholders for state variables.

- Usage example 1 (plain message):
- ```python
- log_step = log("Processing document", is_template=False)
- ```
+ Examples:
+ Plain message:
+ ```python
+ log_step = log("Processing document", is_template=False)
+ ```

- Usage example 2 (template message with state variables):
- ```python
- log_step = log("Processing query: {query} with model: {model_name}")
- ```
+ Template message with state variables:
+ ```python
+ log_step = log("Processing query: {query} with model: {model_name}")
+ ```

  Args:
  message (str): The message to be logged. May contain placeholders in curly braces for state variables.
@@ -110,7 +110,7 @@ def log(message: str, is_template: bool = True, emit_kwargs: dict[str, Any] | No
  Defaults to None.
  retry_config (RetryConfig | None, optional): Configuration for retry behavior using GLLM Core\'s RetryConfig.
  Defaults to None, in which case no retry config is applied.
- cache_store ("BaseCache" | None, optional): Cache store to be used for caching.
+ cache_store (BaseCache | None, optional): Cache store to be used for caching.
  Defaults to None, in which case no cache store is used.
  cache_config (dict[str, Any] | None, optional): Cache configuration to be used for caching.
  Defaults to None, in which case no cache configuration is used.
@@ -133,7 +133,8 @@ def if_else(condition: ConditionType | Callable[[dict[str, Any]], bool], if_bran
  For boolean conditions and string conditions, True/true/TRUE maps to the if_branch
  and False/false/FALSE maps to the else_branch.

- Usage example with a Callable condition:
+ Examples:
+ With a Callable condition:
  ```python
  # Using a Callable condition - receives merged state and config directly
  condition = lambda data: data["input"] > data["threshold"]
@@ -150,7 +151,7 @@ def if_else(condition: ConditionType | Callable[[dict[str, Any]], bool], if_bran
  )

  # or use the legacy approach via input_state_map, runtime_config_map, and fixed_args
- Note: this approach is deprecated in v0.5. Please use input_map instead.
+ # Note: this approach is deprecated in v0.5. Please use input_map instead.
  if_else_step = if_else(
  condition,
  if_branch,
@@ -163,7 +164,7 @@ def if_else(condition: ConditionType | Callable[[dict[str, Any]], bool], if_bran
  the threshold (0), and the NegativeComponent otherwise. The result of the condition will be stored in the
  pipeline state under the key `condition_result`.

- Usage example with a Component condition:
+ With a Component condition:
  ```python
  # Using a Component condition - requires input_state_map and runtime_config_map
  threshold_checker = ThresholdChecker() # A Component that returns "true" or "false"
@@ -184,7 +185,7 @@ def if_else(condition: ConditionType | Callable[[dict[str, Any]], bool], if_bran
  )

  # or use the legacy approach via input_state_map, runtime_config_map, and fixed_args
- Note: this approach is deprecated in v0.5. Please use input_map instead.
+ # Note: this approach is deprecated in v0.5. Please use input_map instead.
  if_else_step = if_else(
  threshold_checker,
  if_branch,
@@ -194,14 +195,12 @@ def if_else(condition: ConditionType | Callable[[dict[str, Any]], bool], if_bran
  runtime_config_map={"threshold": "threshold_config"},
  fixed_args={"strict_mode": True}
  )
-
  ```
  This will cause the step to execute the ThresholdChecker component with the `input` from the pipeline state
  as its `value` parameter and the `threshold_config` from runtime configuration as its `threshold` parameter.
  Based on the component\'s result ("true" or "false"), it will execute either the PositiveComponent or
  the NegativeComponent.

-
  Args:
  condition (ConditionType | Callable[[dict[str, Any]], bool]): The condition to evaluate.
  if_branch (BasePipelineStep | list[BasePipelineStep]): Step(s) to execute if condition is true.
@@ -225,7 +224,7 @@ def if_else(condition: ConditionType | Callable[[dict[str, Any]], bool], if_bran
  Defaults to None, in which case no retry config is applied.
  error_handler (BaseStepErrorHandler | None, optional): Strategy to handle errors during execution.
  Defaults to None, in which case the RaiseStepErrorHandler is used.
- cache_store ("BaseCache" | None, optional): Cache store to be used for caching.
+ cache_store (BaseCache | None, optional): Cache store to be used for caching.
  Defaults to None, in which case no cache store is used.
  cache_config (dict[str, Any] | None, optional): Cache configuration to be used for caching.
  Defaults to None, in which case no cache configuration is used.
@@ -240,7 +239,8 @@ def switch(condition: ConditionType, branches: dict[str, BasePipelineStep | list

  This function creates a ConditionalStep that can execute one of multiple branches based on a condition.

- Usage example with a Callable condition:
+ Examples:
+ With a Callable condition:
  ```python
  # Using a Callable condition - receives merged state and config directly
  def extract_command(data):
@@ -264,7 +264,7 @@ def switch(condition: ConditionType, branches: dict[str, BasePipelineStep | list
  )

  # or use the legacy approach via input_state_map, runtime_config_map, and fixed_args
- Note: this approach is deprecated in v0.5. Please use input_map instead.
+ # Note: this approach is deprecated in v0.5. Please use input_map instead.
  switch_step = switch(
  extract_command,
  branches,
@@ -280,7 +280,7 @@ def switch(condition: ConditionType, branches: dict[str, BasePipelineStep | list
  as a fixed argument. The result of the condition will be stored in the pipeline state under the key
  `command_type`.

- Usage example with a Component condition:
+ With a Component condition:
  ```python
  # Using a Component condition - requires input_state_map and runtime_config_map
  command_extractor = CommandExtractor() # A Component that extracts command from query
@@ -301,7 +301,7 @@ def switch(condition: ConditionType, branches: dict[str, BasePipelineStep | list
  )

  # or use the legacy approach via input_state_map, runtime_config_map, and fixed_args
- Note: this approach is deprecated in v0.5. Please use input_map instead.
+ # Note: this approach is deprecated in v0.5. Please use input_map instead.
  switch_step = switch(
  command_extractor,
  branches,
@@ -341,7 +341,7 @@ def switch(condition: ConditionType, branches: dict[str, BasePipelineStep | list
  Defaults to None, in which case no retry config is applied.
  error_handler (BaseStepErrorHandler | None, optional): Strategy to handle errors during execution.
  Defaults to None, in which case the RaiseStepErrorHandler is used.
- cache_store ("BaseCache" | None, optional): Cache store to be used for caching.
+ cache_store (BaseCache | None, optional): Cache store to be used for caching.
  Defaults to None, in which case no cache store is used.
  cache_config (dict[str, Any] | None, optional): Cache configuration to be used for caching.
  Defaults to None, in which case no cache configuration is used.
@@ -357,7 +357,7 @@ def transform(operation: Callable[[dict[str, Any]], Any], input_states: list[str
  This function creates a StateOperatorStep that applies a transformation operation to the pipeline state.
  Note that the function `operation` should accept a dictionary of input data and return the operation result.

- Usage example:
+ Examples:
  ```python
  def sort(data: dict) -> dict:
  is_reverse = data["reverse"]
@@ -399,7 +399,7 @@ def transform(operation: Callable[[dict[str, Any]], Any], input_states: list[str
  Defaults to None, in which case no retry config is applied.
  error_handler (BaseStepErrorHandler | None, optional): Strategy to handle errors during execution.
  Defaults to None, in which case the RaiseStepErrorHandler is used.
- cache_store ("BaseCache" | None, optional): Cache store to be used for caching.
+ cache_store (BaseCache | None, optional): Cache store to be used for caching.
  Defaults to None, in which case no cache store is used.
  cache_config (dict[str, Any] | None, optional): Cache configuration to be used for caching.
  Defaults to None, in which case no cache configuration is used.
@@ -415,7 +415,7 @@ def bundle(input_states: list[str] | dict[str, str], output_state: str | list[st
  This function creates a StateOperatorStep that combines multiple keys from the pipeline state into a single output
  without modifying the data.

- Usage example:
+ Examples:
  ```python
  bundle_step = bundle(["input1", "input2"], "output")
  # Produces: {"output": {"input1": state["input1"], "input2": state["input2"]}}
@@ -423,7 +423,7 @@ def bundle(input_states: list[str] | dict[str, str], output_state: str | list[st
  This will cause the step to bundle the values of `input1` and `input2` from the pipeline state into a single
  dictionary. The result will be stored in the pipeline state under the key `output`.

- Usage example (with remapping):
+ With remapping:
  ```python
  # Provide a mapping of desired output field names to source state keys
  # Renames state key "user_id" to "id" in the bundled output
@@ -441,7 +441,7 @@ def bundle(input_states: list[str] | dict[str, str], output_state: str | list[st
  Defaults to None, in which case the RaiseStepErrorHandler is used.
  retry_config (RetryConfig | None, optional): Configuration for retry behavior using GLLM Core\'s RetryConfig.
  Defaults to None, in which case no retry config is applied.
- cache_store ("BaseCache" | None, optional): Cache store to be used for caching.
+ cache_store (BaseCache | None, optional): Cache store to be used for caching.
  Defaults to None, in which case no cache store is used.
  cache_config (dict[str, Any] | None, optional): Cache configuration to be used for caching.
  Defaults to None, in which case no cache configuration is used.
@@ -456,7 +456,7 @@ def guard(condition: ConditionType | Callable[[dict[str, Any]], bool], success_b

  This function creates a GuardStep that can terminate pipeline execution if a condition is not met.

- Usage example:
+ Examples:
  ```python
  auth_check = lambda state: state["is_authenticated"]
  success_step = step(SuccessHandler(), {"input": "input"}, "output")
@@ -504,7 +504,7 @@ def guard(condition: ConditionType | Callable[[dict[str, Any]], bool], success_b
  Defaults to None, in which case no retry config is applied.
  error_handler (BaseStepErrorHandler | None, optional): Strategy to handle errors during execution.
  Defaults to None, in which case the RaiseStepErrorHandler is used.
- cache_store ("BaseCache" | None, optional): Cache store to be used for caching.
+ cache_store (BaseCache | None, optional): Cache store to be used for caching.
  Defaults to None, in which case no cache store is used.
  cache_config (dict[str, Any] | None, optional): Cache configuration to be used for caching.
  Defaults to None, in which case no cache configuration is used.
@@ -519,7 +519,7 @@ def terminate(name: str | None = None, retry_config: RetryConfig | None = None,

  This function creates a TerminatorStep that explicitly terminates a branch or the entire pipeline.

- Usage example:
+ Examples:
  ```python
  early_exit = terminate("early_exit")

@@ -537,7 +537,7 @@ def terminate(name: str | None = None, retry_config: RetryConfig | None = None,
  Defaults to None, in which case no retry config is applied.
  error_handler (BaseStepErrorHandler | None, optional): Strategy to handle errors during execution.
  Defaults to None, in which case the RaiseStepErrorHandler is used.
- cache_store ("BaseCache" | None, optional): Cache store to be used for caching.
+ cache_store (BaseCache | None, optional): Cache store to be used for caching.
  Defaults to None, in which case no cache store is used.
  cache_config (dict[str, Any] | None, optional): Cache configuration to be used for caching.
  Defaults to None, in which case no cache configuration is used.
@@ -570,7 +570,8 @@ def toggle(condition: ConditionType | Callable[[dict[str, Any]], bool] | str, if
  4. A string key that will be looked up in the merged state data (state + runtime config + fixed args).
  The value will be evaluated for truthiness - any non-empty, non-zero, non-False value will be considered True.

- Usage example with a Callable condition:
+ Examples:
+ With a Callable condition:
  ```python
  # Using a Callable condition - receives merged state and config directly
  condition = lambda data: data["feature_enabled"] and data["user_tier"] >= 2
@@ -596,7 +597,7 @@ def toggle(condition: ConditionType | Callable[[dict[str, Any]], bool] | str, if
  Otherwise, it will do nothing. The condition result will be stored in the pipeline state under the key
  `feature_status`.

- Usage example with a Component condition:
+ With a Component condition:
  ```python
  # Using a Component condition - requires input_state_map and runtime_config_map
  feature_checker = FeatureChecker() # A Component that returns "true" or "false"
@@ -641,7 +642,7 @@ def toggle(condition: ConditionType | Callable[[dict[str, Any]], bool] | str, if
  Defaults to None, in which case no retry config is applied.
  error_handler (BaseStepErrorHandler | None, optional): Strategy to handle errors during execution.
  Defaults to None, in which case the RaiseStepErrorHandler is used.
- cache_store ("BaseCache" | None, optional): Cache store to be used for caching.
+ cache_store (BaseCache | None, optional): Cache store to be used for caching.
  Defaults to None, in which case no cache store is used.
  cache_config (dict[str, Any] | None, optional): Cache configuration to be used for caching.
  Defaults to None, in which case no cache configuration is used.
@@ -662,7 +663,7 @@ def subgraph(subgraph: Pipeline, input_state_map: dict[str, str] | None = None,
  in the parent state, it will be omitted from the subgraph input rather than causing an error. This allows
  for flexible composition of pipelines with different state schemas.

- Usage example:
+ Examples:
  ```python
  from typing import TypedDict
  from gllm_pipeline.pipeline.pipeline import Pipeline
@@ -745,7 +746,7 @@ def subgraph(subgraph: Pipeline, input_state_map: dict[str, str] | None = None,
  Defaults to None, in which case no retry config is applied.
  error_handler (BaseStepErrorHandler | None, optional): Strategy to handle errors during execution.
  Defaults to None, in which case the RaiseStepErrorHandler is used.
- cache_store ("BaseCache" | None, optional): Cache store to be used for caching.
+ cache_store (BaseCache | None, optional): Cache store to be used for caching.
  Defaults to None, in which case no cache store is used.
  cache_config (dict[str, Any] | None, optional): Cache configuration to be used for caching.
  Defaults to None, in which case no cache configuration is used.
@@ -775,8 +776,8 @@ def parallel(branches: list[PipelineSteps] | dict[str, PipelineSteps], input_sta
  This is especially useful when the state is large but branches only need specific parts of it.
  If input_states is None (default), all state keys will be passed.

- Usage example:
- 1. Define branches as a list of steps or lists of steps
+ Examples:
+ Define branches as a list of steps or lists of steps:
  ```python
  parallel_step = parallel(
  branches=[
@@ -791,7 +792,7 @@ def parallel(branches: list[PipelineSteps] | dict[str, PipelineSteps], input_sta
  )
  ```

- 2. Define branches as a dictionary of branches
+ Define branches as a dictionary of branches:
  Other than the list format, we can also use the dictionary format for branches to
  make it easier to exclude branches.
  ```python
@@ -827,7 +828,7 @@ def parallel(branches: list[PipelineSteps] | dict[str, PipelineSteps], input_sta
  Defaults to None, in which case no retry config is applied.
  error_handler (BaseStepErrorHandler | None, optional): Strategy to handle errors during execution.
  Defaults to None, in which case the RaiseStepErrorHandler is used.
- cache_store ("BaseCache" | None, optional): Cache store to be used for caching.
+ cache_store (BaseCache | None, optional): Cache store to be used for caching.
  Defaults to None, in which case no cache store is used.
  cache_config (dict[str, Any] | None, optional): Cache configuration to be used for caching.
  Defaults to None, in which case no cache configuration is used.
@@ -860,7 +861,8 @@ def map_reduce(output_state: str, map_func: Component | Callable[[dict[str, Any]
  1. If multiple list inputs are provided, they must be the same length.
  2. Scalar inputs are broadcasted to match list lengths.

- Usage Example - Processing a list of items with an async map function:
+ Examples:
+ Processing a list of items with an async map function:
  ```python
  async def count_words(item):
  await asyncio.sleep(0.1) # Simulate I/O operation
@@ -879,7 +881,7 @@ def map_reduce(output_state: str, map_func: Component | Callable[[dict[str, Any]
  # returns {"word_counts": 60} (total word count)
  ```

- Usage Example - Broadcasting scalar values to match list length:
+ Broadcasting scalar values to match list length:
  ```python
  # Apply a common threshold to multiple values
  threshold_check = map_reduce(
@@ -895,7 +897,7 @@ def map_reduce(output_state: str, map_func: Component | Callable[[dict[str, Any]
  # returns {"above_threshold": [False, True, True]}
  ```

- Usage Example - Multiple list inputs with the same length:
+ Multiple list inputs with the same length:
  ```python
  similarity_step = map_reduce(
  input_state_map={
@@ -910,7 +912,7 @@ def map_reduce(output_state: str, map_func: Component | Callable[[dict[str, Any]
  # returns {"similarity_scores": 0.75}
  ```

- Usage Example - Using a Component for complex processing instead of a map function:
+ Using a Component for complex processing instead of a map function:
  ```python
  summarizer = TextSummarizer() # Subclass of Component
  summarize_step = map_reduce(
@@ -946,7 +948,7 @@ def map_reduce(output_state: str, map_func: Component | Callable[[dict[str, Any]
  Defaults to None, in which case no retry config is applied.
  error_handler (BaseStepErrorHandler | None, optional): Strategy to handle errors during execution.
  Defaults to None, in which case the RaiseStepErrorHandler is used.
- cache_store ("BaseCache" | None, optional): Cache store to be used for caching.
+ cache_store (BaseCache | None, optional): Cache store to be used for caching.
  Defaults to None, in which case no cache store is used.
  cache_config (dict[str, Any] | None, optional): Cache configuration to be used for caching.
  Defaults to None, in which case no cache configuration is used.
@@ -956,3 +958,70 @@ def map_reduce(output_state: str, map_func: Component | Callable[[dict[str, Any]
  Returns:
  MapReduceStep: An instance of MapReduceStep configured with the provided parameters.
  '''
+ def copy(input_state: str | list[str], output_state: str | list[str], retry_config: RetryConfig | None = None, error_handler: BaseStepErrorHandler | None = None, cache_store: BaseCache | None = None, cache_config: dict[str, Any] | None = None, name: str | None = None) -> StateOperatorStep:
+ '''Create a StateOperatorStep to copy input state(s) to output state(s).
+
+ This function creates a StateOperatorStep that copies data from input state(s) to output state(s)
+ without any transformation. The function handles different scenarios:
+ 1. Single input to single output: Direct copy
+ 2. Single input to multiple outputs: Broadcast the input to all outputs
+ 3. Multiple inputs to single output: Pack all inputs into a list
+ 4. Multiple inputs to multiple outputs: Copy each input to corresponding output (must have same length)
+
+ Args:
+ input_state (str | list[str]): Input state key(s) to copy from.
+ output_state (str | list[str]): Output state key(s) to copy to.
+ retry_config (RetryConfig | None, optional): Configuration for retry behavior using GLLM Core\'s RetryConfig.
+ Defaults to None, in which case no retry config is applied.
+ error_handler (BaseStepErrorHandler | None, optional): Strategy to handle errors during execution.
+ Defaults to None, in which case the RaiseStepErrorHandler is used.
+ cache_store (BaseCache | None, optional): Cache store to be used for caching.
+ Defaults to None, in which case no cache store is used.
+ cache_config (dict[str, Any] | None, optional): Cache configuration to be used for caching.
+ Defaults to None, in which case no cache configuration is used.
+ name (str | None, optional): A unique identifier for this step. Defaults to None, in which case the
+ name will be "Copy" followed by a unique identifier.
+
+ Returns:
+ StateOperatorStep: An instance of StateOperatorStep configured to copy the input states to output states.
+
+ Raises:
+ ValueError: If both input_state and output_state are lists but have different lengths.
+
+ Examples:
+ Single input to single output:
+ ```python
+ step = copy("input_data", "output_data")
+ # Copies value from "input_data" key to "output_data" key
+ ```
+
+ Single input to multiple outputs (broadcast):
+ ```python
+ step = copy("input_data", ["output1", "output2", "output3"])
+ # Copies value from "input_data" to all three output keys
+ ```
+
+ Multiple inputs to single output (pack):
+ ```python
+ step = copy(["input1", "input2", "input3"], "packed_output")
+ # Packs values from all three input keys into a list at "packed_output"
+ ```
+
+ Multiple inputs to multiple outputs (pairwise):
+ ```python
+ step = copy(["input1", "input2"], ["output1", "output2"])
+ # Copies "input1" → "output1" and "input2" → "output2"
+ ```
+
+ With custom name and retry config:
+ ```python
+ from gllm_core import RetryConfig
+ retry_cfg = RetryConfig(max_attempts=3, delay=1.0)
+ step = copy(
+ "input_data",
+ "output_data",
+ name="DataCopyStep",
+ retry_config=retry_cfg
+ )
+ ```
+ '''
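Since the wheel ships only compiled code, the copy semantics enumerated above can be pictured as a small transform over the state dictionary. A minimal sketch under those documented rules, not the package's actual implementation:

```python
from typing import Any


def copy_semantics(inputs: list[str], outputs: list[str], data: dict[str, Any]) -> dict[str, Any]:
    """Sketch of the four documented scenarios: direct copy, broadcast, pack, pairwise."""
    if len(inputs) == 1:
        # Scenarios 1 and 2: one input copied (or broadcast) to every output key.
        return {out: data[inputs[0]] for out in outputs}
    if len(outputs) == 1:
        # Scenario 3: many inputs packed into a list under the single output key.
        return {outputs[0]: [data[key] for key in inputs]}
    if len(inputs) != len(outputs):
        # Scenario 4 requires equal lengths, matching the documented ValueError.
        raise ValueError("input_state and output_state lists must have the same length")
    return {out: data[src] for src, out in zip(inputs, outputs)}
```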
gllm_pipeline/steps/component_step.pyi CHANGED
@@ -9,6 +9,7 @@ from gllm_pipeline.utils.error_handling import ErrorContext as ErrorContext
  from gllm_pipeline.utils.has_inputs_mixin import HasInputsMixin as HasInputsMixin
  from gllm_pipeline.utils.input_map import shallow_dump as shallow_dump
  from langgraph.runtime import Runtime
+ from langgraph.types import RunnableConfig as RunnableConfig
  from pydantic import BaseModel as BaseModel
  from typing import Any

@@ -60,7 +61,7 @@ class ComponentStep(BasePipelineStep, HasInputsMixin):
  cache_config (dict[str, Any] | None, optional): Cache configuration to be used for caching.
  Defaults to None, in which case no cache configuration is used.
  '''
- async def execute(self, state: PipelineState, runtime: Runtime[dict[str, Any] | BaseModel]) -> dict[str, Any] | None:
+ async def execute(self, state: PipelineState, runtime: Runtime[dict[str, Any] | BaseModel], config: RunnableConfig | None = None) -> dict[str, Any] | None:
  """Executes the component and processes its output.

  This method validates inputs, prepares data, executes the component, and formats the output for integration
@@ -69,6 +70,7 @@ class ComponentStep(BasePipelineStep, HasInputsMixin):
  Args:
  state (PipelineState): The current state of the pipeline, containing all data.
  runtime (Runtime[dict[str, Any] | BaseModel]): Runtime information for this step's execution.
+ config (RunnableConfig | None, optional): The runnable configuration. Defaults to None.

  Returns:
  dict[str, Any] | None: The update to the pipeline state after this step's operation, or None if
gllm_pipeline/steps/conditional_step.pyi CHANGED
@@ -18,7 +18,7 @@ from gllm_pipeline.utils.mermaid import MERMAID_HEADER as MERMAID_HEADER
  from gllm_pipeline.utils.step_execution import execute_sequential_steps as execute_sequential_steps
  from langgraph.graph import StateGraph as StateGraph
  from langgraph.runtime import Runtime as Runtime
- from langgraph.types import Command, RetryPolicy as RetryPolicy
+ from langgraph.types import Command, RetryPolicy as RetryPolicy, RunnableConfig as RunnableConfig
  from pydantic import BaseModel as BaseModel
  from typing import Any, Callable

@@ -122,17 +122,18 @@ class ConditionalStep(BranchingStep, HasInputsMixin):
  cache_config (dict[str, Any] | None, optional): Cache configuration to be used for caching.
  Defaults to None, in which case no cache configuration is used.
  '''
- async def execute(self, state: PipelineState, runtime: Runtime[dict[str, Any] | BaseModel]) -> Command:
+ async def execute(self, state: PipelineState, runtime: Runtime[dict[str, Any] | BaseModel], config: RunnableConfig | None = None) -> Command:
  """Executes the conditional step, determines the route, and returns a Command.

  Args:
  state (PipelineState): The current state of the pipeline.
  runtime (Runtime[dict[str, Any] | BaseModel]): Runtime information for this step's execution.
+ config (RunnableConfig | None, optional): The runnable configuration. Defaults to None.

  Returns:
  Command: A LangGraph Command object with 'goto' for routing and 'update' for state changes.
  """
- async def execute_direct(self, state: dict[str, Any], runtime: Runtime[dict[str, Any] | BaseModel]) -> dict[str, Any] | None:
+ async def execute_direct(self, state: dict[str, Any], runtime: Runtime[dict[str, Any] | BaseModel], config: RunnableConfig | None = None) -> dict[str, Any] | None:
  """Execute this step directly, handling both branch selection and execution.

  This method is used when the step needs to be executed directly (e.g. in parallel execution).
@@ -141,6 +142,7 @@ class ConditionalStep(BranchingStep, HasInputsMixin):
  Args:
  state (dict[str, Any]): The current state of the pipeline.
  runtime (Runtime[dict[str, Any] | BaseModel]): Runtime information for this step's execution.
+ config (RunnableConfig | None, optional): The runnable configuration. Defaults to None.

  Returns:
  dict[str, Any] | None: Updates to apply to the pipeline state, or None if no updates.
gllm_pipeline/steps/log_step.pyi CHANGED
@@ -5,6 +5,7 @@ from gllm_pipeline.alias import PipelineState as PipelineState
  from gllm_pipeline.steps.pipeline_step import BasePipelineStep as BasePipelineStep
  from gllm_pipeline.utils.input_map import shallow_dump as shallow_dump
  from langgraph.runtime import Runtime as Runtime
+ from langgraph.types import RunnableConfig as RunnableConfig
  from pydantic import BaseModel as BaseModel
  from typing import Any

@@ -38,12 +39,13 @@ class LogStep(BasePipelineStep):
  cache_config (dict[str, Any] | None, optional): Cache configuration to be used for caching.
  Defaults to None, in which case no cache configuration is used.
  '''
- async def execute(self, state: PipelineState, runtime: Runtime[dict[str, Any] | BaseModel]) -> None:
+ async def execute(self, state: PipelineState, runtime: Runtime[dict[str, Any] | BaseModel], config: RunnableConfig | None = None) -> None:
  """Executes the log step by formatting and emitting the message.

  Args:
  state (PipelineState): The current state of the pipeline, containing all data.
  runtime (Runtime[dict[str, Any] | BaseModel]): Runtime information for this step's execution.
+ config (RunnableConfig | None, optional): The runnable configuration. Defaults to None.

  Returns:
  None: This step does not modify the pipeline state.
gllm_pipeline/steps/map_reduce_step.pyi CHANGED
@@ -10,6 +10,7 @@ from gllm_pipeline.utils.error_handling import ErrorContext as ErrorContext
  from gllm_pipeline.utils.has_inputs_mixin import HasInputsMixin as HasInputsMixin
  from gllm_pipeline.utils.input_map import shallow_dump as shallow_dump
  from langgraph.runtime import Runtime as Runtime
+ from langgraph.types import RunnableConfig as RunnableConfig
  from pydantic import BaseModel as BaseModel
  from typing import Any, Callable

@@ -77,12 +78,13 @@ class MapReduceStep(BasePipelineStep, HasInputsMixin):
  cache_config (dict[str, Any] | None, optional): Cache configuration to be used for caching.
  Defaults to None, in which case no cache configuration is used.
  '''
- async def execute(self, state: dict[str, Any], runtime: Runtime[dict[str, Any] | BaseModel]) -> dict[str, Any]:
+ async def execute(self, state: dict[str, Any], runtime: Runtime[dict[str, Any] | BaseModel], config: RunnableConfig | None = None) -> dict[str, Any]:
  """Execute the map and reduce operations.

  Args:
  state (dict[str, Any]): The current state of the pipeline.
  runtime (Runtime[dict[str, Any] | BaseModel]): Runtime information for this step's execution.
+ config (RunnableConfig | None, optional): The runnable configuration. Defaults to None.

  Returns:
  dict[str, Any]: The reduced result stored under output_state.
gllm_pipeline/steps/no_op_step.pyi CHANGED
@@ -1,5 +1,6 @@
  from gllm_pipeline.steps.pipeline_step import BasePipelineStep as BasePipelineStep
  from langgraph.runtime import Runtime as Runtime
+ from langgraph.types import RunnableConfig as RunnableConfig
  from pydantic import BaseModel as BaseModel
  from typing import Any

@@ -28,12 +29,13 @@ class NoOpStep(BasePipelineStep):
  Attributes:
  name (str): A unique identifier for this pipeline step.
  '''
- async def execute(self, state: dict[str, Any], runtime: Runtime[dict[str, Any] | BaseModel]) -> None:
+ async def execute(self, state: dict[str, Any], runtime: Runtime[dict[str, Any] | BaseModel], config: RunnableConfig | None = None) -> None:
  """Executes this step, which does nothing.

  Args:
  state (dict[str, Any]): The current state of the pipeline.
  runtime (Runtime[dict[str, Any] | BaseModel]): Runtime information for this step's execution.
+ config (RunnableConfig | None, optional): The runnable configuration. Defaults to None.

  Returns:
  None: This step does not modify the pipeline state.
gllm_pipeline/steps/parallel_step.pyi CHANGED
@@ -14,7 +14,7 @@ from gllm_pipeline.utils.mermaid import MERMAID_HEADER as MERMAID_HEADER
  from gllm_pipeline.utils.step_execution import execute_sequential_steps as execute_sequential_steps
  from langgraph.graph import StateGraph as StateGraph
  from langgraph.runtime import Runtime as Runtime
- from langgraph.types import RetryPolicy as RetryPolicy
+ from langgraph.types import RetryPolicy as RetryPolicy, RunnableConfig as RunnableConfig
  from pydantic import BaseModel as BaseModel
  from typing import Any

@@ -106,7 +106,7 @@ class ParallelStep(BranchingStep, HasInputsMixin):
  Returns:
  list[str]: Exit points after adding all child steps.
  """
- async def execute(self, state: dict[str, Any], runtime: Runtime[dict[str, Any] | BaseModel]) -> dict[str, Any] | None:
+ async def execute(self, state: dict[str, Any], runtime: Runtime[dict[str, Any] | BaseModel], config: RunnableConfig | None = None) -> dict[str, Any] | None:
  """Execute all branches in parallel and merge their results.

  This method is only used for the squashed approach. For the expanded approach,
@@ -115,6 +115,7 @@ class ParallelStep(BranchingStep, HasInputsMixin):
  Args:
  state (dict[str, Any]): The current state of the pipeline.
  runtime (Runtime[dict[str, Any] | BaseModel]): Runtime information for this step's execution.
+ config (RunnableConfig | None, optional): The runnable configuration. Defaults to None.

  Returns:
  dict[str, Any] | None: The merged results from all parallel branches, or None if no updates were produced.
gllm_pipeline/steps/pipeline_step.pyi CHANGED
@@ -13,7 +13,7 @@ from gllm_pipeline.utils.graph import create_edge as create_edge
  from gllm_pipeline.utils.retry_converter import retry_config_to_langgraph_policy as retry_config_to_langgraph_policy
  from langgraph.graph import StateGraph as StateGraph
  from langgraph.runtime import Runtime as Runtime
- from langgraph.types import RetryPolicy as RetryPolicy
+ from langgraph.types import RetryPolicy as RetryPolicy, RunnableConfig as RunnableConfig
  from pydantic import BaseModel as BaseModel
  from typing import Any

@@ -153,7 +153,7 @@ class BasePipelineStep(ABC, metaclass=abc.ABCMeta):
  list[str]: The exit points (endpoints) of this step.
  """
  @abstractmethod
- async def execute(self, state: PipelineState, runtime: Runtime[dict[str, Any] | BaseModel]) -> dict[str, Any] | None:
+ async def execute(self, state: PipelineState, runtime: Runtime[dict[str, Any] | BaseModel], config: RunnableConfig | None = None) -> dict[str, Any] | None:
  """Executes the operation defined for this pipeline step.

  This method should be implemented by subclasses to perform the actual processing or computation for this step.
@@ -161,6 +161,9 @@ class BasePipelineStep(ABC, metaclass=abc.ABCMeta):
  Args:
  state (PipelineState): The current state of the pipeline, containing all data.
  runtime (Runtime[dict[str, Any] | BaseModel]): Runtime information for this step's execution.
+ config (RunnableConfig | None, optional): Runnable configuration containing thread_id and other
+ LangGraph config. This allows steps to access invocation-level configuration like thread_id for
+ tracking and checkpointing. Defaults to None.

  Returns:
  dict[str, Any] | None: The update to the pipeline state after this step's operation.
@@ -170,7 +173,7 @@ class BasePipelineStep(ABC, metaclass=abc.ABCMeta):
  Raises:
  NotImplementedError: If the subclass does not implement this method.
  """
- async def execute_direct(self, state: dict[str, Any], runtime: Runtime[dict[str, Any] | BaseModel]) -> dict[str, Any] | None:
+ async def execute_direct(self, state: dict[str, Any], runtime: Runtime[dict[str, Any] | BaseModel], config: RunnableConfig | None = None) -> dict[str, Any] | None:
  """Execute this step directly, bypassing graph-based execution.

  This method is used when a step needs to be executed directly, such as in parallel execution.
@@ -179,6 +182,7 @@ class BasePipelineStep(ABC, metaclass=abc.ABCMeta):
  Args:
  state (dict[str, Any]): The current state of the pipeline.
  runtime (Runtime[dict[str, Any] | BaseModel]): Runtime information for this step's execution.
+ config (RunnableConfig | None, optional): The runnable configuration to pass to the step.

  Returns:
  dict[str, Any] | None: Updates to apply to the pipeline state, or None if no updates.
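Taken together with the new `thread_id` parameter on `Pipeline.invoke`, the extended `execute` signature lets custom steps read invocation-level settings. A minimal sketch of a hypothetical subclass, assuming `thread_id` arrives through `RunnableConfig`'s standard `configurable` mapping as the docstrings above describe:

```python
from typing import Any

from gllm_pipeline.steps.pipeline_step import BasePipelineStep


class ThreadAwareStep(BasePipelineStep):
    """Hypothetical step that records which thread invoked it."""

    async def execute(self, state, runtime, config=None) -> dict[str, Any] | None:
        # thread_id reaches the step via RunnableConfig's "configurable" mapping
        # when Pipeline.invoke(..., thread_id=...) is used, per the docstrings above.
        thread_id = (config or {}).get("configurable", {}).get("thread_id")
        return {"last_thread_id": thread_id}
```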
gllm_pipeline/steps/state_operator_step.pyi CHANGED
@@ -9,6 +9,7 @@ from gllm_pipeline.utils.error_handling import ErrorContext as ErrorContext
  from gllm_pipeline.utils.has_inputs_mixin import HasInputsMixin as HasInputsMixin
  from gllm_pipeline.utils.input_map import shallow_dump as shallow_dump
  from langgraph.runtime import Runtime
+ from langgraph.types import RunnableConfig as RunnableConfig
  from pydantic import BaseModel as BaseModel
  from typing import Any, Callable

@@ -56,7 +57,7 @@ class StateOperatorStep(BasePipelineStep, HasInputsMixin):
  cache_config (dict[str, Any] | None, optional): Cache configuration to be used for caching.
  Defaults to None, in which case no cache configuration is used.
  '''
- async def execute(self, state: PipelineState, runtime: Runtime[dict[str, Any] | BaseModel]) -> dict[str, Any]:
+ async def execute(self, state: PipelineState, runtime: Runtime[dict[str, Any] | BaseModel], config: RunnableConfig | None = None) -> dict[str, Any]:
  """Executes the operation and processes its output.

  This method validates inputs, prepares data, executes the operation, and formats the output for integration
@@ -65,6 +66,7 @@ class StateOperatorStep(BasePipelineStep, HasInputsMixin):
  Args:
  state (PipelineState): The current state of the pipeline, containing all data.
  runtime (Runtime[dict[str, Any] | BaseModel]): Runtime information for this step's execution.
+ config (RunnableConfig | None, optional): The runnable configuration. Defaults to None.

  Returns:
  dict[str, Any]: The update to the pipeline state after this step's operation.
gllm_pipeline/steps/subgraph_step.pyi CHANGED
@@ -11,6 +11,7 @@ from gllm_pipeline.utils.error_handling import ErrorContext as ErrorContext
  from gllm_pipeline.utils.has_inputs_mixin import HasInputsMixin as HasInputsMixin
  from gllm_pipeline.utils.input_map import shallow_dump as shallow_dump
  from langgraph.runtime import Runtime as Runtime
+ from langgraph.types import RunnableConfig as RunnableConfig
  from pydantic import BaseModel as BaseModel
  from typing import Any

@@ -59,7 +60,7 @@ class SubgraphStep(BaseCompositeStep, HasInputsMixin):
  cache_config (dict[str, Any] | None, optional): Cache configuration to be used for caching.
  Defaults to None, in which case no cache configuration is used.
  '''
- async def execute(self, state: PipelineState, runtime: Runtime[dict[str, Any] | BaseModel]) -> dict[str, Any]:
+ async def execute(self, state: PipelineState, runtime: Runtime[dict[str, Any] | BaseModel], config: RunnableConfig | None = None) -> dict[str, Any]:
  """Executes the subgraph and processes its output.

  This method prepares data, executes the subgraph, and formats the output for integration
@@ -69,6 +70,7 @@ class SubgraphStep(BaseCompositeStep, HasInputsMixin):
  Args:
  state (PipelineState): The current state of the pipeline, containing all data.
  runtime (Runtime[dict[str, Any] | BaseModel]): Runtime information for this step's execution.
+ config (RunnableConfig | None, optional): The runnable configuration. Defaults to None.

  Returns:
  dict[str, Any]: The update to the pipeline state after this step's operation.
gllm_pipeline/steps/terminator_step.pyi CHANGED
@@ -2,7 +2,7 @@ from gllm_pipeline.alias import PipelineState as PipelineState
  from gllm_pipeline.steps.pipeline_step import BasePipelineStep as BasePipelineStep
  from langgraph.graph import StateGraph as StateGraph
  from langgraph.runtime import Runtime as Runtime
- from langgraph.types import RetryPolicy as RetryPolicy
+ from langgraph.types import RetryPolicy as RetryPolicy, RunnableConfig as RunnableConfig
  from pydantic import BaseModel as BaseModel
  from typing import Any

@@ -48,10 +48,11 @@ class TerminatorStep(BasePipelineStep):
  Returns:
  list[str]: Empty list as this step has no endpoints (it terminates the flow).
  """
- async def execute(self, state: PipelineState, runtime: Runtime[dict[str, Any] | BaseModel]) -> None:
+ async def execute(self, state: PipelineState, runtime: Runtime[dict[str, Any] | BaseModel], config: RunnableConfig | None = None) -> None:
  """Executes this step, which does nothing but pass through the state.

  Args:
  state (PipelineState): The current pipeline state.
  runtime (Runtime[dict[str, Any] | BaseModel]): The runtime information.
+ config (RunnableConfig | None, optional): The runnable configuration. Defaults to None.
  """
Binary file gllm_pipeline.cp312-win_amd64.pyd CHANGED
gllm_pipeline.pyi CHANGED
@@ -20,6 +20,7 @@ import gllm_datastore
  import gllm_datastore.cache
  import gllm_datastore.cache.cache
  import asyncio
+ import uuid
  import copy
  import gllm_core.utils.imports
  import gllm_core.utils.logger_manager
gllm_pipeline_binary-0.4.23.dist-info/METADATA CHANGED
@@ -1,6 +1,6 @@
  Metadata-Version: 2.2
  Name: gllm-pipeline-binary
- Version: 0.4.21
+ Version: 0.4.23
  Summary: A library containing components related to Gen AI applications pipeline orchestration.
  Author-email: Dimitrij Ray <dimitrij.ray@gdplabs.id>, Henry Wicaksono <henry.wicaksono@gdplabs.id>, Kadek Denaya <kadek.d.r.diana@gdplabs.id>
  Requires-Python: <3.13,>=3.11
gllm_pipeline_binary-0.4.23.dist-info/RECORD CHANGED
@@ -1,5 +1,5 @@
- gllm_pipeline.cp312-win_amd64.pyd,sha256=zH_aiOL7m_egsHnbFCLel10g9B5V0dGQi19F367U2XI,2160640
- gllm_pipeline.pyi,sha256=QRQhvO2Qq-xIEm7IGhaJ7ZELWzla_t4KiOV9MtH6zSQ,2294
+ gllm_pipeline.cp312-win_amd64.pyd,sha256=tzPFp3ZMu0Ek6EYDOwCAfzeEO6N-jxpuOPCntj91GHE,2177024
+ gllm_pipeline.pyi,sha256=SCe8vHfVDlgMZ1I_dfYPa-qFjsj_IWl0lgTa7lRrQqQ,2306
  gllm_pipeline/__init__.pyi,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  gllm_pipeline/alias.pyi,sha256=FbALRYZpDlmQMsKNUvgCi6ji11PrEtNo2kgzbt0iT7g,237
  gllm_pipeline/types.pyi,sha256=CV3cEAxlNsnVatYz5iCxqmEFPEqeKW5vv-qUD3FpF54,241
@@ -7,10 +7,10 @@ gllm_pipeline/exclusions/__init__.pyi,sha256=_LwIlqmH4Iiksn7p09d2vZG4Ek8CdKC8UcD
  gllm_pipeline/exclusions/exclusion_manager.pyi,sha256=DzoL-2KeTRmFgJEo8rzYViFYKbzZVTZGJmKvzaoTC0M,2960
  gllm_pipeline/exclusions/exclusion_set.pyi,sha256=11XTt6IfkHpzomcNybA78SfWlp752Z3AGhXfm2rL0Fk,1685
  gllm_pipeline/pipeline/__init__.pyi,sha256=1IKGdMvmLWEiOOmAKFNUPm-gdw13zrnU1gs7tDNzgEU,168
- gllm_pipeline/pipeline/pipeline.pyi,sha256=3aPgaDfAUohJo5yCbJ68uSJyD7QE3jD8LszFkkTVA-Y,14651
+ gllm_pipeline/pipeline/pipeline.pyi,sha256=m8OStEgk9SsV6faKX6wEqGyygE_xF421wJWQJvBAEeY,14979
  gllm_pipeline/pipeline/states.pyi,sha256=EiyfBPwrVDZ336w5wyD1q8W4E6G1uZNzsP-bzrHDumo,6464
  gllm_pipeline/pipeline/composer/__init__.pyi,sha256=-hcOUQgpTRt1QjQfRurTf-UApFnTrhilx6vN-gYd5J0,666
- gllm_pipeline/pipeline/composer/composer.pyi,sha256=foztmOTsqdd6CW1CY4QQrObe2shy_yvEopz_knwAmFk,26697
+ gllm_pipeline/pipeline/composer/composer.pyi,sha256=7h7EhEA-hex6w36Is6uGTz9OBUbmq6C0SdkeBeLFcAI,28715
  gllm_pipeline/pipeline/composer/guard_composer.pyi,sha256=YfbXmzyU3CwAvGnCfM-6MVcTdxk53-j6Cv3IdzNr_-c,3335
  gllm_pipeline/pipeline/composer/if_else_composer.pyi,sha256=uGyd1S7P5uXdER_fAzVIftg0yovZjMbN-L9JxgHxZNw,3196
  gllm_pipeline/pipeline/composer/parallel_composer.pyi,sha256=rL6dc9Rm1Bzo_Acl0yLEEUZZQmNmfuNNpU4jhjt3Yzg,2593
@@ -39,21 +39,21 @@ gllm_pipeline/router/preset/aurelio/__init__.pyi,sha256=47DEQpj8HBSa-_TImW-5JCeu
  gllm_pipeline/router/preset/aurelio/router_image_domain_specific.pyi,sha256=6pm2aV2fgmyIaKPMkNbAl337HSbFAUFuDxVIEgYAmk8,1093
  gllm_pipeline/router/preset/lm_based/__init__.pyi,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  gllm_pipeline/router/preset/lm_based/router_image_domain_specific.pyi,sha256=UdiuoSXm2MVAL8AspAaSkyXYkE59bYj1y4xRRgKwavE,655
- gllm_pipeline/steps/__init__.pyi,sha256=f_dgel-CsDow7nSWYQDVTQYOogwvty8tEeh2lZautqM,1967
- gllm_pipeline/steps/_func.pyi,sha256=fKL6aCobBmjo7vznRKJeeML4VmJtoQhD8G5L5vG3Pao,60843
+ gllm_pipeline/steps/__init__.pyi,sha256=5HtVA5CODr_9_7_OGEXFXU40edqhHa9YlCV5qVx3xbU,1989
+ gllm_pipeline/steps/_func.pyi,sha256=FS-g9LiyHb8OHLM8bXrAMiqvk4-KgaxV8RacD0IQw-Q,64125
  gllm_pipeline/steps/branching_step.pyi,sha256=iNarrcZgWfiRdr8CJfGm8GzUlYq13Rx5cgYXnBsNWN4,1041
- gllm_pipeline/steps/component_step.pyi,sha256=2rHqYeVHcKCqsA8GXeTyVAWus6rCkYp0GHK7qnACZjc,5547
+ gllm_pipeline/steps/component_step.pyi,sha256=VNBFZscK2Q4HgBt8ZrbE6B69oLTKcXo-KkqbTjmYYhM,5748
  gllm_pipeline/steps/composite_step.pyi,sha256=lvueTBQG_t0TtS5qRvUzZOIt3h6-uD26DJXW4ZSkuUc,3544
- gllm_pipeline/steps/conditional_step.pyi,sha256=8WLvtNAS-wuYxjelTBBKo8KwkLaJmNWYrMXgsnpIeSk,10189
+ gllm_pipeline/steps/conditional_step.pyi,sha256=Nm7zjxyPooRjJboT8Pqc3UwWKiA1gJHOwCtPW7KtrpE,10501
  gllm_pipeline/steps/guard_step.pyi,sha256=c_vdRz3hjfPu2DFkBt0_UBGJErQpdJwl0AE-GNxC4gM,4779
- gllm_pipeline/steps/log_step.pyi,sha256=p0DVXHV1mf4zhC5QP5uuzM8cVytrzY9uZHS_s1SYgKs,3100
- gllm_pipeline/steps/map_reduce_step.pyi,sha256=WE4a7SqeIf9BC-aJWQ-KnikN9q2YfJzQc5-_slIpRtg,6034
- gllm_pipeline/steps/no_op_step.pyi,sha256=mVCpfUOIGGs_q6BMy0g1tsaDH8JBiKyFMcVHeeOTMXY,1379
- gllm_pipeline/steps/parallel_step.pyi,sha256=Z5FhVXJslQ777cdi7Bnzm27yqJU61MflNic7lAIscno,8382
- gllm_pipeline/steps/pipeline_step.pyi,sha256=oc_Sg-YhFSFbGKaNZoLvkka44kyQap7euj30AB0WWJ4,11213
- gllm_pipeline/steps/state_operator_step.pyi,sha256=60hvqAw8QLYyhO9zo8F56bVpG2494WZpoZOtpYyiaT8,5111
- gllm_pipeline/steps/subgraph_step.pyi,sha256=j0oF18OJ3VjApXO27l6B2zZO-joBdkS3J2dZYjSwTAo,5825
- gllm_pipeline/steps/terminator_step.pyi,sha256=ZBZEWS232HRWJUgxqznB3dCjLw0XoezBAuK3gczaKp0,2514
+ gllm_pipeline/steps/log_step.pyi,sha256=XjL_-mlsWAEcoCBZ--qa8XkGdIXrx1K-chI8aXVsOFE,3301
+ gllm_pipeline/steps/map_reduce_step.pyi,sha256=c9qo_EmQWlu-jjAHeOKeyj8dVRL6yR89Tj5RHRoxhhA,6235
+ gllm_pipeline/steps/no_op_step.pyi,sha256=3DWW_gmY_66_mmNmnn2ic_NJZ0tNi_obDfjN8oLZByk,1580
+ gllm_pipeline/steps/parallel_step.pyi,sha256=XI_gM3Z1WS2R0j_qfrwf0tausxvX6VWuiknSCLgAetg,8555
+ gllm_pipeline/steps/pipeline_step.pyi,sha256=p0qNBL7v8Gv0wc7CeN3hIv08u_I8uaT1xPQwt-YOQPM,11711
+ gllm_pipeline/steps/state_operator_step.pyi,sha256=55mX2vxUUmRy2sS4iqIE4ZM_D4pB1FFoQa5Ox9N1Jdg,5312
+ gllm_pipeline/steps/subgraph_step.pyi,sha256=xvHxBiFbXJhMPW-06YcoW1SjVGthTj1_O4nymSew9L0,6026
+ gllm_pipeline/steps/terminator_step.pyi,sha256=M1LNw1AszTVMtwOHGrWvnr15A8qIgWuHYUdbFNrCzUM,2687
  gllm_pipeline/steps/step_error_handler/__init__.pyi,sha256=6eUbWMlQKQjlqS2KJHIMZksb_dXUxmsgRzoJ03tnX4o,618
  gllm_pipeline/steps/step_error_handler/empty_step_error_handler.pyi,sha256=JwD09mJD-80_pmeOEaB7blKCuYXOsju4JyVejOGtTLI,901
  gllm_pipeline/steps/step_error_handler/fallback_step_error_handler.pyi,sha256=N4Skd5C5df34KdephUX5l_KclMBnnvGBmhNQFKhmtBs,1326
@@ -71,7 +71,7 @@ gllm_pipeline/utils/mermaid.pyi,sha256=B096GTXxVAO--kw3UDsbysOsnjGOytYfozX39YaM2
  gllm_pipeline/utils/retry_converter.pyi,sha256=JPUuaGzKpVLshrbhX9rQHYl5XmC9GDa59rGU-FtOpWM,1128
  gllm_pipeline/utils/step_execution.pyi,sha256=3o28tiCHR8t-6Vk3Poz91V-CLdYrdhvJblPW9AoOK-c,996
  gllm_pipeline.build/.gitignore,sha256=aEiIwOuxfzdCmLZe4oB1JsBmCUxwG8x-u-HBCV9JT8E,1
- gllm_pipeline_binary-0.4.21.dist-info/METADATA,sha256=qXwlCvVnTxYG6BT28dtv-18pNAT7IQFumJKQ4jcbwUA,4476
- gllm_pipeline_binary-0.4.21.dist-info/WHEEL,sha256=x5rgv--I0NI0IT1Lh9tN1VG2cI637p3deednwYLKnxc,96
- gllm_pipeline_binary-0.4.21.dist-info/top_level.txt,sha256=C3yeOtoE6ZhuOnBEq_FFc_Rp954IHJBlB6fBgSdAWYI,14
- gllm_pipeline_binary-0.4.21.dist-info/RECORD,,
+ gllm_pipeline_binary-0.4.23.dist-info/METADATA,sha256=rro3zbAKhq4bHW2wFZZjHyFTFaAH_ZxbCCWFpJ9OKvY,4476
+ gllm_pipeline_binary-0.4.23.dist-info/WHEEL,sha256=x5rgv--I0NI0IT1Lh9tN1VG2cI637p3deednwYLKnxc,96
+ gllm_pipeline_binary-0.4.23.dist-info/top_level.txt,sha256=C3yeOtoE6ZhuOnBEq_FFc_Rp954IHJBlB6fBgSdAWYI,14
+ gllm_pipeline_binary-0.4.23.dist-info/RECORD,,