FlowerPower 0.30.0__py3-none-any.whl → 0.31.1__py3-none-any.whl

This diff compares two publicly released versions of the package, as they appear in their respective public registries. It is provided for informational purposes only.
Files changed (38)
  1. flowerpower/cfg/__init__.py +143 -25
  2. flowerpower/cfg/base.py +132 -11
  3. flowerpower/cfg/exceptions.py +53 -0
  4. flowerpower/cfg/pipeline/__init__.py +151 -35
  5. flowerpower/cfg/pipeline/adapter.py +1 -0
  6. flowerpower/cfg/pipeline/builder.py +24 -25
  7. flowerpower/cfg/pipeline/builder_adapter.py +142 -0
  8. flowerpower/cfg/pipeline/builder_executor.py +101 -0
  9. flowerpower/cfg/pipeline/run.py +99 -40
  10. flowerpower/cfg/project/__init__.py +59 -14
  11. flowerpower/cfg/project/adapter.py +6 -0
  12. flowerpower/cli/__init__.py +8 -2
  13. flowerpower/cli/cfg.py +0 -38
  14. flowerpower/cli/pipeline.py +121 -83
  15. flowerpower/cli/utils.py +120 -71
  16. flowerpower/flowerpower.py +94 -120
  17. flowerpower/pipeline/config_manager.py +180 -0
  18. flowerpower/pipeline/executor.py +126 -0
  19. flowerpower/pipeline/lifecycle_manager.py +231 -0
  20. flowerpower/pipeline/manager.py +121 -274
  21. flowerpower/pipeline/pipeline.py +66 -278
  22. flowerpower/pipeline/registry.py +45 -4
  23. flowerpower/utils/__init__.py +19 -0
  24. flowerpower/utils/adapter.py +286 -0
  25. flowerpower/utils/callback.py +73 -67
  26. flowerpower/utils/config.py +306 -0
  27. flowerpower/utils/executor.py +178 -0
  28. flowerpower/utils/filesystem.py +194 -0
  29. flowerpower/utils/misc.py +312 -138
  30. flowerpower/utils/security.py +221 -0
  31. {flowerpower-0.30.0.dist-info → flowerpower-0.31.1.dist-info}/METADATA +2 -2
  32. flowerpower-0.31.1.dist-info/RECORD +53 -0
  33. flowerpower/cfg/pipeline/_schedule.py +0 -32
  34. flowerpower-0.30.0.dist-info/RECORD +0 -42
  35. {flowerpower-0.30.0.dist-info → flowerpower-0.31.1.dist-info}/WHEEL +0 -0
  36. {flowerpower-0.30.0.dist-info → flowerpower-0.31.1.dist-info}/entry_points.txt +0 -0
  37. {flowerpower-0.30.0.dist-info → flowerpower-0.31.1.dist-info}/licenses/LICENSE +0 -0
  38. {flowerpower-0.30.0.dist-info → flowerpower-0.31.1.dist-info}/top_level.txt +0 -0
flowerpower/cfg/pipeline/__init__.py

@@ -3,13 +3,14 @@ import yaml
  from fsspec_utils import AbstractFileSystem, BaseStorageOptions, filesystem
  from hamilton.function_modifiers import source, value
  from munch import Munch, munchify
+ from typing import Optional

- from ..base import BaseConfig
+ from ..base import BaseConfig, validate_file_path
+ from ..exceptions import ConfigLoadError, ConfigSaveError, ConfigPathError
  from .adapter import AdapterConfig
  from .run import ExecutorConfig as ExecutorConfig
  from .run import RunConfig
  from .run import WithAdapterConfig as WithAdapterConfig
- #from .schedule import ScheduleConfig


  class PipelineConfig(BaseConfig):
@@ -22,7 +23,6 @@ class PipelineConfig(BaseConfig):
      Attributes:
          name (str | None): The name of the pipeline.
          run (RunConfig): Configuration for pipeline execution.
-         schedule (ScheduleConfig): Configuration for pipeline scheduling. DEPRECATED.
          params (dict): Pipeline parameters.
          adapter (AdapterConfig): Configuration for the pipeline adapter.
          h_params (dict): Hamilton-formatted parameters.
@@ -45,7 +45,6 @@ class PipelineConfig(BaseConfig):

      name: str | None = msgspec.field(default=None)
      run: RunConfig = msgspec.field(default_factory=RunConfig)
-     # schedule: ScheduleConfig = msgspec.field(default_factory=ScheduleConfig)
      params: dict = msgspec.field(default_factory=dict)
      adapter: AdapterConfig = msgspec.field(default_factory=AdapterConfig)
      h_params: dict = msgspec.field(default_factory=dict)
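With the deprecated `schedule` field gone, a default-constructed `PipelineConfig` carries only the fields above. A minimal sketch of the resulting defaults (assuming flowerpower 0.31.1 is installed; the pipeline name is illustrative):

```python
from flowerpower.cfg.pipeline import PipelineConfig

cfg = PipelineConfig(name="etl_daily")  # "etl_daily" is an illustrative name
print(type(cfg.run).__name__)  # RunConfig, from default_factory
print(cfg.params)              # {} (munchified in __post_init__)
print(cfg.h_params)            # {} (Hamilton-formatted mirror of params)
```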
@@ -54,36 +53,100 @@ class PipelineConfig(BaseConfig):
          if isinstance(self.params, dict):
              self.h_params = munchify(self.to_h_params(self.params))
              self.params = munchify(self.params)
+
+         # Validate pipeline name if provided
+         if self.name is not None:
+             self._validate_pipeline_name()

      def to_yaml(self, path: str, fs: AbstractFileSystem):
+         """Save pipeline configuration to YAML file.
+
+         Args:
+             path: Path to the YAML file.
+             fs: Filesystem instance.
+
+         Raises:
+             ConfigSaveError: If saving the configuration fails.
+             ConfigPathError: If the path contains directory traversal attempts.
+         """
+         try:
+             # Validate the path to prevent directory traversal
+             validated_path = validate_file_path(path)
+         except ConfigPathError as e:
+             raise ConfigSaveError(f"Path validation failed: {e}", path=path, original_error=e)
+
          try:
-             fs.makedirs(fs._parent(path), exist_ok=True)
-             with fs.open(path, "w") as f:
+             fs.makedirs(fs._parent(validated_path), exist_ok=True)
+             with fs.open(validated_path, "w") as f:
                  d = self.to_dict()
                  d.pop("name")
                  d.pop("h_params")
                  yaml.dump(d, f, default_flow_style=False)
-         except NotImplementedError:
-             raise NotImplementedError(
-                 "The filesystem "
-                 f"{self.fs.fs.protocol[0] if isinstance(self.fs.fs.protocol, tuple) else self.fs.fs.protocol} "
-                 "does not support writing files."
+         except NotImplementedError as e:
+             raise ConfigSaveError(
+                 f"The filesystem does not support writing files.",
+                 path=validated_path,
+                 original_error=e
+             )
+         except Exception as e:
+             raise ConfigSaveError(
+                 f"Failed to write configuration to {validated_path}",
+                 path=validated_path,
+                 original_error=e
              )

      @classmethod
      def from_dict(cls, name: str, data: dict | Munch):
          data.update({"name": name})
-         return msgspec.convert(data, cls)
+
+         # Handle null params field by converting to empty dict
+         # This fixes the issue where YAML parses empty sections with comments as null
+         if data.get('params') is None:
+             data['params'] = {}
+
+         instance = msgspec.convert(data, cls)
+         # Manually call __post_init__ since msgspec.convert doesn't call it
+         instance.__post_init__()
+         return instance

      @classmethod
      def from_yaml(cls, name: str, path: str, fs: AbstractFileSystem):
-         with fs.open(path) as f:
-             data = yaml.full_load(f)
-         return cls.from_dict(name=name, data=data)
+         """Load pipeline configuration from YAML file.
+
+         Args:
+             name: Pipeline name.
+             path: Path to the YAML file.
+             fs: Filesystem instance.
+
+         Returns:
+             Loaded pipeline configuration.
+
+         Raises:
+             ConfigLoadError: If loading the configuration fails.
+             ConfigPathError: If the path contains directory traversal attempts.
+         """
+         try:
+             # Validate the path to prevent directory traversal
+             validated_path = validate_file_path(path)
+         except ConfigPathError as e:
+             raise ConfigLoadError(f"Path validation failed: {e}", path=path, original_error=e)
+
+         try:
+             with fs.open(validated_path) as f:
+                 data = yaml.safe_load(f)
+             return cls.from_dict(name=name, data=data)
+         except Exception as e:
+             raise ConfigLoadError(
+                 f"Failed to load configuration from {validated_path}",
+                 path=validated_path,
+                 original_error=e
+             )

      def update(self, d: dict | Munch):
          for k, v in d.items():
-             eval(f"self.{k}.update({v})")
+             # Safe attribute access instead of eval()
+             if hasattr(self, k) and hasattr(getattr(self, k), 'update'):
+                 getattr(self, k).update(v)
              if k == "params":
                  self.params.update(munchify(v))
                  self.h_params = munchify(self.to_h_params(self.params))
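Two behavior changes in this hunk are worth illustrating: `from_dict` now coerces a `null` `params` section to `{}` (YAML parses a section containing only comments as `None`), and `update` no longer routes keys and values through `eval`. A hedged sketch of the new behavior (the YAML snippet and keys are illustrative):

```python
import yaml
from flowerpower.cfg.pipeline import PipelineConfig

# A params section holding only a comment parses as None...
data = yaml.safe_load("params:\n  # tuned later\nrun: {}\n")
assert data["params"] is None

# ...which from_dict now normalizes to {} before msgspec.convert().
cfg = PipelineConfig.from_dict(name="etl_daily", data=data)
assert cfg.params == {}

# update() now resolves keys with getattr() instead of eval(), so a
# crafted key can no longer execute arbitrary code.
cfg.update({"params": {"threshold": 0.5}})
assert cfg.params.threshold == 0.5  # munchified: attribute access works
```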
@@ -133,11 +196,10 @@ class PipelineConfig(BaseConfig):
                  return value(val)
              # For all other values
              return val
-
-         # Step 1: Replace each value with a dictionary containing key and value
-         result = {k: {k: d[k]} for k in d}
-
-         # Step 2: Transform all values recursively
+
+         result = {k: {k: d[k]} for k in d}  # Step 1: Wrap each parameter in its own dict
+
+         # Step 2: Transform each parameter value recursively
          return {k: transform_recursive(v, d) for k, v in result.items()}

      @classmethod
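The reshuffled comments make the two-step structure of `to_h_params` easier to follow: every top-level parameter is first wrapped in a dict keyed by its own name, and only then are the leaves rewritten into Hamilton `source`/`value` references (the `value(val)` branch visible at the top of this hunk). A small illustration of step 1, which is plain Python:

```python
params = {"threshold": 0.5, "label": "prod"}  # illustrative parameters

# Step 1: wrap each parameter in its own dict
result = {k: {k: params[k]} for k in params}
print(result)  # {'threshold': {'threshold': 0.5}, 'label': {'label': 'prod'}}

# Step 2 (inside to_h_params) then maps the leaves, yielding roughly
# {'threshold': {'threshold': value(0.5)}, 'label': {'label': value('prod')}}
```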
@@ -168,22 +230,72 @@ class PipelineConfig(BaseConfig):
          ```
          """
          if fs is None:
-             fs = filesystem(
-                 base_dir, cached=False, dirfs=True, storage_options=storage_options
+             # Use cached filesystem for better performance
+             storage_options_hash = cls._hash_storage_options(storage_options)
+             fs = cls._get_cached_filesystem(base_dir, storage_options_hash)
+         if fs.exists("conf/pipelines") and name is not None:
+
+             pipeline = PipelineConfig.from_yaml(
+                 name=name,
+                 path=f"conf/pipelines/{name}.yml",
+                 fs=fs,
              )
-         if fs.exists("conf/pipelines"):
-             if name is not None:
-                 pipeline = PipelineConfig.from_yaml(
-                     name=name,
-                     path=f"conf/pipelines/{name}.yml",
-                     fs=fs,
-                 )
-             else:
-                 pipeline = PipelineConfig(name=name)
          else:
              pipeline = PipelineConfig(name=name)

          return pipeline
+
+
+     # Helper methods for centralized load/save logic
+     @classmethod
+     def _load_pipeline_config(cls, base_dir: str, name: str | None, fs: AbstractFileSystem) -> "PipelineConfig":
+         """Centralized pipeline configuration loading logic.
+
+         Args:
+             base_dir: Base directory for the pipeline.
+             name: Pipeline name.
+             fs: Filesystem instance.
+
+         Returns:
+             Loaded pipeline configuration.
+         """
+         if fs.exists("conf/pipelines") and name is not None:
+             pipeline = cls.from_yaml(
+                 name=name,
+                 path=f"conf/pipelines/{name}.yml",
+                 fs=fs,
+             )
+         else:
+             pipeline = cls(name=name)
+         return pipeline
+
+
+     def _save_pipeline_config(self, fs: AbstractFileSystem) -> None:
+         """Centralized pipeline configuration saving logic.
+
+         Args:
+             fs: Filesystem instance.
+         """
+         h_params = getattr(self, "h_params")
+         self.to_yaml(path=f"conf/pipelines/{self.name}.yml", fs=fs)
+         setattr(self, "h_params", h_params)
+
+     def _validate_pipeline_name(self) -> None:
+         """Validate pipeline name parameter.
+
+         Raises:
+             ValueError: If pipeline name contains invalid characters.
+         """
+         if not isinstance(self.name, str):
+             raise ValueError(f"Pipeline name must be a string, got {type(self.name)}")
+
+         # Check for directory traversal attempts
+         if '..' in self.name or '/' in self.name or '\\' in self.name:
+             raise ValueError(f"Invalid pipeline name: {self.name}. Contains path traversal characters.")
+
+         # Check for empty string
+         if not self.name.strip():
+             raise ValueError("Pipeline name cannot be empty or whitespace only.")

      def save(
          self,
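Because `__post_init__` now calls `_validate_pipeline_name()` whenever a name is set, malformed names fail at construction time rather than later, when a path is built from them. A quick sketch of what passes and what raises (names are illustrative):

```python
from flowerpower.cfg.pipeline import PipelineConfig

PipelineConfig(name="etl_daily")  # ok

for bad in ("../escape", "sub/dir", "sub\\dir", "   "):
    try:
        PipelineConfig(name=bad)
    except ValueError as exc:
        print(exc)  # traversal or empty-name error from _validate_pipeline_name
```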
@@ -209,9 +321,9 @@ class PipelineConfig(BaseConfig):
          ```
          """
          if fs is None:
-             fs = filesystem(
-                 base_dir, cached=True, dirfs=True, storage_options=storage_options
-             )
+             # Use cached filesystem for better performance
+             storage_options_hash = self._hash_storage_options(storage_options)
+             fs = self._get_cached_filesystem(base_dir, storage_options_hash)

          fs.makedirs("conf/pipelines", exist_ok=True)
          if name is not None:
@@ -219,6 +331,10 @@ class PipelineConfig(BaseConfig):
          if self.name is None:
              raise ValueError("Pipeline name is not set. Please provide a name.")

+         # Validate pipeline name to prevent directory traversal
+         if self.name and ('..' in self.name or '/' in self.name or '\\' in self.name):
+             raise ValueError(f"Invalid pipeline name: {self.name}. Contains path traversal characters.")
+
          h_params = getattr(self, "h_params")

          self.to_yaml(path=f"conf/pipelines/{self.name}.yml", fs=fs)
flowerpower/cfg/pipeline/builder.py

@@ -1,4 +1,5 @@
  import msgspec
+ import os
  from munch import munchify

  from ... import settings
@@ -8,6 +8,8 @@ from ..base import BaseConfig
  from .adapter import AdapterConfig as PipelineAdapterConfig
  from .run import ExecutorConfig, RunConfig, WithAdapterConfig
  from ..project.adapter import AdapterConfig as ProjectAdapterConfig
+ from .builder_executor import ExecutorBuilder
+ from .builder_adapter import AdapterBuilder


  class RunConfigBuilder:
@@ -40,6 +42,10 @@ class RunConfigBuilder:
          # Initialize with empty config
          self._config = RunConfig()

+         # Initialize sub-builders
+         self._executor_builder = ExecutorBuilder()
+         self._adapter_builder = AdapterBuilder()
+
          # Load defaults from pipeline and project configs
          self._load_defaults()

@@ -148,16 +154,9 @@ class RunConfigBuilder:
          Returns:
              Self for method chaining
          """
-         if not self._config.executor:
-             self._config.executor = ExecutorConfig()
-
-         self._config.executor.type = executor_type
-
-         # Apply additional executor options
-         for key, value in kwargs.items():
-             if hasattr(self._config.executor, key):
-                 setattr(self._config.executor, key, value)
-
+         self._executor_builder.with_type(executor_type)
+         if kwargs:
+             self._executor_builder.with_config(kwargs)
          return self

      def with_adapter(self, adapter_name: str, **kwargs) -> "RunConfigBuilder":
@@ -170,17 +169,10 @@ class RunConfigBuilder:
          Returns:
              Self for method chaining
          """
-         if not self._config.with_adapter:
-             self._config.with_adapter = WithAdapterConfig()
-
-         # Enable the adapter
-         if hasattr(self._config.with_adapter, adapter_name):
-             setattr(self._config.with_adapter, adapter_name, True)
-
-         # Store adapter configuration for merging
-         if not hasattr(self, '_adapter_configs'):
-             self._adapter_configs = {}
-         self._adapter_configs[adapter_name] = kwargs
+         # Enable the adapter using the adapter builder
+         enable_method = getattr(self._adapter_builder, f"enable_{adapter_name}", None)
+         if enable_method:
+             enable_method(True, **kwargs)

          return self

@@ -315,16 +307,23 @@ class RunConfigBuilder:
          # Create a deep copy to avoid modifying the internal state
          final_config = copy.deepcopy(self._config)

+         # Build executor configuration
+         final_config.executor = self._executor_builder.build()
+
+         # Build adapter configuration
+         final_config.with_adapter = self._adapter_builder.build()
+
          # Merge adapter configurations
-         if hasattr(self, '_adapter_configs') and self._adapter_configs:
-             self._merge_adapter_configs(final_config)
+         adapter_configs = self._adapter_builder.get_adapter_configs()
+         if adapter_configs:
+             self._merge_adapter_configs(final_config, adapter_configs)

          # Validate configuration
          self._validate_config(final_config)

          return final_config

-     def _merge_adapter_configs(self, config: RunConfig):
+     def _merge_adapter_configs(self, config: RunConfig, adapter_configs: dict[str, dict[str, Any]]):
          """Merge adapter configurations from builder with project/pipeline configs."""
          if not config.pipeline_adapter_cfg:
              config.pipeline_adapter_cfg = {}
@@ -333,7 +332,7 @@ class RunConfigBuilder:
              config.project_adapter_cfg = {}

          # Merge project adapter defaults
-         for adapter_name, adapter_config in self._adapter_configs.items():
+         for adapter_name, adapter_config in adapter_configs.items():
              if adapter_name in ['hamilton_tracker', 'mlflow', 'opentelemetry']:
                  # Merge with project config
                  if hasattr(self._project_adapter_cfg, adapter_name):
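Taken together, `RunConfigBuilder` now delegates to the two sub-builders and reassembles their output in `build()`. One subtlety of the `getattr`-based dispatch in `with_adapter`: an adapter name with no matching `enable_<name>` method resolves to `None` and the call is silently a no-op. A hedged usage sketch (the `RunConfigBuilder` constructor arguments are not shown in this diff, so `builder` is assumed to be an initialized instance, and the kwargs are illustrative):

```python
run_config = (
    builder
    .with_executor("threadpool", max_workers=8)      # delegated to ExecutorBuilder
    .with_adapter("mlflow", experiment_name="demo")  # dispatched to enable_mlflow()
    .with_adapter("no_such_adapter")                 # silently ignored
    .build()  # builds executor/adapter configs, merges kwargs, validates
)
print(run_config.executor.type)        # "threadpool"
print(run_config.with_adapter.mlflow)  # True
```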
flowerpower/cfg/pipeline/builder_adapter.py (new file)

@@ -0,0 +1,142 @@
+ """
+ Adapter builder for RunConfig.
+ """
+
+ from typing import Any, Optional
+ from fsspec_utils import AbstractFileSystem, BaseStorageOptions, filesystem
+
+ from ..base import BaseConfig
+ from .run import WithAdapterConfig
+
+
+ class AdapterBuilder:
+     """Builder for creating WithAdapterConfig objects."""
+
+     def __init__(self, adapter_config: Optional[WithAdapterConfig] = None):
+         """Initialize the AdapterBuilder.
+
+         Args:
+             adapter_config: Initial adapter configuration to build upon.
+         """
+         self._config = adapter_config or WithAdapterConfig()
+         self._adapter_configs = {}
+
+     def enable_hamilton_tracker(self, enabled: bool = True, **kwargs) -> "AdapterBuilder":
+         """Enable or disable Hamilton tracker adapter.
+
+         Args:
+             enabled: Whether to enable the adapter
+             **kwargs: Additional configuration options
+
+         Returns:
+             Self for method chaining
+         """
+         self._config.hamilton_tracker = enabled
+         if enabled and kwargs:
+             self._adapter_configs['hamilton_tracker'] = kwargs
+         return self
+
+     def enable_mlflow(self, enabled: bool = True, **kwargs) -> "AdapterBuilder":
+         """Enable or disable MLflow adapter.
+
+         Args:
+             enabled: Whether to enable the adapter
+             **kwargs: Additional configuration options
+
+         Returns:
+             Self for method chaining
+         """
+         self._config.mlflow = enabled
+         if enabled and kwargs:
+             self._adapter_configs['mlflow'] = kwargs
+         return self
+
+     def enable_ray(self, enabled: bool = True, **kwargs) -> "AdapterBuilder":
+         """Enable or disable Ray adapter.
+
+         Args:
+             enabled: Whether to enable the adapter
+             **kwargs: Additional configuration options
+
+         Returns:
+             Self for method chaining
+         """
+         self._config.ray = enabled
+         if enabled and kwargs:
+             self._adapter_configs['ray'] = kwargs
+         return self
+
+     def enable_opentelemetry(self, enabled: bool = True, **kwargs) -> "AdapterBuilder":
+         """Enable or disable OpenTelemetry adapter.
+
+         Args:
+             enabled: Whether to enable the adapter
+             **kwargs: Additional configuration options
+
+         Returns:
+             Self for method chaining
+         """
+         self._config.opentelemetry = enabled
+         if enabled and kwargs:
+             self._adapter_configs['opentelemetry'] = kwargs
+         return self
+
+     def enable_progressbar(self, enabled: bool = True, **kwargs) -> "AdapterBuilder":
+         """Enable or disable progress bar adapter.
+
+         Args:
+             enabled: Whether to enable the adapter
+             **kwargs: Additional configuration options
+
+         Returns:
+             Self for method chaining
+         """
+         self._config.progressbar = enabled
+         if enabled and kwargs:
+             self._adapter_configs['progressbar'] = kwargs
+         return self
+
+     def enable_future(self, enabled: bool = True, **kwargs) -> "AdapterBuilder":
+         """Enable or disable future adapter.
+
+         Args:
+             enabled: Whether to enable the adapter
+             **kwargs: Additional configuration options
+
+         Returns:
+             Self for method chaining
+         """
+         self._config.future = enabled
+         if enabled and kwargs:
+             self._adapter_configs['future'] = kwargs
+         return self
+
+     def with_adapter_config(self, adapter_name: str, config: dict[str, Any]) -> "AdapterBuilder":
+         """Set configuration for a specific adapter.
+
+         Args:
+             adapter_name: Name of the adapter
+             config: Configuration dictionary
+
+         Returns:
+             Self for method chaining
+         """
+         if hasattr(self._config, adapter_name):
+             self._adapter_configs[adapter_name] = config
+         return self
+
+     def build(self) -> WithAdapterConfig:
+         """Build the final WithAdapterConfig object.
+
+         Returns:
+             Fully configured WithAdapterConfig object
+         """
+         return self._config
+
+     def get_adapter_configs(self) -> dict[str, dict[str, Any]]:
+         """Get the collected adapter configurations.
+
+         Returns:
+             Dictionary of adapter configurations
+         """
+         return self._adapter_configs
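A short, hedged usage sketch of the new `AdapterBuilder` (the MLflow kwarg is illustrative; kwargs are stored for later merging rather than validated here):

```python
from flowerpower.cfg.pipeline.builder_adapter import AdapterBuilder

cfg = (
    AdapterBuilder()
    .enable_mlflow(True, tracking_uri="http://localhost:5000")
    .enable_progressbar()
    .build()
)
assert cfg.mlflow and cfg.progressbar

# kwargs are collected per adapter for RunConfigBuilder to merge later:
print(AdapterBuilder().enable_ray(True, num_cpus=4).get_adapter_configs())
# {'ray': {'num_cpus': 4}}
```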
flowerpower/cfg/pipeline/builder_executor.py (new file)

@@ -0,0 +1,101 @@
+ """
+ Executor builder for RunConfig.
+ """
+
+ from typing import Any, Optional, Union
+ from fsspec_utils import AbstractFileSystem, BaseStorageOptions, filesystem
+
+ from ... import settings
+ from ..base import BaseConfig
+ from .run import ExecutorConfig
+
+
+ class ExecutorBuilder:
+     """Builder for creating ExecutorConfig objects."""
+
+     def __init__(self, executor_config: Optional[ExecutorConfig] = None):
+         """Initialize the ExecutorBuilder.
+
+         Args:
+             executor_config: Initial executor configuration to build upon.
+         """
+         self._config = executor_config or ExecutorConfig()
+
+     def with_type(self, executor_type: str) -> "ExecutorBuilder":
+         """Set the executor type.
+
+         Args:
+             executor_type: Type of executor ('synchronous', 'threadpool', 'processpool', 'ray', 'dask')
+
+         Returns:
+             Self for method chaining
+         """
+         self._config.type = executor_type
+         return self
+
+     def with_max_workers(self, max_workers: int) -> "ExecutorBuilder":
+         """Set the maximum number of workers.
+
+         Args:
+             max_workers: Maximum number of worker threads/processes
+
+         Returns:
+             Self for method chaining
+         """
+         self._config.max_workers = max_workers
+         return self
+
+     def with_num_cpus(self, num_cpus: int) -> "ExecutorBuilder":
+         """Set the number of CPUs to use.
+
+         Args:
+             num_cpus: Number of CPUs to allocate
+
+         Returns:
+             Self for method chaining
+         """
+         self._config.num_cpus = num_cpus
+         return self
+
+     def with_config(self, config: dict[str, Any]) -> "ExecutorBuilder":
+         """Apply additional configuration options.
+
+         Args:
+             config: Dictionary of additional configuration options
+
+         Returns:
+             Self for method chaining
+         """
+         for key, value in config.items():
+             if hasattr(self._config, key):
+                 setattr(self._config, key, value)
+         return self
+
+     def build(self) -> ExecutorConfig:
+         """Build the final ExecutorConfig object.
+
+         Returns:
+             Fully configured ExecutorConfig object
+
+         Raises:
+             ValueError: If configuration is invalid
+         """
+         self._validate_config()
+         return self._config
+
+     def _validate_config(self) -> None:
+         """Validate the executor configuration.
+
+         Raises:
+             ValueError: If configuration is invalid
+         """
+         if self._config.type:
+             valid_executors = ['synchronous', 'threadpool', 'processpool', 'ray', 'dask']
+             if self._config.type not in valid_executors:
+                 raise ValueError(f"Invalid executor type: {self._config.type}")
+
+         if self._config.max_workers is not None and self._config.max_workers < 1:
+             raise ValueError("max_workers must be at least 1")
+
+         if self._config.num_cpus is not None and self._config.num_cpus < 1:
+             raise ValueError("num_cpus must be at least 1")