FlowerPower 0.9.13.1__py3-none-any.whl → 1.0.0b1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (85)
  1. flowerpower/__init__.py +17 -2
  2. flowerpower/cfg/__init__.py +201 -149
  3. flowerpower/cfg/base.py +122 -24
  4. flowerpower/cfg/pipeline/__init__.py +254 -0
  5. flowerpower/cfg/pipeline/adapter.py +66 -0
  6. flowerpower/cfg/pipeline/run.py +40 -11
  7. flowerpower/cfg/pipeline/schedule.py +69 -79
  8. flowerpower/cfg/project/__init__.py +149 -0
  9. flowerpower/cfg/project/adapter.py +57 -0
  10. flowerpower/cfg/project/job_queue.py +165 -0
  11. flowerpower/cli/__init__.py +92 -37
  12. flowerpower/cli/job_queue.py +878 -0
  13. flowerpower/cli/mqtt.py +32 -1
  14. flowerpower/cli/pipeline.py +559 -406
  15. flowerpower/cli/utils.py +29 -18
  16. flowerpower/flowerpower.py +12 -8
  17. flowerpower/fs/__init__.py +20 -2
  18. flowerpower/fs/base.py +350 -26
  19. flowerpower/fs/ext.py +797 -216
  20. flowerpower/fs/storage_options.py +1097 -55
  21. flowerpower/io/base.py +13 -18
  22. flowerpower/io/loader/__init__.py +28 -0
  23. flowerpower/io/loader/deltatable.py +7 -10
  24. flowerpower/io/metadata.py +1 -0
  25. flowerpower/io/saver/__init__.py +28 -0
  26. flowerpower/io/saver/deltatable.py +4 -3
  27. flowerpower/job_queue/__init__.py +252 -0
  28. flowerpower/job_queue/apscheduler/__init__.py +11 -0
  29. flowerpower/job_queue/apscheduler/_setup/datastore.py +110 -0
  30. flowerpower/job_queue/apscheduler/_setup/eventbroker.py +93 -0
  31. flowerpower/job_queue/apscheduler/manager.py +1063 -0
  32. flowerpower/job_queue/apscheduler/setup.py +524 -0
  33. flowerpower/job_queue/apscheduler/trigger.py +169 -0
  34. flowerpower/job_queue/apscheduler/utils.py +309 -0
  35. flowerpower/job_queue/base.py +382 -0
  36. flowerpower/job_queue/rq/__init__.py +10 -0
  37. flowerpower/job_queue/rq/_trigger.py +37 -0
  38. flowerpower/job_queue/rq/concurrent_workers/gevent_worker.py +226 -0
  39. flowerpower/job_queue/rq/concurrent_workers/thread_worker.py +231 -0
  40. flowerpower/job_queue/rq/manager.py +1449 -0
  41. flowerpower/job_queue/rq/setup.py +150 -0
  42. flowerpower/job_queue/rq/utils.py +69 -0
  43. flowerpower/pipeline/__init__.py +5 -0
  44. flowerpower/pipeline/base.py +118 -0
  45. flowerpower/pipeline/io.py +407 -0
  46. flowerpower/pipeline/job_queue.py +505 -0
  47. flowerpower/pipeline/manager.py +1586 -0
  48. flowerpower/pipeline/registry.py +560 -0
  49. flowerpower/pipeline/runner.py +560 -0
  50. flowerpower/pipeline/visualizer.py +142 -0
  51. flowerpower/plugins/mqtt/__init__.py +12 -0
  52. flowerpower/plugins/mqtt/cfg.py +16 -0
  53. flowerpower/plugins/mqtt/manager.py +789 -0
  54. flowerpower/settings.py +110 -0
  55. flowerpower/utils/logging.py +21 -0
  56. flowerpower/utils/misc.py +57 -9
  57. flowerpower/utils/sql.py +122 -24
  58. flowerpower/utils/templates.py +2 -142
  59. flowerpower-1.0.0b1.dist-info/METADATA +324 -0
  60. flowerpower-1.0.0b1.dist-info/RECORD +94 -0
  61. flowerpower/_web/__init__.py +0 -61
  62. flowerpower/_web/routes/config.py +0 -103
  63. flowerpower/_web/routes/pipelines.py +0 -173
  64. flowerpower/_web/routes/scheduler.py +0 -136
  65. flowerpower/cfg/pipeline/tracker.py +0 -14
  66. flowerpower/cfg/project/open_telemetry.py +0 -8
  67. flowerpower/cfg/project/tracker.py +0 -11
  68. flowerpower/cfg/project/worker.py +0 -19
  69. flowerpower/cli/scheduler.py +0 -309
  70. flowerpower/cli/web.py +0 -44
  71. flowerpower/event_handler.py +0 -23
  72. flowerpower/mqtt.py +0 -609
  73. flowerpower/pipeline.py +0 -2499
  74. flowerpower/scheduler.py +0 -680
  75. flowerpower/tui.py +0 -79
  76. flowerpower/utils/datastore.py +0 -186
  77. flowerpower/utils/eventbroker.py +0 -127
  78. flowerpower/utils/executor.py +0 -58
  79. flowerpower/utils/trigger.py +0 -140
  80. flowerpower-0.9.13.1.dist-info/METADATA +0 -586
  81. flowerpower-0.9.13.1.dist-info/RECORD +0 -76
  82. /flowerpower/{cfg/pipeline/params.py → cli/worker.py} +0 -0
  83. {flowerpower-0.9.13.1.dist-info → flowerpower-1.0.0b1.dist-info}/WHEEL +0 -0
  84. {flowerpower-0.9.13.1.dist-info → flowerpower-1.0.0b1.dist-info}/entry_points.txt +0 -0
  85. {flowerpower-0.9.13.1.dist-info → flowerpower-1.0.0b1.dist-info}/top_level.txt +0 -0
flowerpower/__init__.py CHANGED
@@ -1,5 +1,20 @@
1
- from .flowerpower import init as init_flowerpower
2
-
3
1
  import importlib.metadata
4
2
 
3
+ from .flowerpower import init as init_project # noqa: E402
4
+
5
+ from .pipeline import PipelineManager
6
+ from .job_queue import JobQueueManager
7
+ from .cfg import Config, ProjectConfig, PipelineConfig
8
+
5
9
  __version__ = importlib.metadata.version("FlowerPower")
10
+
11
+ __all__ = [
12
+ "__version__",
13
+ "init_project",
14
+ "PipelineManager",
15
+ "JobQueueManager",
16
+ "Config",
17
+ "ProjectConfig",
18
+ "PipelineConfig",
19
+ "PipelineConfig",
20
+ ]
@@ -1,164 +1,96 @@
1
1
  from pathlib import Path
2
2
 
3
- import yaml
4
- from fsspec import AbstractFileSystem
5
- from hamilton.function_modifiers import source, value
6
- from munch import Munch, munchify
7
- from pydantic import Field
3
+ import msgspec
4
+ from munch import Munch
8
5
 
9
- from ..fs import get_filesystem
6
+ from ..fs import AbstractFileSystem, get_filesystem
10
7
  from .base import BaseConfig
11
- from .pipeline.run import PipelineRunConfig
12
- from .pipeline.schedule import PipelineScheduleConfig
13
- from .pipeline.tracker import PipelineTrackerConfig
14
- from .project.open_telemetry import ProjectOpenTelemetryConfig
15
- from .project.tracker import ProjectTrackerConfig
16
- from .project.worker import ProjectWorkerConfig
17
-
18
-
19
- class PipelineConfig(BaseConfig):
20
- name: str | None = None
21
- run: PipelineRunConfig = Field(default_factory=PipelineRunConfig)
22
- schedule: PipelineScheduleConfig = Field(default_factory=PipelineScheduleConfig)
23
- params: dict | Munch = {}
24
- tracker: PipelineTrackerConfig = Field(default_factory=PipelineTrackerConfig)
25
- h_params: dict | Munch = {}
26
-
27
- def model_post_init(self, __context):
28
- if isinstance(self.params, dict):
29
- self.h_params = munchify(self.to_h_params(self.params))
30
- self.params = munchify(self.params)
31
-
32
- def to_yaml(self, path: str, fs: AbstractFileSystem):
33
- try:
34
- fs.makedirs(fs._parent(path), exist_ok=True)
35
- with fs.open(path, "w") as f:
36
- d = self.to_dict()
37
- d.pop("name")
38
- d.pop("h_params")
39
- yaml.dump(d, f, default_flow_style=False)
40
- except NotImplementedError:
41
- raise NotImplementedError(
42
- "The filesystem "
43
- f"{self.fs.fs.protocol[0] if isinstance(self.fs.fs.protocol, tuple) else self.fs.fs.protocol} "
44
- "does not support writing files."
45
- )
8
+ from .pipeline import PipelineConfig, init_pipeline_config
9
+ from .project import ProjectConfig, init_project_config
46
10
 
47
- @classmethod
48
- def from_dict(cls, name: str, d: dict | Munch):
49
- d.update({"name": name})
50
- return cls(**d)
51
11
 
52
- @classmethod
53
- def from_yaml(cls, name: str, path: str, fs: AbstractFileSystem):
54
- with fs.open(path) as f:
55
- return cls.from_dict(name, yaml.full_load(f))
56
-
57
- def update(self, d: dict | Munch):
58
- for k, v in d.items():
59
- eval(f"self.{k}.update({v})")
60
- if k == "params":
61
- self.params.update(munchify(v))
62
- self.h_params = munchify(self.to_h_params(self.params))
63
- # self.params = munchify(self.params)
64
- if "params" in d:
65
- self.h_params = munchify(self.to_h_params(self.params))
66
- self.params = munchify(self.params)
67
-
68
- @staticmethod
69
- # def to_h_params(d: dict) -> dict:
70
- # """Coverts a dictionary of function arguments to Hamilton function parameters"""
71
- #
72
- # def transform_recursive(val, original_dict):
73
- # # If it's a dictionary, recursively transform its values
74
- # if isinstance(val, dict):
75
- # return {
76
- # k: transform_recursive(v, original_dict) for k, v in val.items()
77
- # }
78
- # # If it's a string and matches a key in the original dictionary
79
- # elif isinstance(val, str) and val in original_dict:
80
- # return source(val)
81
- # # For all other values
82
- # else:
83
- # return value(val)
84
- #
85
- # # Step 1: Replace each value with a dictionary containing key and value
86
- # result = {k: {k: d[k]} for k in d}
87
- #
88
- # # Step 2 & 3: Transform all values recursively
89
- # return {k: transform_recursive(v, d) for k, v in result.items()}
90
- def to_h_params(d: dict) -> dict:
91
- """Converts a dictionary of function arguments to Hamilton function parameters"""
92
-
93
- def transform_recursive(val, original_dict, depth=1):
94
- if isinstance(val, dict):
95
- # If we're at depth 3, wrap the entire dictionary in value()
96
- if depth == 3:
97
- return value(val)
98
- # Otherwise, continue recursing
99
- return {
100
- k: transform_recursive(v, original_dict, depth + 1)
101
- for k, v in val.items()
102
- }
103
- # If it's a string and matches a key in the original dictionary
104
- elif isinstance(val, str) and val in original_dict:
105
- return source(val)
106
- # For non-dictionary values at depth 3
107
- elif depth == 3:
108
- return value(val)
109
- # For all other values
110
- return val
111
-
112
- # Step 1: Replace each value with a dictionary containing key and value
113
- result = {k: {k: d[k]} for k in d}
114
-
115
- # Step 2: Transform all values recursively
116
- return {k: transform_recursive(v, d) for k, v in result.items()}
117
-
118
-
119
- class ProjectConfig(BaseConfig):
120
- name: str | None = None
121
- worker: ProjectWorkerConfig = Field(default_factory=ProjectWorkerConfig)
122
- tracker: ProjectTrackerConfig = Field(default_factory=ProjectTrackerConfig)
123
- open_telemetry: ProjectOpenTelemetryConfig = Field(
124
- default_factory=ProjectOpenTelemetryConfig
125
- )
12
+ class Config(BaseConfig):
13
+ """Main configuration class for FlowerPower, combining project and pipeline settings.
14
+
15
+ This class serves as the central configuration manager, handling both project-wide
16
+ and pipeline-specific settings. It provides functionality for loading and saving
17
+ configurations using various filesystem abstractions.
18
+
19
+ Attributes:
20
+ pipeline (PipelineConfig): Configuration for the pipeline.
21
+ project (ProjectConfig): Configuration for the project.
22
+ fs (AbstractFileSystem | None): Filesystem abstraction for I/O operations.
23
+ base_dir (str | Path | None): Base directory for the configuration.
24
+ storage_options (dict | Munch): Options for filesystem operations.
25
+
26
+ Example:
27
+ ```python
28
+ # Load configuration
29
+ config = Config.load(
30
+ base_dir="my_project",
31
+ name="project1",
32
+ pipeline_name="data-pipeline"
33
+ )
126
34
 
35
+ # Save configuration
36
+ config.save(project=True, pipeline=True)
37
+ ```
38
+ """
127
39
 
128
- class Config(BaseConfig):
129
- pipeline: PipelineConfig = Field(default_factory=PipelineConfig)
130
- project: ProjectConfig = Field(default_factory=ProjectConfig)
40
+ pipeline: PipelineConfig = msgspec.field(default_factory=PipelineConfig)
41
+ project: ProjectConfig = msgspec.field(default_factory=ProjectConfig)
131
42
  fs: AbstractFileSystem | None = None
132
43
  base_dir: str | Path | None = None
133
- storage_options: dict | Munch = Field(default_factory=Munch)
44
+ storage_options: dict | Munch = msgspec.field(default_factory=Munch)
134
45
 
135
46
  @classmethod
136
47
  def load(
137
48
  cls,
138
- base_dir: str = "",
49
+ base_dir: str = ".",
139
50
  name: str | None = None,
140
51
  pipeline_name: str | None = None,
52
+ job_queue_type: str | None = None,
141
53
  fs: AbstractFileSystem | None = None,
142
54
  storage_options: dict | Munch = Munch(),
143
55
  ):
56
+ """Load both project and pipeline configurations.
57
+
58
+ Args:
59
+ base_dir (str, optional): Base directory for configurations. Defaults to ".".
60
+ name (str | None, optional): Project name. Defaults to None.
61
+ pipeline_name (str | None, optional): Pipeline name. Defaults to None.
62
+ job_queue_type (str | None, optional): Type of job queue to use. Defaults to None.
63
+ fs (AbstractFileSystem | None, optional): Filesystem to use. Defaults to None.
64
+ storage_options (dict | Munch, optional): Options for filesystem. Defaults to empty Munch.
65
+
66
+ Returns:
67
+ Config: Combined configuration instance.
68
+
69
+ Example:
70
+ ```python
71
+ config = Config.load(
72
+ base_dir="my_project",
73
+ name="test_project",
74
+ pipeline_name="etl",
75
+ job_queue_type="rq"
76
+ )
77
+ ```
78
+ """
144
79
  if fs is None:
145
80
  fs = get_filesystem(base_dir, cached=True, dirfs=True, **storage_options)
146
- if fs.exists("conf/project.yml"):
147
- project = ProjectConfig.from_yaml(path="conf/project.yml", fs=fs)
148
- else:
149
- project = ProjectConfig(name=name)
150
-
151
- if pipeline_name is not None:
152
- if fs.exists(f"conf/pipelines/{pipeline_name}.yml"):
153
- pipeline = PipelineConfig.from_yaml(
154
- name=pipeline_name,
155
- path=f"conf/pipelines/{pipeline_name}.yml",
156
- fs=fs,
157
- )
158
- else:
159
- pipeline = PipelineConfig(name=pipeline_name)
160
- else:
161
- pipeline = PipelineConfig(name=pipeline_name)
81
+ project = ProjectConfig.load(
82
+ base_dir=base_dir,
83
+ name=name,
84
+ job_queue_type=job_queue_type,
85
+ fs=fs,
86
+ storage_options=storage_options,
87
+ )
88
+ pipeline = PipelineConfig.load(
89
+ base_dir=base_dir,
90
+ name=pipeline_name,
91
+ fs=fs,
92
+ storage_options=storage_options,
93
+ )
162
94
 
163
95
  return cls(
164
96
  base_dir=base_dir,
@@ -168,18 +100,44 @@ class Config(BaseConfig):
168
100
  storage_options=storage_options,
169
101
  )
170
102
 
171
- def save(self):
103
+ def save(
104
+ self,
105
+ project: bool = False,
106
+ pipeline: bool = True,
107
+ fs: AbstractFileSystem | None = None,
108
+ storage_options: dict | Munch = Munch(),
109
+ ):
110
+ """Save project and/or pipeline configurations.
111
+
112
+ Args:
113
+ project (bool, optional): Whether to save project config. Defaults to False.
114
+ pipeline (bool, optional): Whether to save pipeline config. Defaults to True.
115
+ fs (AbstractFileSystem | None, optional): Filesystem to use. Defaults to None.
116
+ storage_options (dict | Munch, optional): Options for filesystem. Defaults to empty Munch.
117
+
118
+ Example:
119
+ ```python
120
+ config.save(project=True, pipeline=True)
121
+ ```
122
+ """
123
+ if fs is None and self.fs is None:
124
+ self.fs = get_filesystem(
125
+ self.base_dir, cached=True, dirfs=True, **storage_options
126
+ )
127
+
172
128
  if not self.fs.exists("conf"):
173
129
  self.fs.makedirs("conf")
174
130
 
175
- if self.pipeline.name is not None:
176
- # h_params = self.pipeline.params.pop("h_params") if "h_params" in self.pipeline.params else None
131
+ if pipeline:
132
+ self.fs.makedirs("conf/pipelines", exist_ok=True)
133
+ h_params = self.pipeline.pop("h_params") if self.pipeline.h_params else None
177
134
  self.pipeline.to_yaml(
178
- f"conf/pipelines/{self.pipeline.name.replace('.', '/')}.yml", self.fs
135
+ path=f"conf/pipelines/{self.pipeline.name}.yml", fs=self.fs
179
136
  )
180
- # if h_params is not None:
181
- # self.pipeline.params["h_params"] = h_params
182
- self.project.to_yaml("conf/project.yml", self.fs)
137
+ if h_params:
138
+ self.pipeline.h_params = h_params
139
+ if project:
140
+ self.project.to_yaml("conf/project.yml", self.fs)
183
141
 
184
142
 
185
143
  def load(
@@ -189,6 +147,25 @@ def load(
189
147
  storage_options: dict | Munch = Munch(),
190
148
  fs: AbstractFileSystem | None = None,
191
149
  ):
150
+ """Helper function to load configuration.
151
+
152
+ This is a convenience wrapper around Config.load().
153
+
154
+ Args:
155
+ base_dir (str): Base directory for configurations.
156
+ name (str | None, optional): Project name. Defaults to None.
157
+ pipeline_name (str | None, optional): Pipeline name. Defaults to None.
158
+ storage_options (dict | Munch, optional): Options for filesystem. Defaults to empty Munch.
159
+ fs (AbstractFileSystem | None, optional): Filesystem to use. Defaults to None.
160
+
161
+ Returns:
162
+ Config: Combined configuration instance.
163
+
164
+ Example:
165
+ ```python
166
+ config = load(base_dir="my_project", name="test", pipeline_name="etl")
167
+ ```
168
+ """
192
169
  return Config.load(
193
170
  name=name,
194
171
  pipeline_name=pipeline_name,
@@ -198,5 +175,80 @@ def load(
198
175
  )
199
176
 
200
177
 
201
- def save(config: Config):
202
- config.save()
178
+ def save(
179
+ config: Config,
180
+ project: bool = False,
181
+ pipeline: bool = True,
182
+ fs: AbstractFileSystem | None = None,
183
+ storage_options: dict | Munch = Munch(),
184
+ ):
185
+ """Helper function to save configuration.
186
+
187
+ This is a convenience wrapper around Config.save().
188
+
189
+ Args:
190
+ config (Config): Configuration instance to save.
191
+ project (bool, optional): Whether to save project config. Defaults to False.
192
+ pipeline (bool, optional): Whether to save pipeline config. Defaults to True.
193
+ fs (AbstractFileSystem | None, optional): Filesystem to use. Defaults to None.
194
+ storage_options (dict | Munch, optional): Options for filesystem. Defaults to empty Munch.
195
+
196
+ Example:
197
+ ```python
198
+ config = load(base_dir="my_project")
199
+ save(config, project=True, pipeline=True)
200
+ ```
201
+ """
202
+ config.save(
203
+ project=project, pipeline=pipeline, fs=fs, storage_options=storage_options
204
+ )
205
+
206
+
207
+ def init_config(
208
+ base_dir: str = ".",
209
+ name: str | None = None,
210
+ pipeline_name: str | None = None,
211
+ job_queue_type: str | None = None,
212
+ fs: AbstractFileSystem | None = None,
213
+ storage_options: dict | Munch = Munch(),
214
+ ):
215
+ """Initialize a new configuration with both project and pipeline settings.
216
+
217
+ This function creates and initializes both project and pipeline configurations,
218
+ combining them into a single Config instance.
219
+
220
+ Args:
221
+ base_dir (str, optional): Base directory for configurations. Defaults to ".".
222
+ name (str | None, optional): Project name. Defaults to None.
223
+ pipeline_name (str | None, optional): Pipeline name. Defaults to None.
224
+ job_queue_type (str | None, optional): Type of job queue to use. Defaults to None.
225
+ fs (AbstractFileSystem | None, optional): Filesystem to use. Defaults to None.
226
+ storage_options (dict | Munch, optional): Options for filesystem. Defaults to empty Munch.
227
+
228
+ Returns:
229
+ Config: The initialized configuration instance.
230
+
231
+ Example:
232
+ ```python
233
+ config = init_config(
234
+ base_dir="my_project",
235
+ name="test_project",
236
+ pipeline_name="data-pipeline",
237
+ job_queue_type="rq"
238
+ )
239
+ ```
240
+ """
241
+ pipeline_cfg = init_pipeline_config(
242
+ base_dir=base_dir,
243
+ name=pipeline_name,
244
+ fs=fs,
245
+ storage_options=storage_options,
246
+ )
247
+ project_cfg = init_project_config(
248
+ base_dir=base_dir,
249
+ name=name,
250
+ job_queue_type=job_queue_type,
251
+ fs=fs,
252
+ storage_options=storage_options,
253
+ )
254
+ return Config(pipeline=pipeline_cfg, project=project_cfg, fs=fs, base_dir=base_dir)
flowerpower/cfg/base.py CHANGED
@@ -1,39 +1,137 @@
1
- from typing import Any
1
+ import copy
2
+ from typing import Any, Self
2
3
 
3
- import yaml
4
- from fsspec import AbstractFileSystem
5
- from munch import Munch, unmunchify
6
- from pydantic import BaseModel, ConfigDict
4
+ import msgspec
5
+ from fsspec import AbstractFileSystem, filesystem
7
6
 
8
7
 
9
- class BaseConfig(BaseModel):
10
- model_config = ConfigDict(arbitrary_types_allowed=True)
11
-
8
+ class BaseConfig(msgspec.Struct, kw_only=True):
12
9
  def to_dict(self) -> dict[str, Any]:
13
- return unmunchify(self.model_dump())
10
+ return msgspec.to_builtins(self)
14
11
 
15
12
  def to_yaml(self, path: str, fs: AbstractFileSystem | None = None) -> None:
13
+ """
14
+ Converts the instance to a YAML file.
15
+
16
+ Args:
17
+ path: The path to the YAML file.
18
+ fs: An optional filesystem instance to use for file operations.
19
+
20
+ Raises:
21
+ NotImplementedError: If the filesystem does not support writing files.
22
+ """
23
+ if fs is None:
24
+ fs = filesystem("file")
16
25
  try:
17
- with fs.open(path, "w") as f:
18
- yaml.dump(self.to_dict(), f, default_flow_style=False)
26
+ with fs.open(path, "wb") as f:
27
+ f.write(msgspec.yaml.encode(self, order="deterministic"))
28
+ # yaml.dump(self.to_dict(), f, default_flow_style=False)
19
29
  except NotImplementedError:
20
- raise NotImplementedError(
21
- "The filesystem "
22
- f"{self.fs.fs.protocol[0] if isinstance(self.fs.fs.protocol, tuple) else self.fs.fs.protocol} "
23
- "does not support writing files."
24
- )
30
+ raise NotImplementedError("The filesystem does not support writing files.")
25
31
 
26
32
  @classmethod
27
- def from_dict(cls, d: dict[str, Any] | Munch) -> "BaseConfig":
28
- return cls(**d)
33
+ def from_dict(cls, data: dict[str, Any]) -> "BaseConfig":
34
+ """
35
+ Converts a dictionary to an instance of the class.
36
+ Args:
37
+ data: The dictionary to convert.
38
+
39
+ Returns:
40
+ An instance of the class with the values from the dictionary.
41
+ """
42
+ return msgspec.convert(data, cls)
29
43
 
30
44
  @classmethod
31
- def from_yaml(cls, path: str, fs: AbstractFileSystem):
32
- # if fs is None:
33
- # fs = get_filesystem(".", cached=True)
45
+ def from_yaml(cls, path: str, fs: AbstractFileSystem | None = None) -> "BaseConfig":
46
+ """
47
+ Loads a YAML file and converts it to an instance of the class.
48
+
49
+ Args:
50
+ path: The path to the YAML file.
51
+ fs: An optional filesystem instance to use for file operations.
52
+
53
+ Returns:
54
+ An instance of the class with the values from the YAML file.
55
+
56
+ """
57
+ if fs is None:
58
+ fs = filesystem("file")
34
59
  with fs.open(path) as f:
35
- return cls.from_dict(yaml.full_load(f))
60
+ # data = yaml.full_load(f)
61
+ # return cls.from_dict(data)
62
+ return msgspec.yaml.decode(f.read(), type=cls, strict=False)
63
+
64
+ def update(self, d: dict[str, Any]) -> None:
65
+ for k, v in d.items():
66
+ if hasattr(self, k):
67
+ current_value = getattr(self, k)
68
+ if isinstance(current_value, dict) and isinstance(v, dict):
69
+ current_value.update(v)
70
+ else:
71
+ setattr(self, k, v)
72
+ else:
73
+ setattr(self, k, v)
74
+
75
+ def merge_dict(self, d: dict[str, Any]) -> Self:
76
+ """
77
+ Creates a copy of this instance and updates the copy with values
78
+ from the provided dictionary, only if the dictionary field's value is not
79
+ its default value. The original instance (self) is not modified.
80
+
81
+ Args:
82
+ d: The dictionary to get values from.
36
83
 
37
- def update(self, d: dict[str, Any] | Munch) -> None:
84
+ Returns:
85
+ A new instance of the struct with updated values.
86
+ """
87
+ self_copy = copy.copy(self)
38
88
  for k, v in d.items():
39
- setattr(self, k, v)
89
+ if hasattr(self_copy, k):
90
+ current_value = getattr(self_copy, k)
91
+ if isinstance(current_value, dict) and isinstance(v, dict):
92
+ current_value.update(v)
93
+ else:
94
+ setattr(self_copy, k, v)
95
+ else:
96
+ setattr(self_copy, k, v)
97
+ return self_copy
98
+
99
+ def merge(self, source: Self) -> Self:
100
+ """
101
+ Creates a copy of this instance and updates the copy with values
102
+ from the source struct, only if the source field's value is not
103
+ its default value. The original instance (self) is not modified.
104
+
105
+ Args:
106
+ source: The msgspec.Struct instance of the same type to get values from.
107
+
108
+ Returns:
109
+ A new instance of the struct with updated values.
110
+
111
+ Raises:
112
+ TypeError: If source is not of the same type as self.
113
+ """
114
+ if type(self) is not type(source):
115
+ raise TypeError(
116
+ f"Source must be an instance of {type(self).__name__}, not {type(source).__name__}"
117
+ )
118
+
119
+ updated_instance = copy.copy(self)
120
+
121
+ # Get default values if they exist
122
+ defaults = getattr(source, "__struct_defaults__", {})
123
+
124
+ for field in source.__struct_fields__:
125
+ source_value = getattr(source, field)
126
+ has_explicit_default = field in defaults
127
+ is_default_value = False
128
+
129
+ if has_explicit_default:
130
+ is_default_value = source_value == defaults[field]
131
+ else:
132
+ is_default_value = source_value is None
133
+
134
+ if not is_default_value:
135
+ setattr(updated_instance, field, source_value)
136
+
137
+ return updated_instance