cuvis-ai-schemas 0.1.0__py3-none-any.whl → 0.1.2__py3-none-any.whl

This diff shows the contents of publicly available package versions released to a supported registry. It is provided for informational purposes only and reflects the packages as they appear in their public registry.
@@ -1,238 +1,238 @@
- """Pipeline configuration schemas."""
-
- from __future__ import annotations
-
- from typing import TYPE_CHECKING, Any
-
- import yaml
- from pydantic import BaseModel, ConfigDict, Field
-
- if TYPE_CHECKING:
-     from pathlib import Path
-
-     from cuvis_ai_schemas.grpc.v1 import cuvis_ai_pb2
-
-
- class _BaseConfig(BaseModel):
-     """Base model with strict validation."""
-
-     model_config = ConfigDict(extra="forbid", validate_assignment=True, populate_by_name=True)
-
-
- class PipelineMetadata(_BaseConfig):
-     """Pipeline metadata for documentation and discovery.
-
-     Attributes
-     ----------
-     name : str
-         Pipeline name
-     description : str
-         Human-readable description
-     created : str
-         Creation timestamp (ISO format)
-     tags : list[str]
-         Tags for categorization and search
-     author : str
-         Author name or email
-     cuvis_ai_version : str
-         Version of cuvis-ai-schemas used
-     """
-
-     name: str
-     description: str = ""
-     created: str = ""
-     tags: list[str] = Field(default_factory=list)
-     author: str = ""
-     cuvis_ai_version: str = "0.1.0"
-
-     def to_dict(self) -> dict[str, Any]:
-         """Convert to dictionary."""
-         return self.model_dump()
-
-     @classmethod
-     def from_dict(cls, data: dict[str, Any]) -> PipelineMetadata:
-         """Load from dictionary."""
-         return cls.model_validate(data)
-
-     def to_proto(self) -> cuvis_ai_pb2.PipelineMetadata:
-         """Convert to proto message.
-
-         Requires cuvis-ai-schemas[proto] to be installed.
-
-         Returns
-         -------
-         cuvis_ai_pb2.PipelineMetadata
-             Proto message representation
-         """
-         try:
-             from cuvis_ai_schemas.grpc.v1 import cuvis_ai_pb2
-         except ImportError as exc:
-             msg = "Proto support not installed. Install with: pip install cuvis-ai-schemas[proto]"
-             raise ImportError(msg) from exc
-
-         return cuvis_ai_pb2.PipelineMetadata(
-             name=self.name,
-             description=self.description,
-             created=self.created,
-             tags=list(self.tags),
-             author=self.author,
-             cuvis_ai_version=self.cuvis_ai_version,
-         )
-
-
- class NodeConfig(_BaseConfig):
-     """Node configuration within a pipeline.
-
-     Attributes
-     ----------
-     id : str
-         Unique node identifier
-     class_name : str
-         Fully-qualified class name (e.g., 'my_package.MyNode')
-         Alias: 'class' for backward compatibility
-     params : dict[str, Any]
-         Node parameters/hyperparameters
-         Alias: 'hparams' for backward compatibility
-     """
-
-     id: str = Field(description="Unique node identifier")
-     class_name: str = Field(description="Fully-qualified class name", alias="class")
-     params: dict[str, Any] = Field(
-         default_factory=dict, description="Node parameters", alias="hparams"
-     )
-
-
- class ConnectionConfig(_BaseConfig):
-     """Connection between two nodes.
-
-     Attributes
-     ----------
-     from_node : str
-         Source node ID
-     from_port : str
-         Source port name
-     to_node : str
-         Target node ID
-     to_port : str
-         Target port name
-     """
-
-     from_node: str = Field(description="Source node ID")
-     from_port: str = Field(description="Source port name")
-     to_node: str = Field(description="Target node ID")
-     to_port: str = Field(description="Target port name")
-
-
- class PipelineConfig(_BaseConfig):
-     """Pipeline structure configuration.
-
-     Attributes
-     ----------
-     name : str
-         Pipeline name
-     nodes : list[NodeConfig] | list[dict[str, Any]]
-         Node definitions (can be NodeConfig or dict for flexibility)
-     connections : list[ConnectionConfig] | list[dict[str, Any]]
-         Node connections (can be ConnectionConfig or dict for flexibility)
-     frozen_nodes : list[str]
-         Node IDs to keep frozen during training
-     metadata : PipelineMetadata | None
-         Optional pipeline metadata
-     """
-
-     name: str = Field(default="", description="Pipeline name")
-     nodes: list[dict[str, Any]] = Field(description="Node definitions")
-     connections: list[dict[str, Any]] = Field(description="Node connections")
-     frozen_nodes: list[str] = Field(
-         default_factory=list, description="Node names to keep frozen during training"
-     )
-     metadata: PipelineMetadata | None = Field(
-         default=None, description="Optional pipeline metadata"
-     )
-
-     def to_proto(self) -> cuvis_ai_pb2.PipelineConfig:
-         """Convert to proto message.
-
-         Requires cuvis-ai-schemas[proto] to be installed.
-
-         Returns
-         -------
-         cuvis_ai_pb2.PipelineConfig
-             Proto message representation
-         """
-         try:
-             from cuvis_ai_schemas.grpc.v1 import cuvis_ai_pb2
-         except ImportError as exc:
-             msg = "Proto support not installed. Install with: pip install cuvis-ai-schemas[proto]"
-             raise ImportError(msg) from exc
-
-         return cuvis_ai_pb2.PipelineConfig(config_bytes=self.model_dump_json().encode("utf-8"))
-
-     @classmethod
-     def from_proto(cls, proto_config: cuvis_ai_pb2.PipelineConfig) -> PipelineConfig:
-         """Load from proto message.
-
-         Parameters
-         ----------
-         proto_config : cuvis_ai_pb2.PipelineConfig
-             Proto message to deserialize
-
-         Returns
-         -------
-         PipelineConfig
-             Loaded configuration
-         """
-         return cls.model_validate_json(proto_config.config_bytes.decode("utf-8"))
-
-     def to_json(self) -> str:
-         """Convert to JSON string."""
-         return self.model_dump_json()
-
-     @classmethod
-     def from_json(cls, payload: str) -> PipelineConfig:
-         """Load from JSON string."""
-         return cls.model_validate_json(payload)
-
-     def to_dict(self) -> dict[str, Any]:
-         """Convert to dictionary."""
-         return self.model_dump()
-
-     @classmethod
-     def from_dict(cls, data: dict[str, Any]) -> PipelineConfig:
-         """Load from dictionary."""
-         return cls.model_validate(data)
-
-     def save_to_file(self, path: str | Path) -> None:
-         """Save pipeline configuration to YAML file.
-
-         Parameters
-         ----------
-         path : str | Path
-             Output file path
-         """
-         from pathlib import Path
-
-         output_path = Path(path)
-         output_path.parent.mkdir(parents=True, exist_ok=True)
-         with output_path.open("w", encoding="utf-8") as f:
-             yaml.safe_dump(self.model_dump(), f, sort_keys=False)
-
-     @classmethod
-     def load_from_file(cls, path: str | Path) -> PipelineConfig:
-         """Load pipeline configuration from YAML file.
-
-         Parameters
-         ----------
-         path : str | Path
-             Input file path
-
-         Returns
-         -------
-         PipelineConfig
-             Loaded configuration
-         """
-         from pathlib import Path
-
-         with Path(path).open("r", encoding="utf-8") as f:
-             data = yaml.safe_load(f)
-         return cls.from_dict(data)
+ """Pipeline configuration schemas."""
+
+ from __future__ import annotations
+
+ from typing import TYPE_CHECKING, Any
+
+ import yaml
+ from pydantic import BaseModel, ConfigDict, Field
+
+ if TYPE_CHECKING:
+     from pathlib import Path
+
+     from cuvis_ai_schemas.grpc.v1 import cuvis_ai_pb2
+
+
+ class _BaseConfig(BaseModel):
+     """Base model with strict validation."""
+
+     model_config = ConfigDict(extra="forbid", validate_assignment=True, populate_by_name=True)
+
+
+ class PipelineMetadata(_BaseConfig):
+     """Pipeline metadata for documentation and discovery.
+
+     Attributes
+     ----------
+     name : str
+         Pipeline name
+     description : str
+         Human-readable description
+     created : str
+         Creation timestamp (ISO format)
+     tags : list[str]
+         Tags for categorization and search
+     author : str
+         Author name or email
+     cuvis_ai_version : str
+         Version of cuvis-ai-schemas used
+     """
+
+     name: str
+     description: str = ""
+     created: str = ""
+     tags: list[str] = Field(default_factory=list)
+     author: str = ""
+     cuvis_ai_version: str = "0.1.0"
+
+     def to_dict(self) -> dict[str, Any]:
+         """Convert to dictionary."""
+         return self.model_dump()
+
+     @classmethod
+     def from_dict(cls, data: dict[str, Any]) -> PipelineMetadata:
+         """Load from dictionary."""
+         return cls.model_validate(data)
+
+     def to_proto(self) -> cuvis_ai_pb2.PipelineMetadata:
+         """Convert to proto message.
+
+         Requires cuvis-ai-schemas[proto] to be installed.
+
+         Returns
+         -------
+         cuvis_ai_pb2.PipelineMetadata
+             Proto message representation
+         """
+         try:
+             from cuvis_ai_schemas.grpc.v1 import cuvis_ai_pb2
+         except ImportError as exc:
+             msg = "Proto support not installed. Install with: pip install cuvis-ai-schemas[proto]"
+             raise ImportError(msg) from exc
+
+         return cuvis_ai_pb2.PipelineMetadata(
+             name=self.name,
+             description=self.description,
+             created=self.created,
+             tags=list(self.tags),
+             author=self.author,
+             cuvis_ai_version=self.cuvis_ai_version,
+         )
+
+
+ class NodeConfig(_BaseConfig):
+     """Node configuration within a pipeline.
+
+     Attributes
+     ----------
+     id : str
+         Unique node identifier
+     class_name : str
+         Fully-qualified class name (e.g., 'my_package.MyNode')
+         Alias: 'class' for backward compatibility
+     params : dict[str, Any]
+         Node parameters/hyperparameters
+         Alias: 'hparams' for backward compatibility
+     """
+
+     id: str = Field(description="Unique node identifier")
+     class_name: str = Field(description="Fully-qualified class name", alias="class")
+     params: dict[str, Any] = Field(
+         default_factory=dict, description="Node parameters", alias="hparams"
+     )
+
+
+ class ConnectionConfig(_BaseConfig):
+     """Connection between two nodes.
+
+     Attributes
+     ----------
+     from_node : str
+         Source node ID
+     from_port : str
+         Source port name
+     to_node : str
+         Target node ID
+     to_port : str
+         Target port name
+     """
+
+     from_node: str = Field(description="Source node ID")
+     from_port: str = Field(description="Source port name")
+     to_node: str = Field(description="Target node ID")
+     to_port: str = Field(description="Target port name")
+
+
+ class PipelineConfig(_BaseConfig):
+     """Pipeline structure configuration.
+
+     Attributes
+     ----------
+     name : str
+         Pipeline name
+     nodes : list[NodeConfig] | list[dict[str, Any]]
+         Node definitions (can be NodeConfig or dict for flexibility)
+     connections : list[ConnectionConfig] | list[dict[str, Any]]
+         Node connections (can be ConnectionConfig or dict for flexibility)
+     frozen_nodes : list[str]
+         Node IDs to keep frozen during training
+     metadata : PipelineMetadata | None
+         Optional pipeline metadata
+     """
+
+     name: str = Field(default="", description="Pipeline name")
+     nodes: list[dict[str, Any]] = Field(description="Node definitions")
+     connections: list[dict[str, Any]] = Field(description="Node connections")
+     frozen_nodes: list[str] = Field(
+         default_factory=list, description="Node names to keep frozen during training"
+     )
+     metadata: PipelineMetadata | None = Field(
+         default=None, description="Optional pipeline metadata"
+     )
+
+     def to_proto(self) -> cuvis_ai_pb2.PipelineConfig:
+         """Convert to proto message.
+
+         Requires cuvis-ai-schemas[proto] to be installed.
+
+         Returns
+         -------
+         cuvis_ai_pb2.PipelineConfig
+             Proto message representation
+         """
+         try:
+             from cuvis_ai_schemas.grpc.v1 import cuvis_ai_pb2
+         except ImportError as exc:
+             msg = "Proto support not installed. Install with: pip install cuvis-ai-schemas[proto]"
+             raise ImportError(msg) from exc
+
+         return cuvis_ai_pb2.PipelineConfig(config_bytes=self.model_dump_json().encode("utf-8"))
+
+     @classmethod
+     def from_proto(cls, proto_config: cuvis_ai_pb2.PipelineConfig) -> PipelineConfig:
+         """Load from proto message.
+
+         Parameters
+         ----------
+         proto_config : cuvis_ai_pb2.PipelineConfig
+             Proto message to deserialize
+
+         Returns
+         -------
+         PipelineConfig
+             Loaded configuration
+         """
+         return cls.model_validate_json(proto_config.config_bytes.decode("utf-8"))
+
+     def to_json(self) -> str:
+         """Convert to JSON string."""
+         return self.model_dump_json()
+
+     @classmethod
+     def from_json(cls, payload: str) -> PipelineConfig:
+         """Load from JSON string."""
+         return cls.model_validate_json(payload)
+
+     def to_dict(self) -> dict[str, Any]:
+         """Convert to dictionary."""
+         return self.model_dump()
+
+     @classmethod
+     def from_dict(cls, data: dict[str, Any]) -> PipelineConfig:
+         """Load from dictionary."""
+         return cls.model_validate(data)
+
+     def save_to_file(self, path: str | Path) -> None:
+         """Save pipeline configuration to YAML file.
+
+         Parameters
+         ----------
+         path : str | Path
+             Output file path
+         """
+         from pathlib import Path
+
+         output_path = Path(path)
+         output_path.parent.mkdir(parents=True, exist_ok=True)
+         with output_path.open("w", encoding="utf-8") as f:
+             yaml.safe_dump(self.model_dump(), f, sort_keys=False)
+
+     @classmethod
+     def load_from_file(cls, path: str | Path) -> PipelineConfig:
+         """Load pipeline configuration from YAML file.
+
+         Parameters
+         ----------
+         path : str | Path
+             Input file path
+
+         Returns
+         -------
+         PipelineConfig
+             Loaded configuration
+         """
+         from pathlib import Path
+
+         with Path(path).open("r", encoding="utf-8") as f:
+             data = yaml.safe_load(f)
+         return cls.from_dict(data)
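For orientation, the schema above defines a round-trip API (to_dict/from_dict, to_json/from_json, save_to_file/load_from_file, plus the optional proto conversion). The sketch below shows how those pieces fit together; the import path, node id, class name, and file name are assumptions for illustration only, since the diff does not show module paths.

# A minimal usage sketch, assuming the classes live in a module such as
# cuvis_ai_schemas.pipeline (the actual module path is not visible in this diff).
from cuvis_ai_schemas.pipeline import NodeConfig, PipelineConfig, PipelineMetadata

# populate_by_name=True lets us use the field names here; the "class"/"hparams"
# aliases are also accepted when loading existing configs.
node = NodeConfig(id="norm", class_name="my_pkg.Normalize", params={"eps": 1e-6})

config = PipelineConfig(
    name="example",
    nodes=[node.model_dump(by_alias=True)],  # PipelineConfig stores nodes as plain dicts
    connections=[],
    metadata=PipelineMetadata(name="example", author="you@example.com"),
)

config.save_to_file("pipeline.yaml")               # written via yaml.safe_dump
restored = PipelineConfig.load_from_file("pipeline.yaml")
assert restored.to_dict() == config.to_dict()      # lossless YAML round trip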
@@ -1,48 +1,48 @@
- """Port specification for node inputs and outputs."""
-
- from __future__ import annotations
-
- from dataclasses import dataclass
- from typing import Any
-
-
- @dataclass
- class PortSpec:
-     """Specification for a node input or output port.
-
-     This is a lightweight schema definition. Full compatibility checking
-     logic is implemented in cuvis-ai-core.
-
-     Attributes
-     ----------
-     dtype : Any
-         Data type for the port (e.g., torch.Tensor, torch.float32, int, str)
-     shape : tuple[int | str, ...]
-         Expected shape with:
-         - Fixed dimensions: positive integers
-         - Flexible dimensions: -1
-         - Symbolic dimensions: strings (resolved from node attributes)
-     description : str
-         Human-readable description of the port
-     optional : bool
-         Whether the port is optional (for inputs)
-
-     Examples
-     --------
-     >>> # Fixed shape tensor port
-     >>> port = PortSpec(dtype=torch.Tensor, shape=(1, 3, 224, 224))
-
-     >>> # Flexible batch dimension
-     >>> port = PortSpec(dtype=torch.Tensor, shape=(-1, 3, 224, 224))
-
-     >>> # Symbolic dimension from node attribute
-     >>> port = PortSpec(dtype=torch.Tensor, shape=(-1, "num_channels", 224, 224))
-
-     >>> # Scalar port
-     >>> port = PortSpec(dtype=float, shape=())
-     """
-
-     dtype: Any
-     shape: tuple[int | str, ...]
-     description: str = ""
-     optional: bool = False
+ """Port specification for node inputs and outputs."""
+
+ from __future__ import annotations
+
+ from dataclasses import dataclass
+ from typing import Any
+
+
+ @dataclass
+ class PortSpec:
+     """Specification for a node input or output port.
+
+     This is a lightweight schema definition. Full compatibility checking
+     logic is implemented in cuvis-ai-core.
+
+     Attributes
+     ----------
+     dtype : Any
+         Data type for the port (e.g., torch.Tensor, torch.float32, int, str)
+     shape : tuple[int | str, ...]
+         Expected shape with:
+         - Fixed dimensions: positive integers
+         - Flexible dimensions: -1
+         - Symbolic dimensions: strings (resolved from node attributes)
+     description : str
+         Human-readable description of the port
+     optional : bool
+         Whether the port is optional (for inputs)
+
+     Examples
+     --------
+     >>> # Fixed shape tensor port
+     >>> port = PortSpec(dtype=torch.Tensor, shape=(1, 3, 224, 224))
+
+     >>> # Flexible batch dimension
+     >>> port = PortSpec(dtype=torch.Tensor, shape=(-1, 3, 224, 224))
+
+     >>> # Symbolic dimension from node attribute
+     >>> port = PortSpec(dtype=torch.Tensor, shape=(-1, "num_channels", 224, 224))
+
+     >>> # Scalar port
+     >>> port = PortSpec(dtype=float, shape=())
+     """
+
+     dtype: Any
+     shape: tuple[int | str, ...]
+     description: str = ""
+     optional: bool = False
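The PortSpec docstring defers real compatibility checking to cuvis-ai-core; the sketch below only illustrates what the -1 and string entries in shape express. The shape_matches helper, the import path, and the attrs lookup are hypothetical illustrations, not this package's or cuvis-ai-core's API.

# Illustrative only: a naive check showing the intent of -1 (any size accepted)
# and string entries (resolved from a node attribute). The import path is an
# assumption; the diff does not show the module's file name.
from cuvis_ai_schemas.ports import PortSpec


def shape_matches(spec: PortSpec, actual: tuple[int, ...], attrs: dict[str, int]) -> bool:
    """Hypothetical helper comparing a concrete shape against a PortSpec."""
    if len(spec.shape) != len(actual):
        return False
    for expected, got in zip(spec.shape, actual):
        if expected == -1:              # flexible dimension: any size accepted
            continue
        if isinstance(expected, str):   # symbolic dimension: look up node attribute
            expected = attrs[expected]
        if expected != got:
            return False
    return True


# e.g. a hyperspectral cube port with a flexible batch and a symbolic channel count
spec = PortSpec(dtype=object, shape=(-1, "num_channels", 224, 224))
print(shape_matches(spec, (8, 61, 224, 224), attrs={"num_channels": 61}))  # True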
@@ -1,6 +1,6 @@
- """Plugin system schemas."""
-
- from cuvis_ai_schemas.plugin.config import GitPluginConfig, LocalPluginConfig
- from cuvis_ai_schemas.plugin.manifest import PluginManifest
-
- __all__ = ["PluginManifest", "GitPluginConfig", "LocalPluginConfig"]
+ """Plugin system schemas."""
+
+ from cuvis_ai_schemas.plugin.config import GitPluginConfig, LocalPluginConfig
+ from cuvis_ai_schemas.plugin.manifest import PluginManifest
+
+ __all__ = ["PluginManifest", "GitPluginConfig", "LocalPluginConfig"]