cuvis-ai-schemas 0.1.0__py3-none-any.whl → 0.1.2__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- cuvis_ai_schemas/__init__.py +5 -5
- cuvis_ai_schemas/discovery/__init__.py +6 -6
- cuvis_ai_schemas/enums/__init__.py +5 -5
- cuvis_ai_schemas/enums/types.py +30 -30
- cuvis_ai_schemas/execution/__init__.py +12 -12
- cuvis_ai_schemas/execution/context.py +41 -41
- cuvis_ai_schemas/execution/monitoring.py +83 -83
- cuvis_ai_schemas/extensions/__init__.py +3 -3
- cuvis_ai_schemas/extensions/ui/__init__.py +8 -8
- cuvis_ai_schemas/extensions/ui/port_display.py +159 -159
- cuvis_ai_schemas/grpc/__init__.py +3 -3
- cuvis_ai_schemas/grpc/v1/__init__.py +11 -11
- cuvis_ai_schemas/pipeline/__init__.py +17 -17
- cuvis_ai_schemas/pipeline/config.py +238 -238
- cuvis_ai_schemas/pipeline/ports.py +48 -48
- cuvis_ai_schemas/plugin/__init__.py +6 -6
- cuvis_ai_schemas/plugin/config.py +118 -118
- cuvis_ai_schemas/plugin/manifest.py +95 -95
- {cuvis_ai_schemas-0.1.0.dist-info → cuvis_ai_schemas-0.1.2.dist-info}/METADATA +111 -111
- cuvis_ai_schemas-0.1.2.dist-info/RECORD +34 -0
- {cuvis_ai_schemas-0.1.0.dist-info → cuvis_ai_schemas-0.1.2.dist-info}/licenses/LICENSE +190 -190
- cuvis_ai_schemas-0.1.0.dist-info/RECORD +0 -34
- {cuvis_ai_schemas-0.1.0.dist-info → cuvis_ai_schemas-0.1.2.dist-info}/WHEEL +0 -0
- {cuvis_ai_schemas-0.1.0.dist-info → cuvis_ai_schemas-0.1.2.dist-info}/top_level.txt +0 -0
|
@@ -1,118 +1,118 @@
|
|
|
1
|
-
"""Plugin configuration schemas."""
|
|
2
|
-
|
|
3
|
-
from pathlib import Path
|
|
4
|
-
|
|
5
|
-
from pydantic import BaseModel, ConfigDict, Field, field_validator
|
|
6
|
-
|
|
7
|
-
|
|
8
|
-
class _BasePluginConfig(BaseModel):
|
|
9
|
-
"""Base plugin configuration with strict validation.
|
|
10
|
-
|
|
11
|
-
All plugin types inherit from this base class to ensure
|
|
12
|
-
consistent validation and error handling.
|
|
13
|
-
"""
|
|
14
|
-
|
|
15
|
-
model_config = ConfigDict(
|
|
16
|
-
extra="forbid", # Reject unknown fields (catch typos)
|
|
17
|
-
validate_assignment=True, # Validate on attribute assignment
|
|
18
|
-
populate_by_name=True, # Allow field aliases
|
|
19
|
-
)
|
|
20
|
-
|
|
21
|
-
provides: list[str] = Field(
|
|
22
|
-
description="List of fully-qualified class paths this plugin provides",
|
|
23
|
-
min_length=1, # At least one class required
|
|
24
|
-
)
|
|
25
|
-
|
|
26
|
-
@field_validator("provides")
|
|
27
|
-
@classmethod
|
|
28
|
-
def _validate_class_paths(cls, value: list[str]) -> list[str]:
|
|
29
|
-
"""Ensure class paths are well-formed."""
|
|
30
|
-
for class_path in value:
|
|
31
|
-
if not class_path or "." not in class_path:
|
|
32
|
-
msg = (
|
|
33
|
-
f"Invalid class path '{class_path}'. "
|
|
34
|
-
"Must be fully-qualified (e.g., 'package.module.ClassName')"
|
|
35
|
-
)
|
|
36
|
-
raise ValueError(msg)
|
|
37
|
-
return value
|
|
38
|
-
|
|
39
|
-
|
|
40
|
-
class GitPluginConfig(_BasePluginConfig):
|
|
41
|
-
"""Git repository plugin configuration.
|
|
42
|
-
|
|
43
|
-
Supports:
|
|
44
|
-
- SSH URLs: git@gitlab.com:user/repo.git
|
|
45
|
-
- HTTPS URLs: https://github.com/user/repo.git
|
|
46
|
-
- Git tags only: v1.2.3, v0.1.0-alpha, etc.
|
|
47
|
-
|
|
48
|
-
Note: Branches and commit hashes are NOT supported for reproducibility.
|
|
49
|
-
"""
|
|
50
|
-
|
|
51
|
-
repo: str = Field(
|
|
52
|
-
description="Git repository URL (SSH or HTTPS)",
|
|
53
|
-
min_length=1,
|
|
54
|
-
)
|
|
55
|
-
|
|
56
|
-
tag: str = Field(
|
|
57
|
-
description="Git tag (e.g., v1.2.3, v0.1.0-alpha). "
|
|
58
|
-
"Branches and commit hashes are not supported.",
|
|
59
|
-
min_length=1,
|
|
60
|
-
)
|
|
61
|
-
|
|
62
|
-
@field_validator("repo")
|
|
63
|
-
@classmethod
|
|
64
|
-
def _validate_repo_url(cls, value: str) -> str:
|
|
65
|
-
"""Validate Git repository URL format."""
|
|
66
|
-
if not (
|
|
67
|
-
value.startswith("git@") or value.startswith("https://") or value.startswith("http://")
|
|
68
|
-
):
|
|
69
|
-
msg = f"Invalid repo URL '{value}'. Must start with 'git@', 'https://', or 'http://'"
|
|
70
|
-
raise ValueError(msg)
|
|
71
|
-
return value
|
|
72
|
-
|
|
73
|
-
@field_validator("tag")
|
|
74
|
-
@classmethod
|
|
75
|
-
def _validate_tag(cls, value: str) -> str:
|
|
76
|
-
"""Validate Git tag is not empty."""
|
|
77
|
-
if not value.strip():
|
|
78
|
-
msg = "Git tag cannot be empty"
|
|
79
|
-
raise ValueError(msg)
|
|
80
|
-
return value.strip()
|
|
81
|
-
|
|
82
|
-
|
|
83
|
-
class LocalPluginConfig(_BasePluginConfig):
|
|
84
|
-
"""Local filesystem plugin configuration.
|
|
85
|
-
|
|
86
|
-
Supports:
|
|
87
|
-
- Absolute paths: /home/user/my-plugin
|
|
88
|
-
- Relative paths: ../my-plugin (resolved relative to manifest file)
|
|
89
|
-
- Windows paths: C:\\Users\\user\\my-plugin
|
|
90
|
-
"""
|
|
91
|
-
|
|
92
|
-
path: str = Field(
|
|
93
|
-
description="Absolute or relative path to plugin directory",
|
|
94
|
-
min_length=1,
|
|
95
|
-
)
|
|
96
|
-
|
|
97
|
-
@field_validator("path")
|
|
98
|
-
@classmethod
|
|
99
|
-
def _validate_path(cls, value: str) -> str:
|
|
100
|
-
"""Validate path is not empty."""
|
|
101
|
-
if not value.strip():
|
|
102
|
-
msg = "Path cannot be empty"
|
|
103
|
-
raise ValueError(msg)
|
|
104
|
-
return value.strip()
|
|
105
|
-
|
|
106
|
-
def resolve_path(self, manifest_dir: Path) -> Path:
|
|
107
|
-
"""Resolve relative paths to absolute paths.
|
|
108
|
-
|
|
109
|
-
Args:
|
|
110
|
-
manifest_dir: Directory containing the manifest file
|
|
111
|
-
|
|
112
|
-
Returns:
|
|
113
|
-
Absolute path to plugin directory
|
|
114
|
-
"""
|
|
115
|
-
plugin_path = Path(self.path)
|
|
116
|
-
if not plugin_path.is_absolute():
|
|
117
|
-
plugin_path = (manifest_dir / plugin_path).resolve()
|
|
118
|
-
return plugin_path
|
|
1
|
+
"""Plugin configuration schemas."""
|
|
2
|
+
|
|
3
|
+
from pathlib import Path
|
|
4
|
+
|
|
5
|
+
from pydantic import BaseModel, ConfigDict, Field, field_validator
|
|
6
|
+
|
|
7
|
+
|
|
8
|
+
class _BasePluginConfig(BaseModel):
    """Shared base for every plugin configuration variant.

    All plugin types inherit from this base class so that validation
    behaviour and error handling stay consistent: unknown keys are
    rejected, and attribute assignment is re-validated.
    """

    model_config = ConfigDict(
        extra="forbid",  # unknown fields are errors (catches manifest typos)
        validate_assignment=True,  # re-validate when attributes are assigned
        populate_by_name=True,  # accept field aliases as well as field names
    )

    # Fully-qualified class paths exported by this plugin; at least one required.
    provides: list[str] = Field(
        description="List of fully-qualified class paths this plugin provides",
        min_length=1,
    )

    @field_validator("provides")
    @classmethod
    def _validate_class_paths(cls, value: list[str]) -> list[str]:
        """Ensure every entry looks like 'package.module.ClassName'."""
        for entry in value:
            # A well-formed path is non-empty and contains at least one dot.
            if not entry or "." not in entry:
                msg = (
                    f"Invalid class path '{entry}'. "
                    "Must be fully-qualified (e.g., 'package.module.ClassName')"
                )
                raise ValueError(msg)
        return value
|
|
38
|
+
|
|
39
|
+
|
|
40
|
+
class GitPluginConfig(_BasePluginConfig):
    """Git repository plugin configuration.

    Supports:
    - SSH URLs: git@gitlab.com:user/repo.git
    - HTTPS URLs: https://github.com/user/repo.git
    - Git tags only: v1.2.3, v0.1.0-alpha, etc.

    Note: Branches and commit hashes are NOT supported for reproducibility.
    """

    repo: str = Field(
        description="Git repository URL (SSH or HTTPS)",
        min_length=1,
    )

    tag: str = Field(
        description="Git tag (e.g., v1.2.3, v0.1.0-alpha). "
        "Branches and commit hashes are not supported.",
        min_length=1,
    )

    @field_validator("repo")
    @classmethod
    def _validate_repo_url(cls, value: str) -> str:
        """Validate Git repository URL format.

        Raises:
            ValueError: If the URL does not start with a recognised prefix.
        """
        # str.startswith accepts a tuple of prefixes: one call instead of
        # an `or` chain of three startswith calls.
        if not value.startswith(("git@", "https://", "http://")):
            msg = f"Invalid repo URL '{value}'. Must start with 'git@', 'https://', or 'http://'"
            raise ValueError(msg)
        return value

    @field_validator("tag")
    @classmethod
    def _validate_tag(cls, value: str) -> str:
        """Validate Git tag is not empty; returns the whitespace-stripped tag."""
        stripped = value.strip()  # strip once instead of twice
        if not stripped:
            msg = "Git tag cannot be empty"
            raise ValueError(msg)
        return stripped
|
|
81
|
+
|
|
82
|
+
|
|
83
|
+
class LocalPluginConfig(_BasePluginConfig):
    """Local filesystem plugin configuration.

    Supports:
    - Absolute paths: /home/user/my-plugin
    - Relative paths: ../my-plugin (resolved relative to manifest file)
    - Windows paths: C:\\Users\\user\\my-plugin
    """

    path: str = Field(
        description="Absolute or relative path to plugin directory",
        min_length=1,
    )

    @field_validator("path")
    @classmethod
    def _validate_path(cls, value: str) -> str:
        """Validate path is not empty; returns the whitespace-stripped path."""
        stripped = value.strip()  # strip once instead of twice
        if not stripped:
            msg = "Path cannot be empty"
            raise ValueError(msg)
        return stripped

    def resolve_path(self, manifest_dir: Path) -> Path:
        """Resolve relative paths to absolute paths.

        Absolute paths are returned unchanged; relative paths are resolved
        against the directory that contains the manifest file.

        Args:
            manifest_dir: Directory containing the manifest file

        Returns:
            Absolute path to plugin directory
        """
        plugin_path = Path(self.path)
        if not plugin_path.is_absolute():
            plugin_path = (manifest_dir / plugin_path).resolve()
        return plugin_path
|
|
@@ -1,95 +1,95 @@
|
|
|
1
|
-
"""Plugin manifest schema."""
|
|
2
|
-
|
|
3
|
-
from pathlib import Path
|
|
4
|
-
from typing import Annotated
|
|
5
|
-
|
|
6
|
-
import yaml
|
|
7
|
-
from pydantic import BaseModel, ConfigDict, Field, field_validator
|
|
8
|
-
|
|
9
|
-
from cuvis_ai_schemas.plugin.config import GitPluginConfig, LocalPluginConfig
|
|
10
|
-
|
|
11
|
-
|
|
12
|
-
class PluginManifest(BaseModel):
|
|
13
|
-
"""Complete plugin manifest containing all plugin configurations.
|
|
14
|
-
|
|
15
|
-
This is the root configuration object validated when loading
|
|
16
|
-
a plugins.yaml file or dictionary.
|
|
17
|
-
"""
|
|
18
|
-
|
|
19
|
-
model_config = ConfigDict(
|
|
20
|
-
extra="forbid",
|
|
21
|
-
validate_assignment=True,
|
|
22
|
-
)
|
|
23
|
-
|
|
24
|
-
plugins: dict[
|
|
25
|
-
str,
|
|
26
|
-
Annotated[
|
|
27
|
-
GitPluginConfig | LocalPluginConfig,
|
|
28
|
-
Field(discriminator=None), # Pydantic will auto-detect based on fields
|
|
29
|
-
],
|
|
30
|
-
] = Field(
|
|
31
|
-
description="Map of plugin names to their configurations",
|
|
32
|
-
default_factory=dict,
|
|
33
|
-
)
|
|
34
|
-
|
|
35
|
-
@field_validator("plugins")
|
|
36
|
-
@classmethod
|
|
37
|
-
def _validate_plugin_names(cls, value: dict) -> dict:
|
|
38
|
-
"""Ensure plugin names are valid Python identifiers."""
|
|
39
|
-
for name in value.keys():
|
|
40
|
-
if not name.isidentifier():
|
|
41
|
-
msg = f"Invalid plugin name '{name}'. Must be a valid Python identifier"
|
|
42
|
-
raise ValueError(msg)
|
|
43
|
-
return value
|
|
44
|
-
|
|
45
|
-
@classmethod
|
|
46
|
-
def from_yaml(cls, yaml_path: Path) -> "PluginManifest":
|
|
47
|
-
"""Load and validate manifest from YAML file.
|
|
48
|
-
|
|
49
|
-
Args:
|
|
50
|
-
yaml_path: Path to YAML file
|
|
51
|
-
|
|
52
|
-
Returns:
|
|
53
|
-
Validated PluginManifest instance
|
|
54
|
-
|
|
55
|
-
Raises:
|
|
56
|
-
FileNotFoundError: If yaml_path doesn't exist
|
|
57
|
-
"""
|
|
58
|
-
if not yaml_path.exists():
|
|
59
|
-
msg = f"Plugin manifest not found: {yaml_path}"
|
|
60
|
-
raise FileNotFoundError(msg)
|
|
61
|
-
|
|
62
|
-
with yaml_path.open("r", encoding="utf-8") as f:
|
|
63
|
-
data = yaml.safe_load(f)
|
|
64
|
-
|
|
65
|
-
if not data:
|
|
66
|
-
return cls(plugins={})
|
|
67
|
-
|
|
68
|
-
return cls.model_validate(data)
|
|
69
|
-
|
|
70
|
-
@classmethod
|
|
71
|
-
def from_dict(cls, data: dict) -> "PluginManifest":
|
|
72
|
-
"""Load and validate manifest from dictionary.
|
|
73
|
-
|
|
74
|
-
Args:
|
|
75
|
-
data: Dictionary containing plugin configurations
|
|
76
|
-
|
|
77
|
-
Returns:
|
|
78
|
-
Validated PluginManifest instance
|
|
79
|
-
"""
|
|
80
|
-
return cls.model_validate(data)
|
|
81
|
-
|
|
82
|
-
def to_yaml(self, yaml_path: Path) -> None:
|
|
83
|
-
"""Save manifest to YAML file.
|
|
84
|
-
|
|
85
|
-
Args:
|
|
86
|
-
yaml_path: Path where YAML file should be saved
|
|
87
|
-
"""
|
|
88
|
-
yaml_path.parent.mkdir(parents=True, exist_ok=True)
|
|
89
|
-
with yaml_path.open("w", encoding="utf-8") as f:
|
|
90
|
-
yaml.safe_dump(
|
|
91
|
-
self.model_dump(exclude_none=True),
|
|
92
|
-
f,
|
|
93
|
-
sort_keys=False,
|
|
94
|
-
default_flow_style=False,
|
|
95
|
-
)
|
|
1
|
+
"""Plugin manifest schema."""
|
|
2
|
+
|
|
3
|
+
from pathlib import Path
|
|
4
|
+
from typing import Annotated
|
|
5
|
+
|
|
6
|
+
import yaml
|
|
7
|
+
from pydantic import BaseModel, ConfigDict, Field, field_validator
|
|
8
|
+
|
|
9
|
+
from cuvis_ai_schemas.plugin.config import GitPluginConfig, LocalPluginConfig
|
|
10
|
+
|
|
11
|
+
|
|
12
|
+
class PluginManifest(BaseModel):
    """Complete plugin manifest containing all plugin configurations.

    This is the root configuration object validated when loading
    a plugins.yaml file or dictionary.
    """

    model_config = ConfigDict(
        extra="forbid",  # reject unknown top-level keys
        validate_assignment=True,
    )

    # Plugin name -> configuration. The Git/Local union is resolved by
    # Pydantic matching on each model's distinguishing fields (repo/tag
    # vs. path); `discriminator=None` is the default and has no effect.
    plugins: dict[
        str,
        Annotated[
            GitPluginConfig | LocalPluginConfig,
            Field(discriminator=None),  # no-op: Pydantic auto-detects by fields
        ],
    ] = Field(
        description="Map of plugin names to their configurations",
        default_factory=dict,
    )

    @field_validator("plugins")
    @classmethod
    def _validate_plugin_names(cls, value: dict) -> dict:
        """Ensure plugin names are valid Python identifiers."""
        # Iterating a dict yields its keys; no need for .keys().
        for name in value:
            if not name.isidentifier():
                msg = f"Invalid plugin name '{name}'. Must be a valid Python identifier"
                raise ValueError(msg)
        return value

    @classmethod
    def from_yaml(cls, yaml_path: Path) -> "PluginManifest":
        """Load and validate manifest from YAML file.

        Args:
            yaml_path: Path to YAML file

        Returns:
            Validated PluginManifest instance

        Raises:
            FileNotFoundError: If yaml_path doesn't exist
        """
        if not yaml_path.exists():
            msg = f"Plugin manifest not found: {yaml_path}"
            raise FileNotFoundError(msg)

        with yaml_path.open("r", encoding="utf-8") as f:
            data = yaml.safe_load(f)

        # An empty or comments-only YAML file loads as None; treat it as
        # a manifest with no plugins rather than failing validation.
        if not data:
            return cls(plugins={})

        return cls.model_validate(data)

    @classmethod
    def from_dict(cls, data: dict) -> "PluginManifest":
        """Load and validate manifest from dictionary.

        Args:
            data: Dictionary containing plugin configurations

        Returns:
            Validated PluginManifest instance
        """
        return cls.model_validate(data)

    def to_yaml(self, yaml_path: Path) -> None:
        """Save manifest to YAML file.

        Parent directories are created if missing.

        Args:
            yaml_path: Path where YAML file should be saved
        """
        yaml_path.parent.mkdir(parents=True, exist_ok=True)
        with yaml_path.open("w", encoding="utf-8") as f:
            yaml.safe_dump(
                self.model_dump(exclude_none=True),
                f,
                sort_keys=False,  # preserve insertion order of plugins
                default_flow_style=False,  # block style for readability
            )
|
|
@@ -1,111 +1,111 @@
|
|
|
1
|
-
Metadata-Version: 2.4
|
|
2
|
-
Name: cuvis-ai-schemas
|
|
3
|
-
Version: 0.1.0
|
|
4
|
-
Summary: Lightweight schema definitions for cuvis-ai ecosystem
|
|
5
|
-
Author-email: "Cubert GmbH, Ulm, Germany" <SDK@cubert-gmbh.com>
|
|
6
|
-
License: Apache-2.0
|
|
7
|
-
Project-URL: Homepage, https://www.cubert-hyperspectral.com/
|
|
8
|
-
Project-URL: Repository, https://github.com/cubert-hyperspectral/cuvis-ai-schemas
|
|
9
|
-
Project-URL: Documentation, https://cubert-hyperspectral.github.io/cuvis-ai-schemas/
|
|
10
|
-
Project-URL: Issues, https://github.com/cubert-hyperspectral/cuvis-ai-schemas/issues
|
|
11
|
-
Classifier: Intended Audience :: Developers
|
|
12
|
-
Classifier: License :: OSI Approved :: Apache Software License
|
|
13
|
-
Classifier: Programming Language :: Python :: 3
|
|
14
|
-
Classifier: Programming Language :: Python :: 3.11
|
|
15
|
-
Classifier: Topic :: Scientific/Engineering
|
|
16
|
-
Classifier: Topic :: Software Development :: Libraries :: Python Modules
|
|
17
|
-
Requires-Python: <3.12,>=3.11
|
|
18
|
-
Description-Content-Type: text/markdown
|
|
19
|
-
License-File: LICENSE
|
|
20
|
-
Requires-Dist: pydantic<3.0.0,>=2.0.0
|
|
21
|
-
Requires-Dist: pyyaml>=6.0
|
|
22
|
-
Provides-Extra: proto
|
|
23
|
-
Requires-Dist: grpcio>=1.56.0; extra == "proto"
|
|
24
|
-
Requires-Dist: protobuf>=4.25.0; extra == "proto"
|
|
25
|
-
Requires-Dist: grpcio-tools>=1.56.0; extra == "proto"
|
|
26
|
-
Provides-Extra: torch
|
|
27
|
-
Requires-Dist: torch>=2.0.0; extra == "torch"
|
|
28
|
-
Requires-Dist: torchvision; extra == "torch"
|
|
29
|
-
Provides-Extra: numpy
|
|
30
|
-
Requires-Dist: numpy>=1.21.0; extra == "numpy"
|
|
31
|
-
Provides-Extra: lightning
|
|
32
|
-
Requires-Dist: pytorch-lightning>=2.0.0; extra == "lightning"
|
|
33
|
-
Provides-Extra: full
|
|
34
|
-
Requires-Dist: cuvis-ai-schemas[lightning,numpy,proto,torch]; extra == "full"
|
|
35
|
-
Provides-Extra: dev
|
|
36
|
-
Requires-Dist: ruff; extra == "dev"
|
|
37
|
-
Requires-Dist: pytest>=7.0.0; extra == "dev"
|
|
38
|
-
Requires-Dist: pytest-cov>=4.0.0; extra == "dev"
|
|
39
|
-
Requires-Dist: mypy>=1.8.0; extra == "dev"
|
|
40
|
-
Requires-Dist: types-protobuf; extra == "dev"
|
|
41
|
-
Requires-Dist: types-grpcio; extra == "dev"
|
|
42
|
-
Requires-Dist: types-PyYAML; extra == "dev"
|
|
43
|
-
Requires-Dist: interrogate>=1.7.0; extra == "dev"
|
|
44
|
-
Dynamic: license-file
|
|
45
|
-
|
|
46
|
-
# cuvis-ai-schemas
|
|
47
|
-
|
|
48
|
-
Lightweight schema definitions for the cuvis-ai ecosystem.
|
|
49
|
-
|
|
50
|
-
[](LICENSE)
|
|
51
|
-
[](https://www.python.org/downloads/)
|
|
52
|
-
|
|
53
|
-
## Overview
|
|
54
|
-
|
|
55
|
-
`cuvis-ai-schemas` is a centralized, dependency-light package of schema definitions used across the cuvis-ai ecosystem. It enables type-safe communication between services without heavy runtime requirements.
|
|
56
|
-
|
|
57
|
-
Key points:
|
|
58
|
-
- Minimal deps (pydantic + pyyaml)
|
|
59
|
-
- Full Pydantic validation
|
|
60
|
-
- Optional extras for proto, torch, numpy, lightning
|
|
61
|
-
|
|
62
|
-
## Installation
|
|
63
|
-
|
|
64
|
-
```bash
|
|
65
|
-
uv add cuvis-ai-schemas
|
|
66
|
-
uv add "cuvis-ai-schemas[proto]"
|
|
67
|
-
uv add "cuvis-ai-schemas[full]"
|
|
68
|
-
```
|
|
69
|
-
|
|
70
|
-
Extras:
|
|
71
|
-
- `proto`: gRPC and protobuf support
|
|
72
|
-
- `torch`: PyTorch dtype handling (validation only)
|
|
73
|
-
- `numpy`: NumPy array support
|
|
74
|
-
- `lightning`: PyTorch Lightning training configs
|
|
75
|
-
- `full`: All features
|
|
76
|
-
- `dev`: Development dependencies
|
|
77
|
-
|
|
78
|
-
## Usage
|
|
79
|
-
|
|
80
|
-
```python
|
|
81
|
-
from cuvis_ai_schemas.pipeline import PipelineConfig, NodeConfig
|
|
82
|
-
|
|
83
|
-
pipeline = PipelineConfig(
|
|
84
|
-
nodes=[NodeConfig(id="node_1", class_name="DataLoader", params={"batch_size": 32})],
|
|
85
|
-
connections=[],
|
|
86
|
-
)
|
|
87
|
-
|
|
88
|
-
pipeline_json = pipeline.to_json()
|
|
89
|
-
pipeline = PipelineConfig.from_json(pipeline_json)
|
|
90
|
-
```
|
|
91
|
-
|
|
92
|
-
## Development
|
|
93
|
-
|
|
94
|
-
```bash
|
|
95
|
-
uv sync --extra dev
|
|
96
|
-
uv run pytest tests/ -v
|
|
97
|
-
uv run ruff check cuvis_ai_schemas/ tests/
|
|
98
|
-
uv run ruff format cuvis_ai_schemas/ tests/
|
|
99
|
-
uv run mypy cuvis_ai_schemas/
|
|
100
|
-
```
|
|
101
|
-
|
|
102
|
-
## Contributing
|
|
103
|
-
|
|
104
|
-
Contributions are welcome. Please:
|
|
105
|
-
1. Ensure tests pass
|
|
106
|
-
2. Run ruff format and ruff check
|
|
107
|
-
3. Keep type hints and update docs as needed
|
|
108
|
-
|
|
109
|
-
## License
|
|
110
|
-
|
|
111
|
-
Licensed under the Apache License 2.0. See [LICENSE](LICENSE) for details.
|
|
1
|
+
Metadata-Version: 2.4
|
|
2
|
+
Name: cuvis-ai-schemas
|
|
3
|
+
Version: 0.1.2
|
|
4
|
+
Summary: Lightweight schema definitions for cuvis-ai ecosystem
|
|
5
|
+
Author-email: "Cubert GmbH, Ulm, Germany" <SDK@cubert-gmbh.com>
|
|
6
|
+
License: Apache-2.0
|
|
7
|
+
Project-URL: Homepage, https://www.cubert-hyperspectral.com/
|
|
8
|
+
Project-URL: Repository, https://github.com/cubert-hyperspectral/cuvis-ai-schemas
|
|
9
|
+
Project-URL: Documentation, https://cubert-hyperspectral.github.io/cuvis-ai-schemas/
|
|
10
|
+
Project-URL: Issues, https://github.com/cubert-hyperspectral/cuvis-ai-schemas/issues
|
|
11
|
+
Classifier: Intended Audience :: Developers
|
|
12
|
+
Classifier: License :: OSI Approved :: Apache Software License
|
|
13
|
+
Classifier: Programming Language :: Python :: 3
|
|
14
|
+
Classifier: Programming Language :: Python :: 3.11
|
|
15
|
+
Classifier: Topic :: Scientific/Engineering
|
|
16
|
+
Classifier: Topic :: Software Development :: Libraries :: Python Modules
|
|
17
|
+
Requires-Python: <3.12,>=3.11
|
|
18
|
+
Description-Content-Type: text/markdown
|
|
19
|
+
License-File: LICENSE
|
|
20
|
+
Requires-Dist: pydantic<3.0.0,>=2.0.0
|
|
21
|
+
Requires-Dist: pyyaml>=6.0
|
|
22
|
+
Provides-Extra: proto
|
|
23
|
+
Requires-Dist: grpcio>=1.56.0; extra == "proto"
|
|
24
|
+
Requires-Dist: protobuf>=4.25.0; extra == "proto"
|
|
25
|
+
Requires-Dist: grpcio-tools>=1.56.0; extra == "proto"
|
|
26
|
+
Provides-Extra: torch
|
|
27
|
+
Requires-Dist: torch>=2.0.0; extra == "torch"
|
|
28
|
+
Requires-Dist: torchvision; extra == "torch"
|
|
29
|
+
Provides-Extra: numpy
|
|
30
|
+
Requires-Dist: numpy>=1.21.0; extra == "numpy"
|
|
31
|
+
Provides-Extra: lightning
|
|
32
|
+
Requires-Dist: pytorch-lightning>=2.0.0; extra == "lightning"
|
|
33
|
+
Provides-Extra: full
|
|
34
|
+
Requires-Dist: cuvis-ai-schemas[lightning,numpy,proto,torch]; extra == "full"
|
|
35
|
+
Provides-Extra: dev
|
|
36
|
+
Requires-Dist: ruff; extra == "dev"
|
|
37
|
+
Requires-Dist: pytest>=7.0.0; extra == "dev"
|
|
38
|
+
Requires-Dist: pytest-cov>=4.0.0; extra == "dev"
|
|
39
|
+
Requires-Dist: mypy>=1.8.0; extra == "dev"
|
|
40
|
+
Requires-Dist: types-protobuf; extra == "dev"
|
|
41
|
+
Requires-Dist: types-grpcio; extra == "dev"
|
|
42
|
+
Requires-Dist: types-PyYAML; extra == "dev"
|
|
43
|
+
Requires-Dist: interrogate>=1.7.0; extra == "dev"
|
|
44
|
+
Dynamic: license-file
|
|
45
|
+
|
|
46
|
+
# cuvis-ai-schemas
|
|
47
|
+
|
|
48
|
+
Lightweight schema definitions for the cuvis-ai ecosystem.
|
|
49
|
+
|
|
50
|
+
[](LICENSE)
|
|
51
|
+
[](https://www.python.org/downloads/)
|
|
52
|
+
|
|
53
|
+
## Overview
|
|
54
|
+
|
|
55
|
+
`cuvis-ai-schemas` is a centralized, dependency-light package of schema definitions used across the cuvis-ai ecosystem. It enables type-safe communication between services without heavy runtime requirements.
|
|
56
|
+
|
|
57
|
+
Key points:
|
|
58
|
+
- Minimal deps (pydantic + pyyaml)
|
|
59
|
+
- Full Pydantic validation
|
|
60
|
+
- Optional extras for proto, torch, numpy, lightning
|
|
61
|
+
|
|
62
|
+
## Installation
|
|
63
|
+
|
|
64
|
+
```bash
|
|
65
|
+
uv add cuvis-ai-schemas
|
|
66
|
+
uv add "cuvis-ai-schemas[proto]"
|
|
67
|
+
uv add "cuvis-ai-schemas[full]"
|
|
68
|
+
```
|
|
69
|
+
|
|
70
|
+
Extras:
|
|
71
|
+
- `proto`: gRPC and protobuf support
|
|
72
|
+
- `torch`: PyTorch dtype handling (validation only)
|
|
73
|
+
- `numpy`: NumPy array support
|
|
74
|
+
- `lightning`: PyTorch Lightning training configs
|
|
75
|
+
- `full`: All features
|
|
76
|
+
- `dev`: Development dependencies
|
|
77
|
+
|
|
78
|
+
## Usage
|
|
79
|
+
|
|
80
|
+
```python
|
|
81
|
+
from cuvis_ai_schemas.pipeline import PipelineConfig, NodeConfig
|
|
82
|
+
|
|
83
|
+
pipeline = PipelineConfig(
|
|
84
|
+
nodes=[NodeConfig(id="node_1", class_name="DataLoader", params={"batch_size": 32})],
|
|
85
|
+
connections=[],
|
|
86
|
+
)
|
|
87
|
+
|
|
88
|
+
pipeline_json = pipeline.to_json()
|
|
89
|
+
pipeline = PipelineConfig.from_json(pipeline_json)
|
|
90
|
+
```
|
|
91
|
+
|
|
92
|
+
## Development
|
|
93
|
+
|
|
94
|
+
```bash
|
|
95
|
+
uv sync --extra dev
|
|
96
|
+
uv run pytest tests/ -v
|
|
97
|
+
uv run ruff check cuvis_ai_schemas/ tests/
|
|
98
|
+
uv run ruff format cuvis_ai_schemas/ tests/
|
|
99
|
+
uv run mypy cuvis_ai_schemas/
|
|
100
|
+
```
|
|
101
|
+
|
|
102
|
+
## Contributing
|
|
103
|
+
|
|
104
|
+
Contributions are welcome. Please:
|
|
105
|
+
1. Ensure tests pass
|
|
106
|
+
2. Run ruff format and ruff check
|
|
107
|
+
3. Keep type hints and update docs as needed
|
|
108
|
+
|
|
109
|
+
## License
|
|
110
|
+
|
|
111
|
+
Licensed under the Apache License 2.0. See [LICENSE](LICENSE) for details.
|