oceanprotocol-job-details 0.2.3__tar.gz → 0.3.2__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (26)
  1. oceanprotocol_job_details-0.3.2/PKG-INFO +114 -0
  2. oceanprotocol_job_details-0.3.2/README.md +88 -0
  3. oceanprotocol_job_details-0.3.2/oceanprotocol_job_details/__init__.py +4 -0
  4. oceanprotocol_job_details-0.3.2/oceanprotocol_job_details/di.py +39 -0
  5. oceanprotocol_job_details-0.3.2/oceanprotocol_job_details/domain.py +187 -0
  6. oceanprotocol_job_details-0.3.2/oceanprotocol_job_details/helpers.py +29 -0
  7. oceanprotocol_job_details-0.3.2/oceanprotocol_job_details/loaders/impl/ddo.py +24 -0
  8. oceanprotocol_job_details-0.3.2/oceanprotocol_job_details/loaders/impl/files.py +48 -0
  9. oceanprotocol_job_details-0.3.2/oceanprotocol_job_details/loaders/impl/job_details.py +28 -0
  10. {oceanprotocol_job_details-0.2.3 → oceanprotocol_job_details-0.3.2}/oceanprotocol_job_details/loaders/loader.py +0 -4
  11. oceanprotocol_job_details-0.3.2/oceanprotocol_job_details/ocean.py +51 -0
  12. oceanprotocol_job_details-0.3.2/oceanprotocol_job_details/settings.py +31 -0
  13. {oceanprotocol_job_details-0.2.3 → oceanprotocol_job_details-0.3.2}/pyproject.toml +9 -3
  14. oceanprotocol_job_details-0.2.3/PKG-INFO +0 -76
  15. oceanprotocol_job_details-0.2.3/README.md +0 -52
  16. oceanprotocol_job_details-0.2.3/oceanprotocol_job_details/__init__.py +0 -4
  17. oceanprotocol_job_details-0.2.3/oceanprotocol_job_details/di.py +0 -47
  18. oceanprotocol_job_details-0.2.3/oceanprotocol_job_details/loaders/impl/ddo.py +0 -32
  19. oceanprotocol_job_details-0.2.3/oceanprotocol_job_details/loaders/impl/files.py +0 -54
  20. oceanprotocol_job_details-0.2.3/oceanprotocol_job_details/loaders/impl/job_details.py +0 -35
  21. oceanprotocol_job_details-0.2.3/oceanprotocol_job_details/ocean.py +0 -304
  22. oceanprotocol_job_details-0.2.3/oceanprotocol_job_details/paths.py +0 -38
  23. {oceanprotocol_job_details-0.2.3 → oceanprotocol_job_details-0.3.2}/.gitignore +0 -0
  24. {oceanprotocol_job_details-0.2.3 → oceanprotocol_job_details-0.3.2}/LICENSE +0 -0
  25. {oceanprotocol_job_details-0.2.3 → oceanprotocol_job_details-0.3.2}/oceanprotocol_job_details/loaders/__init__.py +0 -0
  26. {oceanprotocol_job_details-0.2.3 → oceanprotocol_job_details-0.3.2}/oceanprotocol_job_details/loaders/impl/__init__.py +0 -0
@@ -0,0 +1,114 @@
+ Metadata-Version: 2.4
+ Name: oceanprotocol-job-details
+ Version: 0.3.2
+ Summary: A Python package to get details from OceanProtocol jobs
+ Project-URL: Homepage, https://github.com/AgrospAI/oceanprotocol-job-details
+ Project-URL: Issues, https://github.com/AgrospAI/oceanprotocol-job-details/issues
+ Author-email: Agrospai <agrospai@udl.cat>, Christian López García <christian.lopez@udl.cat>
+ License: Copyright 2025 Agrospai
+
+ Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the “Software”), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
+
+ The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
+
+ THE SOFTWARE IS PROVIDED “AS IS”, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+ License-File: LICENSE
+ Classifier: License :: OSI Approved :: MIT License
+ Classifier: Operating System :: OS Independent
+ Classifier: Programming Language :: Python :: 3
+ Requires-Python: >=3.10
+ Requires-Dist: aiofiles>=25.1.0
+ Requires-Dist: dependency-injector>=4.48.2
+ Requires-Dist: orjson>=3.11.3
+ Requires-Dist: pydantic-settings>=2.12.0
+ Requires-Dist: pydantic>=2.12.5
+ Description-Content-Type: text/markdown
+
+ A Python package to get details from OceanProtocol jobs
+
+ ---
+
+ ## Installation
+
+ ```bash
+ pip install oceanprotocol-job-details
+ ```
+
+ ```bash
+ uv add oceanprotocol-job-details
+ ```
+
+ ## Usage
+
+ As a simple library, it only requires importing `load_job_details` and calling it. It will:
+
+ 1. Fetch the parameters needed to populate the `JobDetails` instance from environment variables, or use the values passed to the function.
+ 1. Look for the files corresponding to the passed DIDs in the filesystem, following the [Ocean Protocol Structure](#oceanprotocol-structure), and load them into the `JobDetails` instance.
+
+ ### Minimal Example
+
+ ```python
+ from pydantic import BaseModel
+ from oceanprotocol_job_details import load_job_details
+
+ class InputParameters(BaseModel): ...
+
+ job_details = load_job_details({}, InputParameters)
+ ```
+
+ ### Custom Input Parameters
+
+ If our algorithm takes custom input parameters, we can load them as follows:
+
+ ```python
+ from pydantic import BaseModel
+ from oceanprotocol_job_details import load_job_details
+
+
+ class Foo(BaseModel):
+     bar: str
+
+
+ class InputParameters(BaseModel):
+     # Allows for nested types
+     foo: Foo
+
+
+ job_details = load_job_details({}, InputParameters)
+
+ # Usage
+ job_details.input_parameters.foo
+ job_details.input_parameters.foo.bar
+ ```
+
+ The values used to fill the custom `InputParameters` are parsed from the `algoCustomData.json` file located next to the input data directories.
+
+ ### Iterating Input Files the Clean Way
+
+ ```python
+ from pydantic import BaseModel
+ from oceanprotocol_job_details import load_job_details
+
+ class InputParameters(BaseModel): ...
+
+ job_details = load_job_details({}, InputParameters)
+
+ for idx, file_path in job_details.inputs():
+     ...
+
+ _, file_path = next(job_details.inputs())
+ ```
+
+ ## OceanProtocol Structure
+
+ ```bash
+ data                        # Root /data directory
+ ├── ddos                    # Contains the loaded datasets' DDOs
+ │   ├── 17feb...e42         # DDO file
+ │   └── ...                 # One DDO per loaded dataset
+ ├── inputs                  # Datasets dir
+ │   ├── 17feb...e42         # Directory named after its DID, holding data files named 0..X
+ │   │   └── 0               # Data file
+ │   └── algoCustomData.json # Custom algorithm input data
+ ├── logs                    # Algorithm output logs dir
+ └── outputs                 # Algorithm output files dir
+ ```
+
+ > **_Note:_** Although the algorithm may be passed multiple datasets, the current implementation supports only **one dataset** per execution, so a job will **normally** have **one DDO**, **one directory** inside `inputs`, and **one data file** named `0`.
@@ -0,0 +1,88 @@
+ A Python package to get details from OceanProtocol jobs
+
+ ---
+
+ ## Installation
+
+ ```bash
+ pip install oceanprotocol-job-details
+ ```
+
+ ```bash
+ uv add oceanprotocol-job-details
+ ```
+
+ ## Usage
+
+ As a simple library, it only requires importing `load_job_details` and calling it. It will:
+
+ 1. Fetch the parameters needed to populate the `JobDetails` instance from environment variables, or use the values passed to the function (see the sketch after this list).
+ 1. Look for the files corresponding to the passed DIDs in the filesystem, following the [Ocean Protocol Structure](#oceanprotocol-structure), and load them into the `JobDetails` instance.
+
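A minimal sketch of the second route, passing the values explicitly using the upper-case keys accepted by the package's `JobSettings` (`BASE_DIR`, `DIDS`, `TRANSFORMATION_DID`, and optionally `SECRET`); the DID values are placeholders:

```python
from pydantic import BaseModel
from oceanprotocol_job_details import load_job_details


class InputParameters(BaseModel): ...


job_details = load_job_details(
    {
        "BASE_DIR": "/data",                  # root data directory
        "DIDS": ["17feb...e42"],              # DIDs of the input datasets (placeholder)
        "TRANSFORMATION_DID": "17feb...e42",  # DID of the algorithm itself (placeholder)
    },
    InputParameters,
)
```

Keys left out of the dict fall back to the environment variables of the same name, which is what the examples below rely on by passing an empty dict.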
+ ### Minimal Example
+
+ ```python
+ from pydantic import BaseModel
+ from oceanprotocol_job_details import load_job_details
+
+ class InputParameters(BaseModel): ...
+
+ job_details = load_job_details({}, InputParameters)
+ ```
+
+ ### Custom Input Parameters
+
+ If our algorithm takes custom input parameters, we can load them as follows:
+
+ ```python
+ from pydantic import BaseModel
+ from oceanprotocol_job_details import load_job_details
+
+
+ class Foo(BaseModel):
+     bar: str
+
+
+ class InputParameters(BaseModel):
+     # Allows for nested types
+     foo: Foo
+
+
+ job_details = load_job_details({}, InputParameters)
+
+ # Usage
+ job_details.input_parameters.foo
+ job_details.input_parameters.foo.bar
+ ```
+
+ The values used to fill the custom `InputParameters` are parsed from the `algoCustomData.json` file located next to the input data directories.
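For the `Foo`/`InputParameters` models above, that file could contain `{"foo": {"bar": "some value"}}`. A small sketch of what the package does with it, roughly: the raw JSON is validated against your model, so the usual pydantic rules apply (values here are illustrative):

```python
# Illustrative contents of algoCustomData.json for the models defined above.
raw = '{"foo": {"bar": "some value"}}'

# JobDetails.input_parameters performs (roughly) this validation step:
params = InputParameters.model_validate_json(raw)
assert params.foo.bar == "some value"
```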
+
+ ### Iterating Input Files the Clean Way
+
+ ```python
+ from pydantic import BaseModel
+ from oceanprotocol_job_details import load_job_details
+
+ class InputParameters(BaseModel): ...
+
+ job_details = load_job_details({}, InputParameters)
+
+ for idx, file_path in job_details.inputs():
+     ...
+
+ _, file_path = next(job_details.inputs())
+ ```
+
+ ## OceanProtocol Structure
+
+ ```bash
+ data                        # Root /data directory
+ ├── ddos                    # Contains the loaded datasets' DDOs
+ │   ├── 17feb...e42         # DDO file
+ │   └── ...                 # One DDO per loaded dataset
+ ├── inputs                  # Datasets dir
+ │   ├── 17feb...e42         # Directory named after its DID, holding data files named 0..X
+ │   │   └── 0               # Data file
+ │   └── algoCustomData.json # Custom algorithm input data
+ ├── logs                    # Algorithm output logs dir
+ └── outputs                 # Algorithm output files dir
+ ```
+
+ > **_Note:_** Although the algorithm may be passed multiple datasets, the current implementation supports only **one dataset** per execution, so a job will **normally** have **one DDO**, **one directory** inside `inputs`, and **one data file** named `0`.
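The locations above are centralised in the package's internal `Paths` helper (`oceanprotocol_job_details/domain.py`), which defaults to the `/data` root. A short sketch of the resolved paths, shown only as a reference for reproducing the layout locally (internal API, not part of the documented surface):

```python
from oceanprotocol_job_details.domain import Paths

paths = Paths()  # defaults to Path("/data")

paths.ddos                         # /data/ddos
paths.inputs                       # /data/inputs
paths.outputs                      # /data/outputs
paths.logs                         # /data/logs
paths.algorithm_custom_parameters  # /data/inputs/algoCustomData.json
```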
@@ -0,0 +1,4 @@
+ from .helpers import create_container, load_job_details
+ from .ocean import JobDetails
+
+ __all__ = ["JobDetails", "load_job_details", "create_container"]
@@ -0,0 +1,39 @@
+ from typing import Generic, TypeVar
+ from dependency_injector import containers, providers
+ from pydantic import BaseModel
+
+ from oceanprotocol_job_details.loaders.impl.ddo import DDOLoader
+ from oceanprotocol_job_details.loaders.impl.files import FilesLoader
+ from oceanprotocol_job_details.loaders.impl.job_details import JobDetailsLoader
+ from oceanprotocol_job_details.domain import Paths
+
+
+ InputParametersT = TypeVar("InputParametersT", bound=BaseModel)
+
+
+ class Container(containers.DeclarativeContainer, Generic[InputParametersT]):
+     config = providers.Configuration()
+
+     paths = providers.Singleton(Paths, base_dir=config.base_dir)
+
+     file_loader = providers.Singleton(
+         FilesLoader,
+         dids=config.dids,
+         transformation_did=config.transformation_did,
+         paths=paths,
+         logger=config.logger,
+     )
+
+     files = providers.Factory(lambda loader: loader.load(), loader=file_loader)
+     ddo_loader = providers.Factory(DDOLoader, files=files)
+     ddos = providers.Factory(lambda loader: loader.load(), loader=ddo_loader)
+
+     job_details_loader: providers.Factory[JobDetailsLoader[InputParametersT]] = (
+         providers.Factory(
+             JobDetailsLoader,
+             files=files,
+             secret=config.secret,
+             paths=paths,
+             ddos=ddos,
+         )
+     )
@@ -0,0 +1,187 @@
+ # mypy: disable-error-code=explicit-any
+ from dataclasses import InitVar, dataclass, field
+ from pathlib import Path
+ from typing import Generator, List, Optional, Sequence, TypeAlias, TypeVar
+
+ from pydantic import BaseModel, ConfigDict, Field, JsonValue
+
+
+ class Credential(BaseModel):
+     type: str
+     values: list[str]
+
+
+ class Credentials(BaseModel):
+     allow: list[Credential]
+     deny: list[Credential]
+
+
+ class DockerContainer(BaseModel):
+     image: str
+     tag: str
+     entrypoint: str
+
+
+ class Algorithm(BaseModel):
+     container: DockerContainer
+     language: str
+     version: str
+     consumerParameters: JsonValue
+
+
+ class Metadata(BaseModel):
+     description: str
+     name: str
+     type: str
+     author: str
+     license: str
+     algorithm: Optional[Algorithm] = None
+     tags: Optional[list[str]] = None
+     created: Optional[str] = None
+     updated: Optional[str] = None
+     copyrightHolder: Optional[str] = None
+     links: Optional[list[str]] = None
+     contentLanguage: Optional[str] = None
+     categories: Optional[list[str]] = None
+
+
+ class ConsumerParameters(BaseModel):
+     name: str
+     type: str
+     label: str
+     required: bool
+     description: str
+     default: str
+     option: Optional[list[str]] = None
+
+
+ class Service(BaseModel):
+     id: str
+     type: str
+     timeout: int
+     files: str
+     datatokenAddress: str
+     serviceEndpoint: str
+     additionalInformation: Optional[str] = None
+     name: Optional[str] = None
+     description: Optional[str] = None
+
+
+ class Event(BaseModel):
+     tx: str
+     block: int
+     from_: str = Field(alias="from")
+     contract: str
+     datetime: str
+
+     model_config = ConfigDict(populate_by_name=True)
+
+
+ class NFT(BaseModel):
+     address: str
+     name: str
+     symbol: str
+     state: int
+     tokenURI: str
+     owner: str
+     created: str
+
+
+ class DataToken(BaseModel):
+     address: str
+     name: str
+     symbol: str
+     serviceId: str
+
+
+ class Price(BaseModel):
+     value: int
+
+
+ class Stats(BaseModel):
+     allocated: int
+     orders: int
+     price: Price
+
+
+ class Purgatory(BaseModel):
+     state: bool
+
+
+ class DDO(BaseModel):
+     id: str
+     context: list[str] = Field(alias="@context")
+     nftAddress: str
+     chainId: int
+     version: str
+     metadata: Metadata
+     services: list[Service]
+     credentials: Credentials
+     event: Event
+     nft: NFT
+     datatokens: list[DataToken]
+     stats: Stats
+     purgatory: Purgatory
+
+     model_config = ConfigDict(populate_by_name=True)
+
+
+ @dataclass(frozen=True)
+ class DIDPaths:
+     did: str
+     ddo: Path = field(repr=False)
+
+     files: InitVar[Generator[Path, None, None]]
+
+     _input: List[Path] = field(init=False, repr=False)
+
+     def __post_init__(self, files: Generator[Path, None, None]) -> None:
+         assert self.ddo.exists(), f"DDO {self.ddo} does not exist"
+
+         object.__setattr__(self, "_input", list(files))
+
+     @property
+     def input_files(self) -> List[Path]:
+         return self._input
+
+     def __len__(self) -> int:
+         return len(self._input)
+
+
+ Files: TypeAlias = Sequence[DIDPaths]
+
+
+ @dataclass(frozen=True)
+ class Paths:
+     """Configuration class for the Ocean Protocol Job Details"""
+
+     base_dir: InitVar[Path | None] = None
+
+     _base: Path = field(init=False, repr=False)
+
+     def __post_init__(self, base_dir: Path | None) -> None:
+         object.__setattr__(self, "_base", base_dir if base_dir else Path("/data"))
+
+     @property
+     def data(self) -> Path:
+         return self._base
+
+     @property
+     def inputs(self) -> Path:
+         return self.data / "inputs"
+
+     @property
+     def ddos(self) -> Path:
+         return self.data / "ddos"
+
+     @property
+     def outputs(self) -> Path:
+         return self.data / "outputs"
+
+     @property
+     def logs(self) -> Path:
+         return self.data / "logs"
+
+     @property
+     def algorithm_custom_parameters(self) -> Path:
+         return self.inputs / "algoCustomData.json"
@@ -0,0 +1,29 @@
+ from typing import Any, Dict, Type, TypeVar
+
+ from pydantic import BaseModel, JsonValue
+
+ from oceanprotocol_job_details.di import Container
+ from oceanprotocol_job_details.ocean import JobDetails
+ from oceanprotocol_job_details.settings import JobSettings
+
+ InputParametersT = TypeVar("InputParametersT", bound=BaseModel)
+
+
+ def create_container(config: Dict[str, Any]) -> Container[InputParametersT]:  # type: ignore[explicit-any]
+     """Return a fully configured Container from a config dict."""
+     container = Container[InputParametersT]()
+     settings = JobSettings(**config)
+     container.config.from_pydantic(settings)
+     return container
+
+
+ def load_job_details(
+     config: Dict[str, JsonValue],
+     input_type: Type[InputParametersT],
+ ) -> JobDetails[InputParametersT]:
+     """
+     Load JobDetails for a given input_type using the config.
+     Returns a fully initialized JobDetails instance.
+     """
+     container: Container[InputParametersT] = create_container(config)
+     return container.job_details_loader(input_type=input_type).load()
@@ -0,0 +1,24 @@
+ from __future__ import annotations
+
+ from dataclasses import InitVar, dataclass, field
+ from pathlib import Path
+ from typing import final
+
+ from oceanprotocol_job_details.domain import DDO, Files
+
+
+ @final
+ @dataclass(frozen=True)
+ class DDOLoader:
+     files: InitVar[Files]
+     """The files to load the DDOs from"""
+
+     _ddo_paths: list[Path] = field(init=False)
+
+     def __post_init__(self, files: Files) -> None:
+         assert files is not None and len(files) != 0, "Missing files"
+
+         object.__setattr__(self, "_ddo_paths", [f.ddo for f in files])
+
+     def load(self) -> list[DDO]:
+         return [DDO.model_validate_json(p.read_text()) for p in self._ddo_paths]
@@ -0,0 +1,48 @@
+ from __future__ import annotations
+
+ from dataclasses import InitVar, dataclass, field
+ from logging import Logger
+ from pathlib import Path
+ from typing import Literal, final
+
+ from oceanprotocol_job_details.domain import DIDPaths, Files, Paths
+
+
+ @final
+ @dataclass(frozen=True)
+ class FilesLoader:
+     paths: Paths
+     """Path configurations of the project"""
+
+     logger: Logger = field(repr=False)
+     """Logger to use"""
+
+     dids: list[str]
+     """Input DIDs"""
+
+     transformation_did: InitVar[str | None] = None
+     """DID for the transformation algorithm"""
+
+     _transformation_did: str = field(init=False)
+
+     def __post_init__(self, transformation_did: str | None) -> None:
+         object.__setattr__(self, "_transformation_did", transformation_did)
+
+         assert self.dids, "Missing input DIDs"
+
+     def calculate_path(self, did: str, path_type: Literal["input", "ddo"]) -> Path:
+         match path_type:
+             case "ddo":
+                 return self.paths.ddos / did
+             case "input":
+                 return self.paths.inputs / did
+
+     def load(self) -> Files:
+         return [
+             DIDPaths(
+                 did=did,
+                 ddo=self.calculate_path(did, "ddo"),
+                 files=self.calculate_path(did, "input").iterdir(),
+             )
+             for did in self.dids
+         ]
@@ -0,0 +1,28 @@
+ from dataclasses import dataclass, field
+ from typing import Generic, Type, TypeVar, final
+
+ from pydantic import BaseModel
+
+ from oceanprotocol_job_details.domain import DDO, Files, Paths
+ from oceanprotocol_job_details.ocean import JobDetails
+
+ T = TypeVar("T", bound=BaseModel)
+
+
+ @final
+ @dataclass(frozen=True)
+ class JobDetailsLoader(Generic[T]):
+     input_type: Type[T] = field(repr=False)
+     files: Files
+     secret: str
+     paths: Paths
+     ddos: list[DDO]
+
+     def load(self) -> JobDetails[T]:
+         return JobDetails[T](
+             files=self.files,
+             secret=self.secret,
+             ddos=self.ddos,
+             paths=self.paths,
+             input_type=self.input_type,
+         )
@@ -4,10 +4,6 @@ T = TypeVar("T", covariant=True)
 
 
  class Loader(Protocol[T]):
-
      def load(self) -> T:
          """Load an instance of the given type"""
          ...
-
-
- del T
@@ -0,0 +1,51 @@
+ from __future__ import annotations
+
+ import asyncio
+ from functools import cached_property
+ from pathlib import Path
+ from typing import Generator, Generic, Tuple, Type, TypeVar, final
+
+ import aiofiles
+ from pydantic import BaseModel, ConfigDict, Secret, model_validator
+
+ from oceanprotocol_job_details.domain import DDO, Files, Paths
+
+ InputParemetersT = TypeVar("InputParemetersT", bound=BaseModel)
+
+
+ @final
+ class JobDetails(BaseModel, Generic[InputParemetersT]):  # type: ignore[explicit-any]
+     files: Files
+     ddos: list[DDO]
+     paths: Paths
+     input_type: Type[InputParemetersT]
+     secret: Secret[str] | None = None
+
+     model_config = ConfigDict(arbitrary_types_allowed=True, frozen=True)
+
+     @model_validator(mode="after")
+     def validate_type(self) -> JobDetails[InputParemetersT]:
+         assert issubclass(self.input_type, BaseModel), (
+             f"{self.input_type} must be subtype of pydantic.BaseModel"
+         )
+         return self
+
+     def inputs(self) -> Generator[Tuple[int, Path], None, None]:
+         yield from (
+             (idx, file)
+             for idx, files in enumerate(self.files)
+             for file in files.input_files
+         )
+
+     @cached_property
+     def input_parameters(self) -> InputParemetersT:
+         return asyncio.run(self.ainput_parameters())
+
+     async def ainput_parameters(self) -> InputParemetersT:
+         path = self.paths.algorithm_custom_parameters
+         async with aiofiles.open(path) as f:
+             raw = await f.read()
+
+         raw = raw.strip()
+         assert raw, f"Empty file {path}"
+         return self.input_type.model_validate_json(raw)
@@ -0,0 +1,31 @@
+ # mypy: disable-error-code=call-overload
+ from logging import Logger, getLogger
+ from pathlib import Path
+
+ import orjson
+ from pydantic import Field, field_validator
+ from pydantic_settings import BaseSettings, SettingsConfigDict
+
+
+ class JobSettings(BaseSettings):  # type: ignore[explicit-any]
+     base_dir: Path = Field(alias="BASE_DIR")
+     dids: list[str] = Field(alias="DIDS")
+     transformation_did: str = Field(alias="TRANSFORMATION_DID")
+     secret: str | None = Field(default=None, alias="SECRET")
+     logger: Logger = Field(default_factory=lambda: getLogger(__name__))
+
+     model_config = SettingsConfigDict(
+         extra="forbid",
+         validate_default=True,
+         populate_by_name=True,
+         arbitrary_types_allowed=True,
+     )
+
+     @field_validator("dids", mode="before")
+     @classmethod
+     def split_dids(cls, v: list[str] | str) -> list[str]:
+         if isinstance(v, str):
+             data = orjson.loads(v)
+             assert isinstance(data, list)
+             return data
+         return v
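This is where the environment-variable route from the README is implemented: `JobSettings` reads the upper-case aliases, and `split_dids` accepts the DID list either as an actual list or as a JSON-encoded string. A small usage sketch, assuming the variables are set the way a compute job would receive them (placeholder values):

```python
import os

from oceanprotocol_job_details.settings import JobSettings

# Placeholder values; in a real job these come from the Ocean Protocol runtime.
os.environ["BASE_DIR"] = "/data"
os.environ["DIDS"] = '["17feb...e42"]'  # JSON string, normalised by split_dids
os.environ["TRANSFORMATION_DID"] = "17feb...e42"

settings = JobSettings()
assert settings.dids == ["17feb...e42"]
```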