oceanprotocol-job-details 0.2.8__tar.gz → 0.3.1__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (23) hide show
  1. {oceanprotocol_job_details-0.2.8 → oceanprotocol_job_details-0.3.1}/PKG-INFO +4 -2
  2. oceanprotocol_job_details-0.3.1/oceanprotocol_job_details/__init__.py +4 -0
  3. oceanprotocol_job_details-0.3.1/oceanprotocol_job_details/di.py +39 -0
  4. oceanprotocol_job_details-0.3.1/oceanprotocol_job_details/domain.py +187 -0
  5. oceanprotocol_job_details-0.3.1/oceanprotocol_job_details/loaders/impl/ddo.py +24 -0
  6. oceanprotocol_job_details-0.3.1/oceanprotocol_job_details/loaders/impl/files.py +48 -0
  7. oceanprotocol_job_details-0.3.1/oceanprotocol_job_details/loaders/impl/job_details.py +28 -0
  8. {oceanprotocol_job_details-0.2.8 → oceanprotocol_job_details-0.3.1}/oceanprotocol_job_details/loaders/loader.py +0 -4
  9. oceanprotocol_job_details-0.3.1/oceanprotocol_job_details/ocean.py +51 -0
  10. oceanprotocol_job_details-0.3.1/oceanprotocol_job_details/settings.py +31 -0
  11. {oceanprotocol_job_details-0.2.8 → oceanprotocol_job_details-0.3.1}/pyproject.toml +7 -4
  12. oceanprotocol_job_details-0.2.8/oceanprotocol_job_details/__init__.py +0 -4
  13. oceanprotocol_job_details-0.2.8/oceanprotocol_job_details/di.py +0 -48
  14. oceanprotocol_job_details-0.2.8/oceanprotocol_job_details/loaders/impl/ddo.py +0 -32
  15. oceanprotocol_job_details-0.2.8/oceanprotocol_job_details/loaders/impl/files.py +0 -64
  16. oceanprotocol_job_details-0.2.8/oceanprotocol_job_details/loaders/impl/job_details.py +0 -35
  17. oceanprotocol_job_details-0.2.8/oceanprotocol_job_details/ocean.py +0 -313
  18. oceanprotocol_job_details-0.2.8/oceanprotocol_job_details/paths.py +0 -38
  19. {oceanprotocol_job_details-0.2.8 → oceanprotocol_job_details-0.3.1}/.gitignore +0 -0
  20. {oceanprotocol_job_details-0.2.8 → oceanprotocol_job_details-0.3.1}/LICENSE +0 -0
  21. {oceanprotocol_job_details-0.2.8 → oceanprotocol_job_details-0.3.1}/README.md +0 -0
  22. {oceanprotocol_job_details-0.2.8 → oceanprotocol_job_details-0.3.1}/oceanprotocol_job_details/loaders/__init__.py +0 -0
  23. {oceanprotocol_job_details-0.2.8 → oceanprotocol_job_details-0.3.1}/oceanprotocol_job_details/loaders/impl/__init__.py +0 -0
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: oceanprotocol-job-details
3
- Version: 0.2.8
3
+ Version: 0.3.1
4
4
  Summary: A Python package to get details from OceanProtocol jobs
5
5
  Project-URL: Homepage, https://github.com/AgrospAI/oceanprotocol-job-details
6
6
  Project-URL: Issues, https://github.com/AgrospAI/oceanprotocol-job-details/issues
@@ -17,9 +17,11 @@ Classifier: License :: OSI Approved :: MIT License
17
17
  Classifier: Operating System :: OS Independent
18
18
  Classifier: Programming Language :: Python :: 3
19
19
  Requires-Python: >=3.10
20
- Requires-Dist: dataclasses-json>=0.6.7
20
+ Requires-Dist: aiofiles>=25.1.0
21
21
  Requires-Dist: dependency-injector>=4.48.2
22
22
  Requires-Dist: orjson>=3.11.3
23
+ Requires-Dist: pydantic-settings>=2.12.0
24
+ Requires-Dist: pydantic>=2.12.5
23
25
  Description-Content-Type: text/markdown
24
26
 
25
27
  A Python package to get details from OceanProtocol jobs
@@ -0,0 +1,4 @@
1
from .ocean import JobDetails


# FIX: ``__all__`` must list *names* (strings), not the objects themselves —
# star-imports and static tooling expect strings here.
__all__ = ["JobDetails"]
@@ -0,0 +1,39 @@
1
from typing import Generic, TypeVar

from dependency_injector import containers, providers
from pydantic import BaseModel

from oceanprotocol_job_details.domain import Paths
from oceanprotocol_job_details.loaders.impl.ddo import DDOLoader
from oceanprotocol_job_details.loaders.impl.files import FilesLoader
from oceanprotocol_job_details.loaders.impl.job_details import JobDetailsLoader

InputParametersT = TypeVar("InputParametersT", bound=BaseModel)


def _resolve(loader):
    """Run a loader's ``load()`` so a loader provider yields its loaded value."""
    return loader.load()


class Container(containers.DeclarativeContainer, Generic[InputParametersT]):
    """Dependency-injection wiring for the job-details loading pipeline."""

    config = providers.Configuration()

    # One shared view of the filesystem layout is enough for the whole job.
    paths = providers.Singleton(Paths, base_dir=config.base_dir)

    file_loader = providers.Singleton(
        FilesLoader,
        dids=config.dids,
        transformation_did=config.transformation_did,
        paths=paths,
        logger=config.logger,
    )

    # Turn loader providers into their loaded results.
    files = providers.Factory(_resolve, loader=file_loader)
    ddo_loader = providers.Factory(DDOLoader, files=files)
    ddos = providers.Factory(_resolve, loader=ddo_loader)

    # ``input_type`` is supplied by the caller when the provider is resolved.
    job_details_loader: providers.Factory[JobDetailsLoader[InputParametersT]] = (
        providers.Factory(
            JobDetailsLoader,
            files=files,
            secret=config.secret,
            paths=paths,
            ddos=ddos,
        )
    )
@@ -0,0 +1,187 @@
1
# mypy: disable-error-code=explicit-any
from dataclasses import InitVar, dataclass, field
from pathlib import Path
from typing import Generator, List, Optional, Sequence, TypeAlias, TypeVar

from pydantic import BaseModel, ConfigDict, Field, JsonValue


class Credential(BaseModel):
    """One access-control entry: a credential type plus its accepted values."""

    type: str
    values: list[str]


class Credentials(BaseModel):
    """Allow/deny credential lists controlling access to an asset."""

    allow: list[Credential]
    deny: list[Credential]


class DockerContainer(BaseModel):
    """Docker image reference used to run an algorithm."""

    image: str
    tag: str
    entrypoint: str


class Algorithm(BaseModel):
    """Algorithm description: its container plus arbitrary consumer parameters."""

    container: DockerContainer
    language: str
    version: str
    consumerParameters: JsonValue


class Metadata(BaseModel):
    """Asset metadata section of a DDO; trailing fields are optional."""

    description: str
    name: str
    type: str
    author: str
    license: str
    algorithm: Optional[Algorithm] = None
    tags: Optional[list[str]] = None
    created: Optional[str] = None
    updated: Optional[str] = None
    copyrightHolder: Optional[str] = None
    links: Optional[list[str]] = None
    contentLanguage: Optional[str] = None
    categories: Optional[list[str]] = None


class ConsumerParameters(BaseModel):
    """Declaration of one user-supplied parameter for an algorithm."""

    name: str
    type: str
    label: str
    required: bool
    description: str
    default: str
    option: Optional[list[str]] = None


class Service(BaseModel):
    """A service offered for the asset (e.g. access or compute)."""

    id: str
    type: str
    timeout: int
    files: str
    datatokenAddress: str
    serviceEndpoint: str
    additionalInformation: Optional[str] = None
    name: Optional[str] = None
    description: Optional[str] = None


class Event(BaseModel):
    """On-chain event that created or updated the DDO."""

    tx: str
    block: int
    # "from" is a Python keyword, so the JSON field is aliased to ``from_``.
    from_: str = Field(alias="from")
    contract: str
    datetime: str

    model_config = ConfigDict(populate_by_name=True)


class NFT(BaseModel):
    """The NFT representing ownership of the asset."""

    address: str
    name: str
    symbol: str
    state: int
    tokenURI: str
    owner: str
    created: str


class DataToken(BaseModel):
    """Datatoken granting access to one of the asset's services."""

    address: str
    name: str
    symbol: str
    serviceId: str


class Price(BaseModel):
    """Price of the asset as a plain integer value."""

    value: int


class Stats(BaseModel):
    """Usage statistics of the asset."""

    allocated: int
    orders: int
    price: Price


class Purgatory(BaseModel):
    """Whether the asset is currently flagged ("in purgatory")."""

    state: bool


class DDO(BaseModel):
    """Decentralized Data Object describing an Ocean Protocol asset."""

    id: str
    # "@context" is not a valid Python identifier, hence the alias.
    context: list[str] = Field(alias="@context")
    nftAddress: str
    chainId: int
    version: str
    metadata: Metadata
    services: list[Service]
    credentials: Credentials
    event: Event
    nft: NFT
    datatokens: list[DataToken]
    stats: Stats
    purgatory: Purgatory

    model_config = ConfigDict(populate_by_name=True)
128
+
129
+ @dataclass(frozen=True)
130
+ class DIDPaths:
131
+ did: str
132
+ ddo: Path = field(repr=False)
133
+
134
+ files: InitVar[Generator[Path, None, None]]
135
+
136
+ _input: List[Path] = field(init=False, repr=False)
137
+
138
+ def __post_init__(self, files: Generator[Path, None, None]) -> None:
139
+ assert self.ddo.exists(), f"DDO {self.ddo} does not exist"
140
+
141
+ object.__setattr__(self, "_input", list(files))
142
+
143
+ @property
144
+ def input_files(self) -> List[Path]:
145
+ return self._input
146
+
147
+ def __len__(self) -> int:
148
+ return len(self._input)
149
+
150
+
151
+ Files: TypeAlias = Sequence[DIDPaths]
152
+
153
+
154
+ @dataclass(frozen=True)
155
+ class Paths:
156
+ """Configuration class for the Ocean Protocol Job Details"""
157
+
158
+ base_dir: InitVar[Path | None] = None
159
+
160
+ _base: Path = field(init=False, repr=False)
161
+
162
+ def __post_init__(self, base_dir: Path | None) -> None:
163
+ object.__setattr__(self, "_base", base_dir if base_dir else Path("/data"))
164
+
165
+ @property
166
+ def data(self) -> Path:
167
+ return self._base
168
+
169
+ @property
170
+ def inputs(self) -> Path:
171
+ return self.data / "inputs"
172
+
173
+ @property
174
+ def ddos(self) -> Path:
175
+ return self.data / "ddos"
176
+
177
+ @property
178
+ def outputs(self) -> Path:
179
+ return self.data / "outputs"
180
+
181
+ @property
182
+ def logs(self) -> Path:
183
+ return self.data / "logs"
184
+
185
+ @property
186
+ def algorithm_custom_parameters(self) -> Path:
187
+ return self.inputs / "algoCustomData.json"
@@ -0,0 +1,24 @@
1
+ from __future__ import annotations
2
+
3
+ from dataclasses import InitVar, dataclass, field
4
+ from pathlib import Path
5
+ from typing import final
6
+
7
+ from oceanprotocol_job_details.domain import DDO, Files
8
+
9
+
10
+ @final
11
+ @dataclass(frozen=True)
12
+ class DDOLoader:
13
+ files: InitVar[Files]
14
+ """The files to load the DDOs from"""
15
+
16
+ _ddo_paths: list[Path] = field(init=False)
17
+
18
+ def __post_init__(self, files: Files) -> None:
19
+ assert files is not None and len(files) != 0, "Missing files"
20
+
21
+ object.__setattr__(self, "_ddo_paths", [f.ddo for f in files])
22
+
23
+ def load(self) -> list[DDO]:
24
+ return [DDO.model_validate_json(p.read_text()) for p in self._ddo_paths]
@@ -0,0 +1,48 @@
1
+ from __future__ import annotations
2
+
3
+ from dataclasses import InitVar, dataclass, field
4
+ from logging import Logger
5
+ from pathlib import Path
6
+ from typing import Literal, final
7
+
8
+ from oceanprotocol_job_details.domain import DIDPaths, Files, Paths
9
+
10
+
11
+ @final
12
+ @dataclass(frozen=True)
13
+ class FilesLoader:
14
+ paths: Paths
15
+ """Path configurations of the project"""
16
+
17
+ logger: Logger = field(repr=False)
18
+ """Logger to use"""
19
+
20
+ dids: list[str]
21
+ """Input DIDs"""
22
+
23
+ transformation_did: InitVar[str | None] = None
24
+ """DID for the transformation algorithm"""
25
+
26
+ _transformation_did: str = field(init=False)
27
+
28
+ def __post_init__(self, transformation_did: str | None) -> None:
29
+ object.__setattr__(self, "_transformation_did", transformation_did)
30
+
31
+ assert self.dids, "Missing input DIDs"
32
+
33
+ def calculate_path(self, did: str, path_type: Literal["input", "ddo"]) -> Path:
34
+ match path_type:
35
+ case "ddo":
36
+ return self.paths.ddos / did
37
+ case "input":
38
+ return self.paths.inputs / did
39
+
40
+ def load(self) -> Files:
41
+ return [
42
+ DIDPaths(
43
+ did=did,
44
+ ddo=self.calculate_path(did, "ddo"),
45
+ files=self.calculate_path(did, "input").iterdir(),
46
+ )
47
+ for did in self.dids
48
+ ]
@@ -0,0 +1,28 @@
1
from dataclasses import dataclass, field
from typing import Generic, Type, TypeVar, final

from pydantic import BaseModel

from oceanprotocol_job_details.domain import DDO, Files, Paths
from oceanprotocol_job_details.ocean import JobDetails

T = TypeVar("T", bound=BaseModel)


@final
@dataclass(frozen=True)
class JobDetailsLoader(Generic[T]):
    """Assembles a ``JobDetails`` from previously loaded files, DDOs and paths."""

    input_type: Type[T] = field(repr=False)
    files: Files
    secret: str
    paths: Paths
    ddos: list[DDO]

    def load(self) -> JobDetails[T]:
        """Bundle the loaded pieces into a typed ``JobDetails`` instance."""
        details: JobDetails[T] = JobDetails[T](
            input_type=self.input_type,
            files=self.files,
            ddos=self.ddos,
            paths=self.paths,
            secret=self.secret,
        )
        return details
@@ -4,10 +4,6 @@ T = TypeVar("T", covariant=True)
4
4
 
5
5
 
6
6
  class Loader(Protocol[T]):
7
-
8
7
  def load(self) -> T:
9
8
  """Load an instance of the given type"""
10
9
  ...
11
-
12
-
13
- del T
@@ -0,0 +1,51 @@
1
+ from __future__ import annotations
2
+
3
+ import asyncio
4
+ from functools import cached_property
5
+ from pathlib import Path
6
+ from typing import Generator, Generic, Tuple, Type, TypeVar, final
7
+
8
+ import aiofiles
9
+ from pydantic import BaseModel, ConfigDict, Secret, model_validator
10
+
11
+ from oceanprotocol_job_details.domain import DDO, Files, Paths
12
+
13
+ InputParemetersT = TypeVar("InputParemetersT", bound=BaseModel)
14
+
15
+
16
+ @final
17
+ class JobDetails(BaseModel, Generic[InputParemetersT]): # type: ignore[explicit-any]
18
+ files: Files
19
+ ddos: list[DDO]
20
+ paths: Paths
21
+ input_type: Type[InputParemetersT]
22
+ secret: Secret[str] | None = None
23
+
24
+ model_config = ConfigDict(arbitrary_types_allowed=True, frozen=True)
25
+
26
+ @model_validator(mode="after")
27
+ def validate_type(self) -> JobDetails[InputParemetersT]:
28
+ assert issubclass(self.input_type, BaseModel), (
29
+ f"{self.input_type} must be subtype of pydantic.BaseModel"
30
+ )
31
+ return self
32
+
33
+ def inputs(self) -> Generator[Tuple[int, Path], None, None]:
34
+ yield from (
35
+ (idx, file)
36
+ for idx, files in enumerate(self.files)
37
+ for file in files.input_files
38
+ )
39
+
40
+ @cached_property
41
+ def input_parameters(self) -> InputParemetersT:
42
+ return asyncio.run(self.ainput_parameters())
43
+
44
+ async def ainput_parameters(self) -> InputParemetersT:
45
+ path = self.paths.algorithm_custom_parameters
46
+ async with aiofiles.open(path) as f:
47
+ raw = await f.read()
48
+
49
+ raw = raw.strip()
50
+ assert raw is not None, f"Empty file {path}"
51
+ return self.input_type.model_validate_json(raw)
@@ -0,0 +1,31 @@
1
# mypy: disable-error-code=call-overload
from logging import Logger, getLogger
from pathlib import Path

import orjson
from pydantic import Field, field_validator
from pydantic_settings import BaseSettings, SettingsConfigDict


class JobSettings(BaseSettings):  # type: ignore[explicit-any]
    """Environment-driven job configuration (BASE_DIR, DIDS, ...)."""

    base_dir: Path = Field(alias="BASE_DIR")
    dids: list[str] = Field(alias="DIDS")
    transformation_did: str = Field(alias="TRANSFORMATION_DID")
    secret: str | None = Field(default=None, alias="SECRET")
    logger: Logger = Field(default_factory=lambda: getLogger(__name__))

    model_config = SettingsConfigDict(
        extra="forbid",
        validate_default=True,
        populate_by_name=True,
        arbitrary_types_allowed=True,
    )

    @field_validator("dids", mode="before")
    @classmethod
    def split_dids(cls, v: list[str] | str) -> list[str]:
        """Accept DIDS either as a ready list or as a JSON-encoded string."""
        if not isinstance(v, str):
            return v
        decoded = orjson.loads(v)
        assert isinstance(decoded, list)
        return decoded
@@ -1,6 +1,6 @@
1
1
  [project]
2
2
  name = "oceanprotocol-job-details"
3
- version = "0.2.8"
3
+ version = "0.3.1"
4
4
  description = "A Python package to get details from OceanProtocol jobs"
5
5
  authors = [
6
6
  { name = "Agrospai", email = "agrospai@udl.cat" },
@@ -15,9 +15,11 @@ classifiers = [
15
15
  "License :: OSI Approved :: MIT License",
16
16
  ]
17
17
  dependencies = [
18
- "dataclasses-json>=0.6.7",
18
+ "aiofiles>=25.1.0",
19
19
  "dependency-injector>=4.48.2",
20
20
  "orjson>=3.11.3",
21
+ "pydantic>=2.12.5",
22
+ "pydantic-settings>=2.12.0",
21
23
  ]
22
24
 
23
25
  [project.urls]
@@ -25,7 +27,7 @@ Homepage = "https://github.com/AgrospAI/oceanprotocol-job-details"
25
27
  Issues = "https://github.com/AgrospAI/oceanprotocol-job-details/issues"
26
28
 
27
29
  [tool.pytest.ini_options]
28
- log_level = "DEBUG"
30
+ log_level = "INFO"
29
31
  pythonpath = "oceanprotocol_job_details"
30
32
  log_cli = true
31
33
 
@@ -40,9 +42,10 @@ include = ["oceanprotocol_job_details"]
40
42
  include = ["oceanprotocol_job_details"]
41
43
 
42
44
  [tool.mypy]
45
+ plugins = ["pydantic.mypy"]
43
46
  strict = true
44
47
  warn_return_any = true
45
48
  disallow_any_explicit = true
46
49
 
47
50
  [dependency-groups]
48
- dev = ["mypy>=1.15.0", "pytest>=8.3.4"]
51
+ dev = ["mypy>=1.15.0", "pytest>=8.3.4", "types-aiofiles>=25.1.0.20251011"]
@@ -1,4 +0,0 @@
1
- from .ocean import JobDetails
2
-
3
-
4
- __all__ = [JobDetails]
@@ -1,48 +0,0 @@
1
- from dependency_injector import containers, providers
2
-
3
- from oceanprotocol_job_details.loaders.impl.ddo import DDOLoader
4
- from oceanprotocol_job_details.loaders.impl.files import FilesLoader
5
- from oceanprotocol_job_details.loaders.impl.job_details import JobDetailsLoader
6
- from oceanprotocol_job_details.paths import Paths
7
-
8
-
9
- class Container(containers.DeclarativeContainer):
10
-
11
- config = providers.Configuration()
12
-
13
- paths = providers.Singleton(
14
- Paths,
15
- base_dir=config.base_dir,
16
- )
17
-
18
- file_loader = providers.Singleton(
19
- FilesLoader,
20
- dids=config.dids,
21
- transformation_did=config.transformation_did,
22
- paths=paths,
23
- logger=config.logger,
24
- )
25
-
26
- files = providers.Factory(
27
- lambda loader: loader.load(),
28
- loader=file_loader,
29
- )
30
-
31
- # DDOLoader depends on Files loaded from FilesLoader
32
- ddo_loader = providers.Factory(
33
- DDOLoader,
34
- files=files,
35
- )
36
-
37
- ddos = providers.Factory(
38
- lambda loader: loader.load(),
39
- loader=ddo_loader,
40
- )
41
-
42
- job_details_loader = providers.Factory(
43
- JobDetailsLoader,
44
- files=files,
45
- secret=config.secret,
46
- paths=paths,
47
- ddos=ddos,
48
- )
@@ -1,32 +0,0 @@
1
- from __future__ import annotations
2
-
3
- from dataclasses import InitVar, dataclass, field
4
- from pathlib import Path
5
- from typing import TYPE_CHECKING, final
6
-
7
- if TYPE_CHECKING:
8
- from oceanprotocol_job_details.ocean import DDO, Files
9
-
10
-
11
- @final
12
- @dataclass(frozen=True)
13
- class DDOLoader:
14
-
15
- files: InitVar[list[Files]]
16
- """The files to load the DDOs from"""
17
-
18
- _ddo_paths: list[Path] = field(init=False)
19
-
20
- def __post_init__(self, files: list[Files]) -> None:
21
- assert files, "Missing files"
22
-
23
- object.__setattr__(self, "_ddo_paths", [f.ddo for f in files])
24
-
25
- def load(self) -> list[DDO]:
26
- from oceanprotocol_job_details.ocean import DDO
27
-
28
- ddos = []
29
- for path in self._ddo_paths:
30
- with open(path, "r") as f:
31
- ddos.append(DDO.from_json(f.read()))
32
- return ddos
@@ -1,64 +0,0 @@
1
- from __future__ import annotations
2
-
3
- import json
4
- from dataclasses import InitVar, dataclass, field
5
- from logging import Logger
6
- from typing import TYPE_CHECKING, final
7
-
8
- from oceanprotocol_job_details.paths import Paths
9
-
10
- if TYPE_CHECKING:
11
- from oceanprotocol_job_details.ocean import DIDPaths, Files
12
-
13
-
14
- @final
15
- @dataclass(frozen=True)
16
- class FilesLoader:
17
-
18
- dids: InitVar[str | None]
19
- """Input DIDs"""
20
-
21
- transformation_did: InitVar[str | None]
22
- """DID for the transformation algorithm"""
23
-
24
- paths: Paths
25
- """Path configurations of the project"""
26
-
27
- logger: Logger
28
- """Logger to use"""
29
-
30
- _dids: str = field(init=False)
31
- _transformation_did: str = field(init=False)
32
-
33
- def __post_init__(
34
- self,
35
- dids: str | None,
36
- transformation_did: str | None,
37
- ) -> None:
38
- def _load_dids(dids, logger):
39
- if dids:
40
- return json.loads(dids)
41
-
42
- logger.info("Missing DIDS, Inferring DIDS from input DDOs")
43
- return [f.parts[-1] for f in self.paths.ddos.iterdir()]
44
-
45
- object.__setattr__(self, "_transformation_did", transformation_did)
46
- object.__setattr__(self, "_dids", _load_dids(dids, self.logger))
47
-
48
- assert self._dids, "Missing input DIDs"
49
-
50
- def load(self) -> Files:
51
- from oceanprotocol_job_details.ocean import DIDPaths, Files
52
-
53
- files: list[DIDPaths] = []
54
- for did in self._dids:
55
- base = self.paths.inputs / did
56
- files.append(
57
- DIDPaths(
58
- did=did,
59
- ddo=self.paths.ddos / did,
60
- input_files=list(base.iterdir()),
61
- )
62
- )
63
-
64
- return Files(files)
@@ -1,35 +0,0 @@
1
- from __future__ import annotations
2
-
3
- from dataclasses import dataclass, field
4
- from typing import TYPE_CHECKING, Generic, Type, TypeVar, final
5
-
6
- from oceanprotocol_job_details.paths import Paths
7
-
8
- if TYPE_CHECKING:
9
- from oceanprotocol_job_details.ocean import DDO, Files, JobDetails
10
-
11
-
12
- T = TypeVar("T")
13
-
14
-
15
- @final
16
- @dataclass(frozen=True)
17
- class JobDetailsLoader(Generic[T]):
18
-
19
- _type: Type[T] = field(repr=False)
20
-
21
- files: Files
22
- secret: str
23
- paths: Paths
24
- ddos: list[DDO]
25
-
26
- def load(self) -> JobDetails[T]:
27
- from oceanprotocol_job_details.ocean import JobDetails
28
-
29
- return JobDetails(
30
- files=self.files,
31
- secret=self.secret,
32
- ddos=self.ddos,
33
- paths=self.paths,
34
- _type=self._type,
35
- )
@@ -1,313 +0,0 @@
1
- from __future__ import annotations
2
-
3
- import os
4
- from dataclasses import dataclass, field
5
- from functools import cached_property
6
- from logging import Logger, getLogger
7
- from pathlib import Path
8
- from typing import (
9
- Any,
10
- Generator,
11
- Generic,
12
- Iterator,
13
- Optional,
14
- Sequence,
15
- Type,
16
- TypeVar,
17
- final,
18
- )
19
-
20
- import orjson
21
- from dataclasses_json import config as dc_config
22
- from dataclasses_json import dataclass_json
23
-
24
- from oceanprotocol_job_details.di import Container
25
- from oceanprotocol_job_details.paths import Paths
26
-
27
- InputParemetersT = TypeVar("InputParemetersT")
28
-
29
-
30
- @dataclass_json
31
- @dataclass
32
- class Credential:
33
- type: str
34
- values: list[str]
35
-
36
-
37
- @dataclass_json
38
- @dataclass
39
- class Credentials:
40
- allow: list[Credential]
41
- deny: list[Credential]
42
-
43
-
44
- @dataclass_json
45
- @dataclass
46
- class DockerContainer:
47
- image: str
48
- tag: str
49
- entrypoint: str
50
-
51
-
52
- @dataclass_json
53
- @dataclass
54
- class Algorithm: # type: ignore
55
- container: DockerContainer
56
- language: str
57
- version: str
58
- consumerParameters: Any # type: ignore
59
-
60
-
61
- @dataclass_json
62
- @dataclass
63
- class Metadata:
64
- description: str
65
- name: str
66
- type: str
67
- author: str
68
- license: str
69
- algorithm: Optional[Algorithm] = None
70
- tags: Optional[list[str]] = None
71
- created: Optional[str] = None
72
- updated: Optional[str] = None
73
- copyrightHolder: Optional[str] = None
74
- links: Optional[list[str]] = None
75
- contentLanguage: Optional[str] = None
76
- categories: Optional[list[str]] = None
77
-
78
-
79
- @dataclass_json
80
- @dataclass
81
- class ConsumerParameters:
82
- name: str
83
- type: str
84
- label: str
85
- required: bool
86
- description: str
87
- default: str
88
- option: Optional[list[str]] = None
89
-
90
-
91
- @dataclass_json
92
- @dataclass
93
- class Service:
94
- id: str
95
- type: str
96
- timeout: int
97
- files: str
98
- datatokenAddress: str
99
- serviceEndpoint: str
100
- additionalInformation: Optional[str] = None
101
- name: Optional[str] = None
102
- description: Optional[str] = None
103
-
104
-
105
- @dataclass_json
106
- @dataclass
107
- class Event:
108
- tx: str
109
- block: int
110
- from_: str = field(metadata=dc_config(field_name="from"))
111
- contract: str
112
- datetime: str
113
-
114
-
115
- @dataclass_json
116
- @dataclass
117
- class NFT:
118
- address: str
119
- name: str
120
- symbol: str
121
- state: int
122
- tokenURI: str
123
- owner: str
124
- created: str
125
-
126
-
127
- @dataclass_json
128
- @dataclass
129
- class DataToken:
130
- address: str
131
- name: str
132
- symbol: str
133
- serviceId: str
134
-
135
-
136
- @dataclass_json
137
- @dataclass
138
- class Price:
139
- value: int
140
-
141
-
142
- @dataclass_json
143
- @dataclass
144
- class Stats:
145
- allocated: int
146
- orders: int
147
- price: Price
148
-
149
-
150
- @dataclass_json
151
- @dataclass
152
- class Purgatory:
153
- state: bool
154
-
155
-
156
- @dataclass_json
157
- @dataclass
158
- class DDO:
159
- id: str
160
- context: list[str] = field(metadata=dc_config(field_name="@context"))
161
- nftAddress: str
162
- chainId: int
163
- version: str
164
- metadata: Metadata
165
- services: list[Service]
166
- credentials: Credentials
167
- event: Event
168
- nft: NFT
169
- datatokens: list[DataToken]
170
- stats: Stats
171
- purgatory: Purgatory
172
-
173
-
174
- @dataclass(frozen=True)
175
- class DIDPaths:
176
- did: str
177
- ddo: Path
178
- input_files: Sequence[Path]
179
-
180
- def __post_init__(self) -> None:
181
- assert self.ddo.exists(), f"DDO {self.ddo} does not exist"
182
- for input_file in self.input_files:
183
- assert input_file.exists(), f"File {input_file} does not exist"
184
-
185
- def __len__(self) -> int:
186
- return len(self.input_files)
187
-
188
-
189
- @dataclass(frozen=True)
190
- class Files:
191
- _files: Sequence[DIDPaths]
192
-
193
- @property
194
- def files(self) -> Sequence[DIDPaths]:
195
- return self._files
196
-
197
- def __getitem__(self, index: int) -> DIDPaths:
198
- return self.files[index]
199
-
200
- def __iter__(self) -> Iterator[DIDPaths]:
201
- return iter(self.files)
202
-
203
- def __len__(self) -> int:
204
- return len(self.files)
205
-
206
-
207
- def _normalize_json(value):
208
- if isinstance(value, str):
209
- try:
210
- decoded = orjson.loads(value)
211
- return _normalize_json(decoded) # recurse if nested again
212
- except orjson.JSONDecodeError:
213
- return value
214
- elif isinstance(value, dict):
215
- return {k: _normalize_json(v) for k, v in value.items()}
216
- elif isinstance(value, list):
217
- return [_normalize_json(v) for v in value]
218
- return value
219
-
220
-
221
- @final
222
- @dataclass_json
223
- @dataclass
224
- class _EmptyJobDetails: ...
225
-
226
-
227
- @final
228
- @dataclass_json
229
- @dataclass(frozen=True)
230
- class JobDetails(Generic[InputParemetersT]):
231
- files: Files
232
- """The input filepaths"""
233
-
234
- ddos: list[DDO]
235
- """list of paths to the DDOs"""
236
-
237
- paths: Paths
238
- """Configuration paths"""
239
-
240
- # Store the type explicitly to avoid issues
241
- _type: Type[InputParemetersT] = field(repr=False)
242
-
243
- secret: str | None = None
244
- """Shh it's a secret"""
245
-
246
- def __post_init__(self) -> None:
247
- if not hasattr(self._type, "__dataclass_fields__"):
248
- raise TypeError(f"{self._type} is not a dataclass type")
249
-
250
- def next_path(self) -> Generator[tuple[int, Path], None, None]:
251
- for idx, did_files in enumerate(self.files):
252
- for file in did_files.input_files:
253
- yield (idx, file)
254
-
255
- @cached_property
256
- def input_parameters(self) -> InputParemetersT:
257
- """Read the input parameters and return them in an instance of the dataclass InputParemetersT"""
258
-
259
- with open(self.paths.algorithm_custom_parameters, "r") as f:
260
- raw = f.read().strip()
261
- if not raw:
262
- raise ValueError(
263
- f"Custom parameters file {self.paths.algorithm_custom_parameters} is empty"
264
- )
265
- try:
266
- parsed = _normalize_json(orjson.loads(raw))
267
- return dataclass_json(self._type).from_dict(parsed) # type: ignore
268
- except Exception as e:
269
- raise ValueError(
270
- f"Failed to parse input paramers into {self._type.__name__}: {e}\n"
271
- f"Raw content: {raw}"
272
- ) from e
273
-
274
- @classmethod
275
- def load(
276
- cls,
277
- _type: Type[InputParemetersT] | None = None,
278
- *,
279
- base_dir: str | None = None,
280
- dids: str | None = None,
281
- transformation_did: str | None = None,
282
- secret: str | None = None,
283
- logger: Logger | None = None,
284
- ) -> JobDetails[InputParemetersT]:
285
- """Load a JobDetails instance that holds the runtime details.
286
-
287
- Loading it will check the following:
288
- 1. That the needed environment variables are set.
289
- 1. That the ocean protocol contains the needed data based on the passed environment variables.
290
-
291
- Those needed environment variables are:
292
- - BASE_DIR: Base directory to read the data from, parent of the ddos, inputs, outputs and logs directories.
293
- - DIDS: The DIDs of the inputs
294
- - TRANSFORMATION_DID: The DID of the transformation algorithm
295
- - SECRET (optional): A really secret secret
296
- """
297
-
298
- if _type is None:
299
- _type = _EmptyJobDetails
300
-
301
- container = Container()
302
- container.config.from_dict(
303
- {
304
- "base_dir": base_dir or os.environ.get("BASE_DIR", None),
305
- "dids": dids or os.environ.get("DIDS", None),
306
- "transformation_did": transformation_did
307
- or os.environ.get("TRANSFORMATION_DID", None),
308
- "secret": secret or os.environ.get("SECRET", None),
309
- "logger": logger or getLogger(__name__),
310
- }
311
- )
312
-
313
- return container.job_details_loader(_type=_type).load()
@@ -1,38 +0,0 @@
1
- from dataclasses import InitVar, dataclass, field
2
- from pathlib import Path
3
-
4
-
5
- @dataclass
6
- class Paths:
7
- """Configuration class for the Ocean Protocol Job Details"""
8
-
9
- base_dir: InitVar[Path | None]
10
-
11
- _base: Path = field(init=False)
12
-
13
- def __post_init__(self, base_dir: str | Path | None) -> None:
14
- self._base = Path(base_dir) if base_dir else Path("/data")
15
-
16
- @property
17
- def data(self) -> Path:
18
- return self._base
19
-
20
- @property
21
- def inputs(self) -> Path:
22
- return self.data / "inputs"
23
-
24
- @property
25
- def ddos(self) -> Path:
26
- return self.data / "ddos"
27
-
28
- @property
29
- def outputs(self) -> Path:
30
- return self.data / "outputs"
31
-
32
- @property
33
- def logs(self) -> Path:
34
- return self.data / "logs"
35
-
36
- @property
37
- def algorithm_custom_parameters(self) -> Path:
38
- return self.inputs / "algoCustomData.json"