oceanprotocol-job-details 0.3.7__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- oceanprotocol_job_details/__init__.py +4 -0
- oceanprotocol_job_details/di.py +40 -0
- oceanprotocol_job_details/domain.py +187 -0
- oceanprotocol_job_details/helpers.py +29 -0
- oceanprotocol_job_details/loaders/__init__.py +0 -0
- oceanprotocol_job_details/loaders/impl/__init__.py +0 -0
- oceanprotocol_job_details/loaders/impl/ddo.py +24 -0
- oceanprotocol_job_details/loaders/impl/files.py +44 -0
- oceanprotocol_job_details/loaders/impl/job_details.py +29 -0
- oceanprotocol_job_details/loaders/loader.py +9 -0
- oceanprotocol_job_details/ocean.py +48 -0
- oceanprotocol_job_details/py.types +0 -0
- oceanprotocol_job_details/settings.py +37 -0
- oceanprotocol_job_details-0.3.7.dist-info/METADATA +112 -0
- oceanprotocol_job_details-0.3.7.dist-info/RECORD +17 -0
- oceanprotocol_job_details-0.3.7.dist-info/WHEEL +4 -0
- oceanprotocol_job_details-0.3.7.dist-info/licenses/LICENSE +7 -0

oceanprotocol_job_details/di.py
@@ -0,0 +1,40 @@
+from types import NoneType
+from typing import Generic, TypeVar
+
+from dependency_injector import containers, providers
+from pydantic import BaseModel
+
+from oceanprotocol_job_details.loaders.impl.ddo import DDOLoader
+from oceanprotocol_job_details.loaders.impl.files import FilesLoader
+from oceanprotocol_job_details.loaders.impl.job_details import JobDetailsLoader
+from oceanprotocol_job_details.domain import Paths
+
+
+InputParametersT = TypeVar("InputParametersT", BaseModel, None)
+
+
+class Container(containers.DeclarativeContainer, Generic[InputParametersT]):
+    config = providers.Configuration()
+
+    paths = providers.Singleton(Paths, base_dir=config.base_dir)
+
+    file_loader = providers.Singleton(
+        FilesLoader,
+        dids=config.dids,
+        transformation_did=config.transformation_did,
+        paths=paths,
+        logger=config.logger,
+    )
+
+    files = providers.Factory(lambda loader: loader.load(), loader=file_loader)
+    ddo_loader = providers.Factory(DDOLoader, files=files)
+    ddos = providers.Factory(lambda loader: loader.load(), loader=ddo_loader)
+
+    job_details_loader: providers.Factory[JobDetailsLoader[InputParametersT]] = (
+        providers.Factory(
+            JobDetailsLoader,
+            files=files,
+            secret=config.secret,
+            paths=paths,
+            ddos=ddos,
+        )
+    )
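
The container wires the loaders together through `dependency_injector` providers. Below is a minimal wiring sketch (not part of the package); the `base_dir`, DID values, and logger are placeholder assumptions, and resolving `files`, `ddos`, or `job_details_loader` touches the `/data` tree described in the README further down.

```python
# Hedged sketch: configuring the Container by hand (all values are placeholders).
import logging

from oceanprotocol_job_details.di import Container

container = Container()
container.config.from_dict(
    {
        "base_dir": "/data",               # assumed job data root
        "dids": ["did0"],                  # hypothetical dataset DID
        "transformation_did": "did-algo",  # hypothetical algorithm DID
        "secret": None,
        "logger": logging.getLogger("job"),
    }
)

# Providers resolve lazily; nothing is read from disk until they are called:
# files = container.files()                                        # scans /data/ddos and /data/inputs
# job_details = container.job_details_loader(input_type=None).load()
```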

oceanprotocol_job_details/domain.py
@@ -0,0 +1,187 @@
+# mypy: disable-error-code=explicit-any
+from dataclasses import InitVar, dataclass, field
+from pathlib import Path
+from typing import Generator, List, Optional, Sequence, TypeAlias, TypeVar
+
+from pydantic import BaseModel, ConfigDict, Field, JsonValue
+
+
+class Credential(BaseModel):
+    type: str
+    values: list[str]
+
+
+class Credentials(BaseModel):
+    allow: list[Credential]
+    deny: list[Credential]
+
+
+class DockerContainer(BaseModel):
+    image: str
+    tag: str
+    entrypoint: str
+
+
+class Algorithm(BaseModel):
+    container: DockerContainer
+    language: str
+    version: str
+    consumerParameters: JsonValue
+
+
+class Metadata(BaseModel):
+    description: str
+    name: str
+    type: str
+    author: str
+    license: str
+    algorithm: Optional[Algorithm] = None
+    tags: Optional[list[str]] = None
+    created: Optional[str] = None
+    updated: Optional[str] = None
+    copyrightHolder: Optional[str] = None
+    links: Optional[list[str]] = None
+    contentLanguage: Optional[str] = None
+    categories: Optional[list[str]] = None
+
+
+class ConsumerParameters(BaseModel):
+    name: str
+    type: str
+    label: str
+    required: bool
+    description: str
+    default: str
+    option: Optional[list[str]] = None
+
+
+class Service(BaseModel):
+    id: str
+    type: str
+    timeout: int
+    files: str
+    datatokenAddress: str
+    serviceEndpoint: str
+    additionalInformation: Optional[str] = None
+    name: Optional[str] = None
+    description: Optional[str] = None
+
+
+class Event(BaseModel):
+    tx: str
+    block: int
+    from_: str = Field(alias="from")
+    contract: str
+    datetime: str
+
+    model_config = ConfigDict(populate_by_name=True)
+
+
+class NFT(BaseModel):
+    address: str
+    name: str
+    symbol: str
+    state: int
+    tokenURI: str
+    owner: str
+    created: str
+
+
+class DataToken(BaseModel):
+    address: str
+    name: str
+    symbol: str
+    serviceId: str
+
+
+class Price(BaseModel):
+    value: int
+
+
+class Stats(BaseModel):
+    allocated: int
+    orders: int
+    price: Price
+
+
+class Purgatory(BaseModel):
+    state: bool
+
+
+class DDO(BaseModel):
+    id: str
+    context: list[str] = Field(alias="@context")
+    nftAddress: str
+    chainId: int
+    version: str
+    metadata: Metadata
+    services: list[Service]
+    credentials: Credentials
+    event: Event
+    nft: NFT
+    datatokens: list[DataToken]
+    stats: Stats
+    purgatory: Purgatory
+
+    model_config = ConfigDict(populate_by_name=True)
+
+
+@dataclass(frozen=True)
+class DIDPaths:
+    did: str
+    ddo: Path = field(repr=False)
+
+    files: InitVar[Generator[Path, None, None]]
+
+    _input: List[Path] = field(init=False, repr=False)
+
+    def __post_init__(self, files: Generator[Path, None, None]) -> None:
+        assert self.ddo.exists(), f"DDO {self.ddo} does not exist"
+
+        object.__setattr__(self, "_input", list(files))
+
+    @property
+    def input_files(self) -> List[Path]:
+        return self._input
+
+    def __len__(self) -> int:
+        return len(self._input)
+
+
+Files: TypeAlias = Sequence[DIDPaths]
+
+
+@dataclass(frozen=True)
+class Paths:
+    """Configuration class for the Ocean Protocol Job Details"""
+
+    base_dir: InitVar[Path | None] = None
+
+    _base: Path = field(init=False, repr=False)
+
+    def __post_init__(self, base_dir: Path | None) -> None:
+        object.__setattr__(self, "_base", base_dir if base_dir else Path("/data"))
+
+    @property
+    def data(self) -> Path:
+        return self._base
+
+    @property
+    def inputs(self) -> Path:
+        return self.data / "inputs"
+
+    @property
+    def ddos(self) -> Path:
+        return self.data / "ddos"
+
+    @property
+    def outputs(self) -> Path:
+        return self.data / "outputs"
+
+    @property
+    def logs(self) -> Path:
+        return self.data / "logs"
+
+    @property
+    def algorithm_custom_parameters(self) -> Path:
+        return self.inputs / "algoCustomData.json"
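
Most of this module is a pydantic mirror of the DDO document; `Paths` is the one part that encodes the on-disk conventions. A quick sketch of the path helpers (the `/tmp/job` directory is a placeholder, and no filesystem access is needed):

```python
# Sketch: Paths derives every job directory from a single base (default /data).
from pathlib import Path

from oceanprotocol_job_details.domain import Paths

default = Paths()                           # base_dir defaults to /data
print(default.inputs)                       # /data/inputs
print(default.ddos)                         # /data/ddos
print(default.algorithm_custom_parameters)  # /data/inputs/algoCustomData.json

custom = Paths(base_dir=Path("/tmp/job"))   # placeholder base directory
print(custom.outputs)                       # /tmp/job/outputs
print(custom.logs)                          # /tmp/job/logs
```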

oceanprotocol_job_details/helpers.py
@@ -0,0 +1,29 @@
+from typing import Any, Dict, Type, TypeVar
+
+from pydantic import BaseModel, JsonValue
+
+from oceanprotocol_job_details.di import Container
+from oceanprotocol_job_details.ocean import JobDetails
+from oceanprotocol_job_details.settings import JobSettings
+
+InputParametersT = TypeVar("InputParametersT", BaseModel, None)
+
+
+def create_container(config: Dict[str, Any]) -> Container[InputParametersT]:  # type: ignore[explicit-any]
+    """Return a fully configured Container from a config dict."""
+    container = Container[InputParametersT]()
+    settings = JobSettings(**config)
+    container.config.from_pydantic(settings)
+    return container
+
+
+def load_job_details(
+    config: Dict[str, JsonValue] = {},
+    input_type: Type[InputParametersT] | None = None,
+) -> JobDetails[InputParametersT]:
+    """
+    Load JobDetails for a given input_type using the config.
+    Returns a fully initialized JobDetails instance.
+    """
+    container: Container[InputParametersT] = create_container(config)
+    return container.job_details_loader(input_type=input_type).load()
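
`load_job_details` is a thin wrapper over `create_container`. The two-step form below is an equivalent sketch; the config values are placeholders, and it assumes the `/data` layout from the README exists, since resolving the loader reads DDOs and input files from disk.

```python
# Sketch: the two-step equivalent of load_job_details (placeholder config values).
from oceanprotocol_job_details.helpers import create_container

container = create_container({"base_dir": "/data", "transformation_did": "did-algo"})
job_details = container.job_details_loader(input_type=None).load()  # reads /data/ddos and /data/inputs
```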

oceanprotocol_job_details/loaders/__init__.py: file without changes (empty file)

oceanprotocol_job_details/loaders/impl/__init__.py: file without changes (empty file)

oceanprotocol_job_details/loaders/impl/ddo.py
@@ -0,0 +1,24 @@
+from __future__ import annotations
+
+from dataclasses import InitVar, dataclass, field
+from pathlib import Path
+from typing import final
+
+from oceanprotocol_job_details.domain import DDO, Files
+
+
+@final
+@dataclass(frozen=True)
+class DDOLoader:
+    files: InitVar[Files]
+    """The files to load the DDOs from"""
+
+    _ddo_paths: list[Path] = field(init=False)
+
+    def __post_init__(self, files: Files) -> None:
+        assert files is not None and len(files) != 0, "Missing files"
+
+        object.__setattr__(self, "_ddo_paths", [f.ddo for f in files])
+
+    def load(self) -> list[DDO]:
+        return [DDO.model_validate_json(p.read_text()) for p in self._ddo_paths]

oceanprotocol_job_details/loaders/impl/files.py
@@ -0,0 +1,44 @@
+from __future__ import annotations
+
+from dataclasses import InitVar, dataclass, field
+from logging import Logger
+from pathlib import Path
+from typing import Literal, final
+
+from oceanprotocol_job_details.domain import DIDPaths, Files, Paths
+
+
+@final
+@dataclass(frozen=True)
+class FilesLoader:
+    paths: Paths
+    """Path configurations of the project"""
+
+    logger: Logger = field(repr=False)
+    """Logger to use"""
+
+    dids: list[str]
+    """Input DIDs"""
+
+    transformation_did: str
+    """DID for the transformation algorithm"""
+
+    def __post_init__(self) -> None:
+        assert self.dids, "Missing input DIDs"
+
+    def calculate_path(self, did: str, path_type: Literal["input", "ddo"]) -> Path:
+        match path_type:
+            case "ddo":
+                return self.paths.ddos / did
+            case "input":
+                return self.paths.inputs / did
+
+    def load(self) -> Files:
+        return [
+            DIDPaths(
+                did=did,
+                ddo=self.calculate_path(did, "ddo"),
+                files=self.calculate_path(did, "input").iterdir(),
+            )
+            for did in self.dids
+        ]
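
`calculate_path` is pure path arithmetic and can be checked without a real `/data` tree; `load` is what actually hits the filesystem. A small sketch with placeholder DIDs:

```python
# Sketch: how FilesLoader maps a DID onto the ddos/ and inputs/ directories.
import logging
from pathlib import Path

from oceanprotocol_job_details.domain import Paths
from oceanprotocol_job_details.loaders.impl.files import FilesLoader

loader = FilesLoader(
    paths=Paths(base_dir=Path("/data")),
    logger=logging.getLogger("job"),
    dids=["did0"],                  # hypothetical dataset DID
    transformation_did="did-algo",  # hypothetical algorithm DID
)

print(loader.calculate_path("did0", "ddo"))    # /data/ddos/did0
print(loader.calculate_path("did0", "input"))  # /data/inputs/did0
# loader.load() would iterate /data/inputs/did0 and require the directory to exist.
```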

oceanprotocol_job_details/loaders/impl/job_details.py
@@ -0,0 +1,29 @@
+from dataclasses import dataclass, field
+from types import NoneType
+from typing import Generic, Type, TypeVar, final
+
+from pydantic import BaseModel
+
+from oceanprotocol_job_details.domain import DDO, Files, Paths
+from oceanprotocol_job_details.ocean import JobDetails
+
+T = TypeVar("T", BaseModel, None)
+
+
+@final
+@dataclass(frozen=True)
+class JobDetailsLoader(Generic[T]):
+    input_type: Type[T] = field(repr=False)
+    files: Files
+    secret: str
+    paths: Paths
+    ddos: list[DDO]
+
+    def load(self) -> JobDetails[T]:
+        return JobDetails[T](
+            files=self.files,
+            secret=self.secret,
+            ddos=self.ddos,
+            paths=self.paths,
+            input_type=self.input_type,
+        )

oceanprotocol_job_details/ocean.py
@@ -0,0 +1,48 @@
+from __future__ import annotations
+
+import asyncio
+from functools import cached_property
+from pathlib import Path
+from types import NoneType
+from typing import Generator, Generic, Tuple, Type, TypeVar, final
+
+import aiofiles
+from pydantic import BaseModel, ConfigDict, Secret
+
+from oceanprotocol_job_details.domain import DDO, Files, Paths
+
+InputParametersT = TypeVar("InputParametersT", BaseModel, None)
+
+
+@final
+class JobDetails(BaseModel, Generic[InputParametersT]):  # type: ignore[explicit-any]
+    files: Files
+    ddos: list[DDO]
+    paths: Paths
+    input_type: Type[InputParametersT] | None
+    secret: Secret[str] | None = None
+
+    model_config = ConfigDict(arbitrary_types_allowed=True, frozen=True)
+
+    def inputs(self) -> Generator[Tuple[int, Path], None, None]:
+        yield from (
+            (idx, file)
+            for idx, files in enumerate(self.files)
+            for file in files.input_files
+        )
+
+    @cached_property
+    def input_parameters(self) -> InputParametersT | None:
+        return asyncio.run(self.ainput_parameters())
+
+    async def ainput_parameters(self) -> InputParametersT | None:
+        if self.input_type is None:
+            return None
+
+        path = self.paths.algorithm_custom_parameters
+        async with aiofiles.open(path) as f:
+            raw = await f.read()
+
+        raw = raw.strip()
+        assert raw is not None, f"Empty file {path}"
+        return self.input_type.model_validate_json(raw)  # type: ignore
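
`input_parameters` wraps `ainput_parameters` in `asyncio.run`, so it is meant for synchronous callers; inside an already-running event loop the async variant has to be awaited directly. A hedged sketch follows: the config values and the `threshold` field are placeholders, and a matching `algoCustomData.json` must exist under `/data/inputs`.

```python
# Sketch: reading custom parameters from inside an event loop via ainput_parameters.
import asyncio

from pydantic import BaseModel

from oceanprotocol_job_details import load_job_details


class InputParameters(BaseModel):
    threshold: float  # hypothetical field; must match the job's algoCustomData.json


async def main() -> None:
    job_details = load_job_details(
        {"base_dir": "/data", "transformation_did": "did-algo"},  # placeholder values
        InputParameters,
    )
    params = await job_details.ainput_parameters()
    print(params)


# asyncio.run(main())  # requires the /data layout described in the README below
```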

oceanprotocol_job_details/py.types: file without changes (empty file)

oceanprotocol_job_details/settings.py
@@ -0,0 +1,37 @@
+# mypy: disable-error-code=call-overload
+from logging import Logger, getLogger
+from pathlib import Path
+from typing import Self
+
+import orjson
+from pydantic import Field, field_validator, model_validator
+from pydantic_settings import BaseSettings, SettingsConfigDict
+
+
+class JobSettings(BaseSettings):  # type: ignore[explicit-any]
+    base_dir: Path = Field(alias="BASE_DIR")
+    dids: list[str] = Field(default_factory=list, alias="DIDS")
+    transformation_did: str = Field(alias="TRANSFORMATION_DID")
+    secret: str | None = Field(default=None, alias="SECRET")
+    logger: Logger = Field(default_factory=lambda: getLogger(__name__))
+
+    model_config = SettingsConfigDict(
+        extra="forbid",
+        populate_by_name=True,
+        arbitrary_types_allowed=True,
+    )
+
+    @field_validator("dids", mode="before")
+    @classmethod
+    def split_dids(cls, v: list[str] | str) -> list[str]:
+        if isinstance(v, str):
+            data = orjson.loads(v)
+            assert isinstance(data, list)
+            return data
+        return v
+
+    @model_validator(mode="after")
+    def validate_dids(self) -> Self:
+        if not self.dids:
+            self.dids.extend([f.name for f in (self.base_dir / "ddos").glob("*")])
+        return self
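
The upper-case aliases let the same settings be populated from environment-variable-style keys or from the lowercase keys used in the config dict, and `DIDS` may arrive as a JSON-encoded string that the `before` validator splits into a list. A sketch with placeholder values:

```python
# Sketch: constructing JobSettings directly with alias-style keys (placeholder values).
from oceanprotocol_job_details.settings import JobSettings

settings = JobSettings(
    BASE_DIR="/data",
    DIDS='["did0"]',                # JSON string; split_dids parses it into a list
    TRANSFORMATION_DID="did-algo",  # hypothetical algorithm DID
)

print(settings.dids)      # ['did0']
print(settings.base_dir)  # /data (coerced to pathlib.Path)
```

If `dids` is left empty, the `after` validator falls back to listing whatever is present under `base_dir/ddos`.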

oceanprotocol_job_details-0.3.7.dist-info/METADATA
@@ -0,0 +1,112 @@
+Metadata-Version: 2.4
+Name: oceanprotocol-job-details
+Version: 0.3.7
+Summary: A Python package to get details from OceanProtocol jobs
+Project-URL: Homepage, https://github.com/AgrospAI/oceanprotocol-job-details
+Project-URL: Issues, https://github.com/AgrospAI/oceanprotocol-job-details/issues
+Author-email: Agrospai <agrospai@udl.cat>, Christian López García <christian.lopez@udl.cat>
+License: Copyright 2025 Agrospai
+
+Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the “Software”), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED “AS IS”, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+License-File: LICENSE
+Classifier: License :: OSI Approved :: MIT License
+Classifier: Operating System :: OS Independent
+Classifier: Programming Language :: Python :: 3
+Requires-Python: >=3.10
+Requires-Dist: aiofiles>=25.1.0
+Requires-Dist: dependency-injector>=4.48.2
+Requires-Dist: orjson>=3.11.3
+Requires-Dist: pydantic-settings>=2.12.0
+Requires-Dist: pydantic>=2.12.5
+Description-Content-Type: text/markdown
+
+A Python package to get details from OceanProtocol jobs
+
+---
+
+## Installation
+
+```bash
+pip install oceanprotocol-job-details
+```
+
+```bash
+uv add oceanprotocol-job-details
+```
+
+## Usage
+
+As a simple library, you only need to import `load_job_details` and call it. It will:
+
+1. Read from disk the parameters needed to populate the `JobDetails` from the given `base_dir`, looking for the files corresponding to the passed DIDs according to the [Ocean Protocol Structure](#oceanprotocol-structure).
+2. If given an `InputParameters` type that inherits from `pydantic.BaseModel`, create an instance of it from the job's custom input data (`algoCustomData.json`).
+
+### Minimal Example
+
+```python
+from oceanprotocol_job_details import load_job_details
+
+job_details = load_job_details({"base_dir": "...", "transformation_did": "..."})
+```
+
+### Custom Input Parameters
+
+If the algorithm has custom input parameters, they can be loaded as follows:
+
+```python
+from pydantic import BaseModel
+from oceanprotocol_job_details import load_job_details
+
+
+class Foo(BaseModel):
+    bar: str
+
+
+class InputParameters(BaseModel):
+    # Allows for nested types
+    foo: Foo
+
+
+job_details = load_job_details({"base_dir": "...", "transformation_did": "..."}, InputParameters)
+
+# Usage
+job_details.input_parameters.foo
+job_details.input_parameters.foo.bar
+```
+
+The values that fill the custom `InputParameters` are parsed from the `algoCustomData.json` located next to the input data directories.
+
+### Iterating Input Files the Clean Way
+
+```python
+from oceanprotocol_job_details import load_job_details
+
+
+job_details = load_job_details(...)
+
+for idx, file_path in job_details.inputs():
+    ...
+
+_, file_path = next(job_details.inputs())
+```
+
+## OceanProtocol Structure
+
+```bash
+data                        # Root /data directory
+├── ddos                    # Contains the loaded dataset's DDO
+│   ├── 17feb...e42         # DDO file
+│   └── ...                 # One DDO per loaded dataset
+├── inputs                  # Datasets dir
+│   ├── 17feb...e42         # Dir holding the data of its name DID, contains files named 0..X
+│   │   └── 0               # Data file
+│   └── algoCustomData.json # Custom algorithm input data
+├── logs                    # Algorithm output logs dir
+└── outputs                 # Algorithm output files dir
+```
+
+> **_Note:_** Even though an algorithm may be passed multiple datasets, the current implementation only supports **one dataset** per algorithm execution, so the executing job will **normally** have **one DDO**, **one dir** inside `inputs`, and **one data file** named `0`.

oceanprotocol_job_details-0.3.7.dist-info/RECORD
@@ -0,0 +1,17 @@
+oceanprotocol_job_details/__init__.py,sha256=nJMrZsEC5F1n9WF-v5QV095Yyc8UkhFw0AzD9o7X0IE,162
+oceanprotocol_job_details/di.py,sha256=URBCcwla3pBKt4hWhRwG7s-Ib_KzoBk2-EtLzQOyAyM,1343
+oceanprotocol_job_details/domain.py,sha256=2_USbeA_7VIEYS8DVb2MW6dCZasjiqIxQaGUnNUKspY,3851
+oceanprotocol_job_details/helpers.py,sha256=ABm3oIRwPd-4XeCOIszCbfL2wkUJqVJJ2bqy3hR4jyw,1064
+oceanprotocol_job_details/ocean.py,sha256=ocoI4OO8A5_SovXbASVucFq4W-tJr_M4C67UbS4aSF8,1473
+oceanprotocol_job_details/py.types,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+oceanprotocol_job_details/settings.py,sha256=zgIYPzaXjsgcmuhT7L2ipSP-2eNaodugHZr0rn2Z420,1248
+oceanprotocol_job_details/loaders/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+oceanprotocol_job_details/loaders/loader.py,sha256=36X2s_0lN89kCUpItxEXfIzuBBNJySebP2B_tdWK2E0,186
+oceanprotocol_job_details/loaders/impl/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+oceanprotocol_job_details/loaders/impl/ddo.py,sha256=XthrQFhmP85XSVzVjBlLePtTowGR3BAsmVp3jngiQ08,668
+oceanprotocol_job_details/loaders/impl/files.py,sha256=Y2vFBT2T9w9zrdpmf550-LQJxwtNPUGa0UU6bBzk9AU,1145
+oceanprotocol_job_details/loaders/impl/job_details.py,sha256=QwlUaG9KozkI1wX66oDTPg4TjGkvSsi8O-TctF6eWvo,724
+oceanprotocol_job_details-0.3.7.dist-info/METADATA,sha256=QfVmh66COpoZrBfxOizJdYQfKutmudNuWzKL0IqRpxk,4503
+oceanprotocol_job_details-0.3.7.dist-info/WHEEL,sha256=WLgqFyCfm_KASv4WHyYy0P3pM_m7J5L9k2skdKLirC8,87
+oceanprotocol_job_details-0.3.7.dist-info/licenses/LICENSE,sha256=ni3ix7P_GxK1W3VGC4fJ3o6QoCngCEpSuTJwO4nkpbw,1055
+oceanprotocol_job_details-0.3.7.dist-info/RECORD,,

oceanprotocol_job_details-0.3.7.dist-info/licenses/LICENSE
@@ -0,0 +1,7 @@
+Copyright 2025 Agrospai
+
+Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the “Software”), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED “AS IS”, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.