oceanprotocol-job-details 0.0.12__tar.gz → 0.1.1__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {oceanprotocol_job_details-0.0.12 → oceanprotocol_job_details-0.1.1}/PKG-INFO +27 -28
- {oceanprotocol_job_details-0.0.12 → oceanprotocol_job_details-0.1.1}/README.md +25 -24
- oceanprotocol_job_details-0.1.1/oceanprotocol_job_details/config.py +53 -0
- oceanprotocol_job_details-0.1.1/oceanprotocol_job_details/job_details.py +47 -0
- oceanprotocol_job_details-0.1.1/oceanprotocol_job_details/loaders/impl/ddo.py +26 -0
- oceanprotocol_job_details-0.1.1/oceanprotocol_job_details/loaders/impl/files.py +71 -0
- oceanprotocol_job_details-0.1.1/oceanprotocol_job_details/loaders/impl/job_details.py +25 -0
- oceanprotocol_job_details-0.1.1/oceanprotocol_job_details/loaders/loader.py +13 -0
- oceanprotocol_job_details-0.1.1/oceanprotocol_job_details/ocean.py +183 -0
- oceanprotocol_job_details-0.1.1/oceanprotocol_job_details/utils.py +33 -0
- {oceanprotocol_job_details-0.0.12 → oceanprotocol_job_details-0.1.1}/pyproject.toml +13 -2
- oceanprotocol_job_details-0.0.12/oceanprotocol_job_details/dataclasses/constants.py +0 -41
- oceanprotocol_job_details-0.0.12/oceanprotocol_job_details/dataclasses/job_details.py +0 -82
- oceanprotocol_job_details-0.0.12/oceanprotocol_job_details/dataclasses/ocean.py +0 -67
- oceanprotocol_job_details-0.0.12/oceanprotocol_job_details/job_details.py +0 -40
- oceanprotocol_job_details-0.0.12/oceanprotocol_job_details/loaders/impl/__init__.py +0 -0
- oceanprotocol_job_details-0.0.12/oceanprotocol_job_details/loaders/impl/map.py +0 -120
- oceanprotocol_job_details-0.0.12/oceanprotocol_job_details/loaders/impl/utils.py +0 -57
- oceanprotocol_job_details-0.0.12/oceanprotocol_job_details/loaders/loader.py +0 -15
- {oceanprotocol_job_details-0.0.12 → oceanprotocol_job_details-0.1.1}/.gitignore +0 -0
- {oceanprotocol_job_details-0.0.12 → oceanprotocol_job_details-0.1.1}/LICENSE +0 -0
- {oceanprotocol_job_details-0.0.12 → oceanprotocol_job_details-0.1.1}/oceanprotocol_job_details/__init__.py +0 -0
- {oceanprotocol_job_details-0.0.12/oceanprotocol_job_details/dataclasses → oceanprotocol_job_details-0.1.1/oceanprotocol_job_details/loaders}/__init__.py +0 -0
- {oceanprotocol_job_details-0.0.12/oceanprotocol_job_details/loaders → oceanprotocol_job_details-0.1.1/oceanprotocol_job_details/loaders/impl}/__init__.py +0 -0
{oceanprotocol_job_details-0.0.12 → oceanprotocol_job_details-0.1.1}/PKG-INFO

@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: oceanprotocol-job-details
-Version: 0.0.12
+Version: 0.1.1
 Summary: A Python package to get details from OceanProtocol jobs
 Project-URL: Homepage, https://github.com/AgrospAI/oceanprotocol-job-details
 Project-URL: Issues, https://github.com/AgrospAI/oceanprotocol-job-details/issues
@@ -17,9 +17,7 @@ Classifier: License :: OSI Approved :: MIT License
 Classifier: Operating System :: OS Independent
 Classifier: Programming Language :: Python :: 3
 Requires-Python: >=3.10
-Requires-Dist:
-Requires-Dist: pydantic>=2.10.6
-Requires-Dist: pytest<9,>=8.3.4
+Requires-Dist: dataclasses-json>=0.6.7
 Description-Content-Type: text/markdown
 
 A Python package to get details from OceanProtocol jobs
@@ -39,8 +37,30 @@ As a simple library, we only need to import the main object and use it once:
 ```Python
 from oceanprotocol_job_details.job_details import OceanProtocolJobDetails
 
-#
-job_details = OceanProtocolJobDetails().load()
+# Having no algorithm input parameters
+job_details = OceanProtocolJobDetails().load()  # type: ignore
+
+```
+
+If our algorithm has custom input parameters and we want to load them into our algorithm, we can do it as follows:
+
+```Python
+
+from dataclasses import dataclass
+from oceanprotocol_job_details.job_details import OceanProtocolJobDetails
+from oceanprotocol_job_details.ocean import JobDetails
+
+@dataclass
+class Input:
+    name: str
+    age: int
+
+job_details: JobDetails[Input] = OceanProtocolJobDetails(Input).load()
+
+# Usage (is type hinted)
+job_details.input_parameters.name
+job_details.input_parameters.age
+
 ```
 
 Assumes the following directory structure:
@@ -48,6 +68,7 @@ Assumes the following directory structure:
 <ROOT_FOLDER>
 └───data
     ├───ddos
+    ├───transformation
     ├───inputs
     └───logs
 ```
@@ -59,25 +80,3 @@ Given the Ocean Protocol job details structure as in [https://github.com/GX4FM-B
 1. Parsing JSON
 1. Validation
 1. Metadata and service extraction
-
-
-### Advanced Usage (not recommended)
-
-If instead of the environment variables, we want to use another kind of mapping, can pass it as a parameter and it will work as long as it has the same key values (Can be implemented in a more generic way, but there is no need right now).
-
-```Python
-from oceanprotocol_job_details.job_details import OceanProtocolJobDetails
-from oceanprotocol_job_details.loaders.impl.environment import Keys
-
-# Fill in with values that will be used instead of env
-custom_mapper = {
-    Keys.ROOT_FOLDER: " ... ",  # Use when you don't want the algorithm to take '/' as base Path
-    Keys.ALGORITHM: " ... ",
-    Keys.DIDS: " ... ",
-    Keys.SECRET: " ... ",
-}
-
-job_details = OceanProtocolJobDetails(mapper=custom_mapper).load()
-```
-
-
{oceanprotocol_job_details-0.0.12 → oceanprotocol_job_details-0.1.1}/README.md

@@ -15,8 +15,30 @@ As a simple library, we only need to import the main object and use it once:
 ```Python
 from oceanprotocol_job_details.job_details import OceanProtocolJobDetails
 
-#
-job_details = OceanProtocolJobDetails().load()
+# Having no algorithm input parameters
+job_details = OceanProtocolJobDetails().load()  # type: ignore
+
+```
+
+If our algorithm has custom input parameters and we want to load them into our algorithm, we can do it as follows:
+
+```Python
+
+from dataclasses import dataclass
+from oceanprotocol_job_details.job_details import OceanProtocolJobDetails
+from oceanprotocol_job_details.ocean import JobDetails
+
+@dataclass
+class Input:
+    name: str
+    age: int
+
+job_details: JobDetails[Input] = OceanProtocolJobDetails(Input).load()
+
+# Usage (is type hinted)
+job_details.input_parameters.name
+job_details.input_parameters.age
+
 ```
 
 Assumes the following directory structure:
@@ -24,6 +46,7 @@ Assumes the following directory structure:
 <ROOT_FOLDER>
 └───data
     ├───ddos
+    ├───transformation
    ├───inputs
     └───logs
 ```
@@ -35,25 +58,3 @@ Given the Ocean Protocol job details structure as in [https://github.com/GX4FM-B
 1. Parsing JSON
 1. Validation
 1. Metadata and service extraction
-
-
-### Advanced Usage (not recommended)
-
-If instead of the environment variables, we want to use another kind of mapping, can pass it as a parameter and it will work as long as it has the same key values (Can be implemented in a more generic way, but there is no need right now).
-
-```Python
-from oceanprotocol_job_details.job_details import OceanProtocolJobDetails
-from oceanprotocol_job_details.loaders.impl.environment import Keys
-
-# Fill in with values that will be used instead of env
-custom_mapper = {
-    Keys.ROOT_FOLDER: " ... ",  # Use when you don't want the algorithm to take '/' as base Path
-    Keys.ALGORITHM: " ... ",
-    Keys.DIDS: " ... ",
-    Keys.SECRET: " ... ",
-}
-
-job_details = OceanProtocolJobDetails(mapper=custom_mapper).load()
-```
-
-
oceanprotocol_job_details-0.1.1/oceanprotocol_job_details/config.py

@@ -0,0 +1,53 @@
+from dataclasses import dataclass, fields
+from logging import getLogger
+from pathlib import Path
+
+logger = getLogger(__name__)
+
+
+@dataclass
+class Config:
+    """Configuration class for the Ocean Protocol Job Details"""
+
+    path_data: str = "/data"
+    """The path to the data directory"""
+
+    path_inputs: str = path_data + "/inputs"
+    """The path to the inputs directory"""
+
+    path_ddos: str = path_data + "/ddos"
+    """The path to the DDOs directory"""
+
+    path_outputs: str = path_data + "/outputs"
+    """The path to the outputs directory"""
+
+    path_logs: str = path_data + "/logs"
+    """The path to the logs directory"""
+
+    path_algorithm_custom_parameters: str = path_inputs + "/algoCustomData.json"
+    """The path to the algorithm's custom parameters file"""
+
+
+config = Config()
+
+
+def update_config_from(base: Path) -> None:
+    """Updates the configuration to use the new base path, ensures that the base path exists.
+
+    Args:
+        base (Path): The new base path to use.
+    """
+
+    logger.info(f"Updating config to use base path: {base}")
+
+    base.mkdir(parents=True, exist_ok=True)
+
+    for field in fields(config):
+        default_value = field.default
+        if default_value is None or not isinstance(default_value, Path):
+            raise ValueError(f"Field {field.name} has no default value")
+
+        object.__setattr__(config, field.name, str(base / default_value))
+
+
+__all__ = ["config"]
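As an aside, a minimal sketch of how the module-level `config` singleton above is consumed by the other new modules in this release (`files.py` builds per-DID paths from it the same way); the DID value is a made-up placeholder:

```Python
from pathlib import Path

from oceanprotocol_job_details.config import config

# The defaults mirror the documented /data layout
ddos_dir = Path(config.path_ddos)      # /data/ddos
inputs_dir = Path(config.path_inputs)  # /data/inputs

did = "0123abc"            # hypothetical DID
ddo_path = ddos_dir / did  # where the DDO document is expected
input_dir = inputs_dir / did  # where that DID's input files live
```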
oceanprotocol_job_details-0.1.1/oceanprotocol_job_details/job_details.py

@@ -0,0 +1,47 @@
+import logging
+from dataclasses import dataclass
+from typing import Generic, Type, TypeVar
+
+from dataclasses_json import dataclass_json
+
+from oceanprotocol_job_details.loaders.impl.job_details import JobDetailsLoader
+from oceanprotocol_job_details.loaders.loader import Loader
+from oceanprotocol_job_details.ocean import JobDetails
+
+logging.basicConfig(
+    level=logging.INFO,
+    format="%(asctime)s [%(threadName)s] [%(levelname)s] %(message)s",
+    handlers=[logging.StreamHandler()],
+)
+logger = logging.getLogger(__name__)
+
+
+@dataclass_json
+@dataclass
+class _EmptyJobDetails: ...
+
+
+T = TypeVar("T")
+
+
+class OceanProtocolJobDetails(Generic[T]):
+    """The JobDetails class is a dataclass that holds the details of the current job.
+
+    Loading it will check the following:
+    1. That the needed environment variables are set
+    1. That the ocean protocol contains the needed data based on the passed environment variables
+
+    Those needed environment variables are:
+    - DIDS: The DIDs of the inputs
+    - TRANSFORMATION_DID: The DID of the transformation algorithm
+    - SECRET (optional): A really secret secret
+
+    """
+
+    def __init__(self, _type: Type[T] | None = None) -> None:
+        if _type is None:
+            _type = _EmptyJobDetails  # type: ignore[assignment]
+        self.job_details_loader: Loader[JobDetails[T]] = JobDetailsLoader(_type)  # type: ignore[arg-type]
+
+    def load(self) -> JobDetails[T]:
+        return self.job_details_loader.load()
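To make the docstring concrete, a sketch of driving this entry point from the three environment variables it names; the DID values are placeholders, and the corresponding files under `/data` would have to exist for the loaders' assertions to pass:

```Python
import os

from oceanprotocol_job_details.job_details import OceanProtocolJobDetails

# Hypothetical values; DIDS is a JSON-encoded list (see FilesLoader below)
os.environ["DIDS"] = '["0123abc"]'
os.environ["TRANSFORMATION_DID"] = "4567def"
os.environ["SECRET"] = "shh"  # optional

job_details = OceanProtocolJobDetails().load()  # type: ignore
print(len(job_details.files), "input DID(s)")
```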
oceanprotocol_job_details-0.1.1/oceanprotocol_job_details/loaders/impl/ddo.py

@@ -0,0 +1,26 @@
+from dataclasses import InitVar, dataclass, field
+from pathlib import Path
+from typing import final
+
+from oceanprotocol_job_details.ocean import DDO
+
+
+@final
+@dataclass(frozen=True)
+class DDOLoader:
+    ddo_paths: InitVar[list[Path]]
+    """The files to load the DDOs from"""
+
+    _ddo_paths: list[Path] = field(init=False)
+
+    def __post_init__(self, ddo_paths: list[Path]) -> None:
+        assert ddo_paths, "Missing DDO paths"
+
+        object.__setattr__(self, "_ddo_paths", ddo_paths)
+
+    def load(self) -> list[DDO]:
+        ddos = []
+        for path in self._ddo_paths:
+            with open(path, "r") as f:
+                ddos.append(DDO.from_json(f.read()))  # type: ignore
+        return ddos
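A sketch of `DDOLoader` in isolation, assuming a DDO JSON document exists at the hypothetical path below; `DDO.from_json` comes from the `dataclass_json` decorator in `ocean.py`:

```Python
from pathlib import Path

from oceanprotocol_job_details.loaders.impl.ddo import DDOLoader

# In a compute job the DDO for a DID sits at /data/ddos/<did> (invented DID here)
ddos = DDOLoader([Path("/data/ddos/0123abc")]).load()
print(ddos[0].metadata.name)
```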
oceanprotocol_job_details-0.1.1/oceanprotocol_job_details/loaders/impl/files.py

@@ -0,0 +1,71 @@
+from dataclasses import InitVar, dataclass, field
+from pathlib import Path
+from typing import Iterator, Sequence, final
+
+import json
+
+from oceanprotocol_job_details.config import config
+
+
+@dataclass(frozen=True)
+class DIDPaths:
+    did: str
+    ddo: Path
+    input_files: Sequence[Path]
+
+    def __post_init__(self) -> None:
+        assert self.ddo.exists(), f"DDO {self.ddo} does not exist"
+        for input_file in self.input_files:
+            assert input_file.exists(), f"File {input_file} does not exist"
+
+    def __len__(self) -> int:
+        return len(self.input_files)
+
+
+@dataclass(frozen=True)
+class Files:
+    files: Sequence[DIDPaths]
+
+    def __iter__(self) -> Iterator[DIDPaths]:
+        return iter(self.files)
+
+    def __len__(self) -> int:
+        return len(self.files)
+
+
+@final
+@dataclass(frozen=True)
+class FilesLoader:
+    dids: InitVar[str | None]
+    """Input DIDs"""
+
+    transformation_did: InitVar[str | None]
+    """DID for the transformation algorithm"""
+
+    _dids: Sequence[str] = field(init=False)
+    _transformation_did: str = field(init=False)
+
+    def __post_init__(
+        self,
+        dids: str | None,
+        transformation_did: str | None,
+    ) -> None:
+        assert dids, "Missing DIDs"
+        assert transformation_did, "Missing transformation DID"
+
+        object.__setattr__(self, "_dids", json.loads(dids))
+        object.__setattr__(self, "_transformation_did", transformation_did)
+
+    def load(self) -> Files:
+        files: list[DIDPaths] = []
+        for did in self._dids:
+            base = Path(config.path_inputs) / did
+            files.append(
+                DIDPaths(
+                    did=did,
+                    ddo=Path(config.path_ddos) / did,
+                    input_files=list(base.iterdir()),
+                )
+            )
+
+        return Files(files)
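A sketch of the on-disk contract `FilesLoader` enforces, under the default `/data` layout; the DID and transformation DID are invented:

```Python
from oceanprotocol_job_details.loaders.impl.files import FilesLoader

# For the hypothetical DID "0123abc" the loader expects:
#   /data/ddos/0123abc     the DDO document (existence checked by DIDPaths)
#   /data/inputs/0123abc/  a directory whose entries become input_files
files = FilesLoader('["0123abc"]', "4567def").load()

for did_paths in files:  # Files iterates over its DIDPaths entries
    print(did_paths.did, len(did_paths), "input file(s)")
```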
oceanprotocol_job_details-0.1.1/oceanprotocol_job_details/loaders/impl/job_details.py

@@ -0,0 +1,25 @@
+import os
+from dataclasses import dataclass, field
+from typing import Generic, Type, TypeVar, final
+
+from oceanprotocol_job_details.loaders.impl.ddo import DDOLoader
+from oceanprotocol_job_details.loaders.impl.files import FilesLoader
+from oceanprotocol_job_details.ocean import JobDetails
+
+T = TypeVar("T")
+
+
+@final
+@dataclass(frozen=True)
+class JobDetailsLoader(Generic[T]):
+    _type: Type[T] = field(repr=False)
+
+    def load(self) -> JobDetails[T]:
+        dids = os.environ.get("DIDS")
+        transformation_did = os.environ.get("TRANSFORMATION_DID")
+        secret = os.environ.get("SECRET")
+
+        files = FilesLoader(dids, transformation_did).load()
+        ddos = DDOLoader([f.ddo for f in files]).load()
+
+        return JobDetails(files=files, secret=secret, ddos=ddos, _type=self._type)
oceanprotocol_job_details-0.1.1/oceanprotocol_job_details/ocean.py

@@ -0,0 +1,183 @@
+from dataclasses import dataclass, field
+from functools import cached_property
+from typing import Any, Generic, Optional, Type, TypeVar, final
+
+from dataclasses_json import config as dc_config
+from dataclasses_json import dataclass_json
+
+from oceanprotocol_job_details.config import config
+from oceanprotocol_job_details.loaders.impl.files import Files
+
+T = TypeVar("T")
+
+
+@dataclass_json
+@dataclass
+class Credential:
+    type: str
+    values: list[str]
+
+
+@dataclass_json
+@dataclass
+class Credentials:
+    allow: list[Credential]
+    deny: list[Credential]
+
+
+@dataclass_json
+@dataclass
+class Container:
+    image: str
+    tag: str
+    entrypoint: str
+
+
+@dataclass_json
+@dataclass
+class Algorithm:  # type: ignore
+    container: Container
+    language: str
+    version: str
+    consumerParameters: Any  # type: ignore
+
+
+@dataclass_json
+@dataclass
+class Metadata:
+    description: str
+    name: str
+    type: str
+    author: str
+    license: str
+    algorithm: Optional[Algorithm] = None
+    tags: Optional[list[str]] = None
+    created: Optional[str] = None
+    updated: Optional[str] = None
+    copyrightHolder: Optional[str] = None
+    links: Optional[list[str]] = None
+    contentLanguage: Optional[str] = None
+    categories: Optional[list[str]] = None
+
+
+@dataclass_json
+@dataclass
+class ConsumerParameters:
+    name: str
+    type: str
+    label: str
+    required: bool
+    description: str
+    default: str
+    option: Optional[list[str]] = None
+
+
+@dataclass_json
+@dataclass
+class Service:
+    id: str
+    type: str
+    timeout: int
+    files: str
+    datatokenAddress: str
+    serviceEndpoint: str
+    additionalInformation: Optional[str] = None
+    name: Optional[str] = None
+    description: Optional[str] = None
+
+
+@dataclass_json
+@dataclass
+class Event:
+    tx: str
+    block: int
+    from_: str = field(metadata=dc_config(field_name="from"))
+    contract: str
+    datetime: str
+
+
+@dataclass_json
+@dataclass
+class NFT:
+    address: str
+    name: str
+    symbol: str
+    state: int
+    tokenURI: str
+    owner: str
+    created: str
+
+
+@dataclass_json
+@dataclass
+class DataToken:
+    address: str
+    name: str
+    symbol: str
+    serviceId: str
+
+
+@dataclass_json
+@dataclass
+class Price:
+    value: int
+
+
+@dataclass_json
+@dataclass
+class Stats:
+    allocated: int
+    orders: int
+    price: Price
+
+
+@dataclass_json
+@dataclass
+class Purgatory:
+    state: bool
+
+
+@dataclass_json
+@dataclass
+class DDO:
+    id: str
+    context: list[str] = field(metadata=dc_config(field_name="@context"))
+    nftAddress: str
+    chainId: int
+    version: str
+    metadata: Metadata
+    services: list[Service]
+    credentials: Credentials
+    event: Event
+    nft: NFT
+    datatokens: list[DataToken]
+    stats: Stats
+    purgatory: Purgatory
+
+
+@final
+@dataclass_json
+@dataclass(frozen=True)
+class JobDetails(Generic[T]):
+    files: Files
+    """The input filepaths"""
+
+    ddos: list[DDO]
+    """list of paths to the DDOs"""
+
+    # Store the type explicitly to avoid issues
+    _type: Type[T] = field(repr=False)
+
+    secret: str | None = None
+    """Shh it's a secret"""
+
+    def __post_init__(self) -> None:
+        if not hasattr(self._type, "__dataclass_fields__"):
+            raise TypeError(f"{self._type} is not a dataclass type")
+
+    @cached_property
+    def input_parameters(self) -> T:
+        """Read the input parameters and return them in an instance of the dataclass T"""
+
+        with open(config.path_algorithm_custom_parameters, "r") as f:
+            return dataclass_json(self._type).from_json(f.read())  # type: ignore
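The typed-parameter path bottoms out in `input_parameters` above: it reads `config.path_algorithm_custom_parameters` (`/data/inputs/algoCustomData.json` by default) and deserializes it into the dataclass passed at construction. A sketch with an invented file content matching the README's `Input` example:

```Python
# Assume /data/inputs/algoCustomData.json contains: {"name": "Ada", "age": 36}
from dataclasses import dataclass

from oceanprotocol_job_details.job_details import OceanProtocolJobDetails

@dataclass
class Input:
    name: str
    age: int

details = OceanProtocolJobDetails(Input).load()
params = details.input_parameters  # an Input instance, cached after the first read
print(params.name, params.age)     # -> Ada 36
```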
oceanprotocol_job_details-0.1.1/oceanprotocol_job_details/utils.py

@@ -0,0 +1,33 @@
+from logging import getLogger
+from typing import Mapping, Optional, TypeVar
+
+T = TypeVar("T")
+logger = getLogger(__name__)
+
+
+def get(
+    map: Mapping[str, T],
+    key: str,
+    default: Optional[T] = None,
+) -> T | None:
+    """Get the value of a key from a dictionary, if not found return the default value if given, otherwise raise a KeyError
+
+    :param map: original map to get the item from
+    :type map: Mapping[str, T]
+    :param key: key to get the value from
+    :type key: str
+    :param default: default value if missing, defaults to None
+    :type default: Optional[T], optional
+    :raises KeyError: if the value is missing and no default is provided
+    :return: value of the key
+    :rtype: T
+    """
+
+    if key in map.keys():
+        return map.get(key)
+
+    if default is None:
+        raise KeyError(f"Key {key} not found")
+
+    logger.info(f"Key {key} not found, returning default value {default}")
+    return default
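For completeness, a sketch of the new `get` helper; note that, as written, it distinguishes only `default is None`, so a missing key with no explicit default raises:

```Python
from oceanprotocol_job_details.utils import get

env = {"DIDS": '["0123abc"]'}  # hypothetical mapping

get(env, "DIDS")                   # -> '["0123abc"]'
get(env, "SECRET", default="n/a")  # -> 'n/a' (logged, then returned)
# get(env, "SECRET")               # raises KeyError: key missing, no default
```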
{oceanprotocol_job_details-0.0.12 → oceanprotocol_job_details-0.1.1}/pyproject.toml

@@ -1,6 +1,6 @@
 [project]
 name = "oceanprotocol-job-details"
-version = "0.0.12"
+version = "0.1.1"
 description = "A Python package to get details from OceanProtocol jobs"
 authors = [
     { name = "Christian López García", email = "christian.lopez@udl.cat" },
@@ -13,12 +13,15 @@ classifiers = [
     "Operating System :: OS Independent",
     "License :: OSI Approved :: MIT License",
 ]
-dependencies = ["
+dependencies = ["dataclasses-json>=0.6.7"]
 
 [project.urls]
 Homepage = "https://github.com/AgrospAI/oceanprotocol-job-details"
 Issues = "https://github.com/AgrospAI/oceanprotocol-job-details/issues"
 
+[tool.pytest.ini_options]
+pythonpath = "oceanprotocol_job_details"
+
 [build-system]
 requires = ["hatchling"]
 build-backend = "hatchling.build"
@@ -28,3 +31,11 @@ include = ["oceanprotocol_job_details"]
 
 [tool.hatch.build.targets.wheel]
 include = ["oceanprotocol_job_details"]
+
+[tool.mypy]
+strict = true
+warn_return_any = true
+disallow_any_explicit = true
+
+[dependency-groups]
+dev = ["mypy>=1.15.0", "pytest>=8.3.4"]
oceanprotocol_job_details-0.0.12/oceanprotocol_job_details/dataclasses/constants.py

@@ -1,41 +0,0 @@
-from dataclasses import dataclass
-from pathlib import Path
-
-
-@dataclass(frozen=True)
-class _DidKeys:
-    """Common keys inside the DIDs"""
-
-    SERVICES: str = "services"
-    SERVICE_TYPE: str = "type"
-    ATTRIBUTES: str = "attributes"
-    MAIN: str = "main"
-    FILES: str = "files"
-
-
-@dataclass(frozen=True)
-class _ServiceType:
-    """Service types inside the DIDs"""
-
-    METADATA: str = "metadata"
-
-
-@dataclass()
-class _Paths:
-    """Common paths used in the Ocean Protocol directories"""
-
-    DATA: Path = Path("/data")
-
-    INPUTS: Path = DATA / "inputs"
-    DDOS: Path = DATA / "ddos"
-    OUTPUTS: Path = DATA / "outputs"
-    LOGS: Path = DATA / "logs"
-
-    ALGORITHM_CUSTOM_PARAMETERS: Path = INPUTS / "algoCustomData.json"
-
-
-DidKeys = _DidKeys()
-ServiceType = _ServiceType()
-Paths = _Paths()
-
-del _DidKeys, _ServiceType, _Paths
oceanprotocol_job_details-0.0.12/oceanprotocol_job_details/dataclasses/job_details.py

@@ -1,82 +0,0 @@
-import logging
-import os
-from dataclasses import InitVar, dataclass
-from pathlib import Path
-from typing import Any, Mapping, Optional, Sequence
-
-from orjson import JSONDecodeError, loads
-
-from oceanprotocol_job_details.dataclasses.constants import Paths
-
-_MetadataType = Mapping[str, Any]
-
-logger = logging.getLogger(__name__)
-
-
-@dataclass(frozen=True)
-class Parameters:
-    """Custom data for the algorithm, such as the algorithm's parameters"""
-
-    parameters: _MetadataType
-    """The parameters used by the algorithm"""
-
-
-@dataclass(frozen=True)
-class Algorithm:
-    """Details of the algorithm used to process the data"""
-
-    did: str
-    """The DID of the algorithm used to process the data"""
-
-    ddo: Path
-    """The DDO path of the algorithm used to process the data"""
-
-
-@dataclass
-class JobDetails:
-    """Details of the current job, such as the used inputs and algorithm"""
-
-    dids: Sequence[Path]
-    """Identifiers for the inputs"""
-
-    files: Mapping[str, Sequence[Path]]
-    """Paths to the input files"""
-
-    secret: Optional[str]
-    """The secret used to process the data"""
-
-    algorithm: Optional[Algorithm]
-    """Details of the used algorithm"""
-
-    # Cache parameters, should not be included as _fields_ of the class
-    _parameters: InitVar[Optional[_MetadataType]] = None
-
-    @property
-    def parameters(self, parameters: Optional[Path] = None) -> _MetadataType:
-        """Parameters for algorithm job, read from default path"""
-
-        if parameters is None:
-            parameters = Paths.ALGORITHM_CUSTOM_PARAMETERS
-
-        if self._parameters is None:
-            if not parameters.exists():
-                logging.warning(f"Missing parameters file: {parameters} not found")
-                self._parameters = {}
-            else:
-                # Load the parameters from filesystem
-                with open(parameters, "r") as f:
-                    try:
-                        self._parameters = loads(f.read())
-                    except JSONDecodeError as e:
-                        self._parameters = {}
-                        logger.warning(
-                            f"Error loading parameters file {parameters}: {e}"
-                        )
-
-        return self._parameters
-
-
-del _MetadataType
-
-
-__all__ = ["Algorithm", "Parameters", "JobDetails"]
oceanprotocol_job_details-0.0.12/oceanprotocol_job_details/dataclasses/ocean.py

@@ -1,67 +0,0 @@
-from dataclasses import Field
-from datetime import datetime
-from typing import Annotated, Any, List, Optional
-
-from pydantic import BaseModel, HttpUrl
-
-"""Base classes for the Ocean Protocol algorithm structure"""
-
-
-class Credential:
-    type: Annotated[str, Field(frozen=True)]
-    values: Annotated[List[str], Field(frozen=True)]
-
-
-class Credentials:
-    allow: Optional[Annotated[List[Credential], Field(frozen=True)]] = []
-    deny: Optional[Annotated[List[Credential], Field(frozen=True)]] = []
-
-
-class Metadata(BaseModel):
-    """Base class for the Metadata structure"""
-
-    description: Annotated[str, Field(frozen=True)]
-    name: Annotated[str, Field(frozen=True)]
-    type: Annotated[str, Field(frozen=True)]
-    author: Annotated[str, Field(frozen=True)]
-    license: Annotated[str, Field(frozen=True)]
-
-    algorithm: Any
-    tags: Optional[Annotated[List[str], Field(frozen=True)]] = None
-    created: Optional[Annotated[datetime, Field(frozen=True)]] = None
-    updated: Optional[Annotated[datetime, Field(frozen=True)]] = None
-    copyrightHolder: Optional[Annotated[str, Field(frozen=True)]] = None
-    links: Optional[Annotated[List[HttpUrl], Field(frozen=True)]] = None
-    contentLanguage: Optional[Annotated[str, Field(frozen=True)]] = None
-    categories: Optional[Annotated[List[str], Field(frozen=True)]] = None
-
-
-class Service(BaseModel):
-    """Base class for the Service structure"""
-
-    id: Annotated[str, Field(frozen=True)]
-    type: Annotated[str, Field(frozen=True)]
-    timeout: Annotated[int, Field(frozen=True)]
-    files: Annotated[str, Field(frozen=True)]
-    datatokenAddress: Annotated[str, Field(frozen=True)]
-    serviceEndpoint: Annotated[HttpUrl, Field(frozen=True)]
-
-    compute: Any
-    consumerParameters: Any
-    additionalInformation: Any
-    name: Optional[Annotated[str, Field(frozen=True)]] = None
-    description: Optional[Annotated[str, Field(frozen=True)]] = None
-
-
-class DDO(BaseModel):
-    """DDO structure in Ocean Protocol"""
-
-    id: Annotated[str, Field(frozen=True)]
-    context: Annotated[List[str], Field(frozen=True)]
-    version: Annotated[str, Field(frozen=True)]
-    chainId: Annotated[int, Field(frozen=True)]
-    nftAddress: Annotated[str, Field(frozen=True)]
-    metadata: Annotated[Metadata, Field(frozen=True)]
-    services: Annotated[List[Service], Field(frozen=True)]
-
-    credentials: Annotated[Optional[str], Field(frozen=True)] = None
oceanprotocol_job_details-0.0.12/oceanprotocol_job_details/job_details.py

@@ -1,40 +0,0 @@
-import logging
-import os
-from typing import Any, Literal, Mapping, Optional
-
-from oceanprotocol_job_details.dataclasses.job_details import JobDetails
-from oceanprotocol_job_details.loaders.impl.map import Keys, Map
-from oceanprotocol_job_details.loaders.loader import Loader
-
-# Logging setup for the module
-logging.basicConfig(
-    level=logging.INFO,
-    format="%(asctime)s [%(threadName)s] [%(levelname)s] %(message)s",
-    handlers=[logging.StreamHandler()],
-)
-
-_Implementations = Literal["env"]
-
-
-class OceanProtocolJobDetails(Loader[JobDetails]):
-    """Decorator that loads the JobDetails from the given implementation"""
-
-    def __init__(
-        self,
-        implementation: Optional[_Implementations] = "map",
-        mapper: Mapping[str, Any] = os.environ,
-        keys: Keys = Keys(),
-        *args,
-        **kwargs,
-    ):
-        if implementation == "map":
-            # As there are not more implementations, we can use the EnvironmentLoader directly
-            self._loader = lambda: Map(mapper=mapper, keys=keys, *args, **kwargs)
-        else:
-            raise NotImplementedError(f"Implementation {implementation} not supported")
-
-    def load(self) -> JobDetails:
-        return self._loader().load()
-
-
-del _Implementations
File without changes
oceanprotocol_job_details-0.0.12/oceanprotocol_job_details/loaders/impl/map.py

@@ -1,120 +0,0 @@
-"""Loads the current Job Details from the environment variables, could be abstracted to a more general 'mapper loader' but won't, since right now it fits our needs"""
-
-from dataclasses import dataclass
-from logging import getLogger
-from pathlib import Path
-from typing import Mapping, Optional, Sequence, final
-
-from orjson import JSONDecodeError, loads
-
-from oceanprotocol_job_details.dataclasses.constants import DidKeys, Paths, ServiceType
-from oceanprotocol_job_details.dataclasses.job_details import Algorithm, JobDetails
-from oceanprotocol_job_details.loaders.impl.utils import do, execute_predicate
-from oceanprotocol_job_details.loaders.loader import Loader
-
-logger = getLogger(__name__)
-
-
-@dataclass(frozen=True)
-class Keys:
-    """Environment keys passed to the algorithm"""
-
-    ROOT_FOLDER = "ROOT_FOLDER"
-    SECRET: str = "secret"
-    ALGORITHM: str = "TRANSFORMATION_DID"
-    DIDS: str = "DIDS"
-
-
-def _update_paths_from_root(root: Path):
-    """Update the default from a root folder
-
-    :param root: root folder to update the paths
-    :type root: Path
-    """
-
-    Paths.DATA = root / "data"
-    Paths.INPUTS = Paths.DATA / "inputs"
-    Paths.DDOS = Paths.DATA / "ddos"
-    Paths.OUTPUTS = Paths.DATA / "outputs"
-    Paths.LOGS = Paths.DATA / "logs"
-    Paths.ALGORITHM_CUSTOM_PARAMETERS = Paths.INPUTS / "algoCustomData.json"
-
-
-def _files_from_service(service):
-    files = service[DidKeys.FILES]
-    if isinstance(files, str):
-        return [files]
-    return files
-
-
-@final
-class Map(Loader[JobDetails]):
-    """Loads the current Job Details from the environment variables"""
-
-    def __init__(self, mapper: Mapping[str, str], keys: Keys, *args, **kwargs) -> None:
-        self._mapper = mapper
-        self._keys = keys
-
-        execute_predicate(
-            lambda: _update_paths_from_root(Path(self._mapper[Keys.ROOT_FOLDER])),
-            lambda: Keys.ROOT_FOLDER in self._mapper,
-        )
-
-    def load(self, *args, **kwargs) -> JobDetails:
-        return self._from_dids(self._dids())
-
-    def _from_dids(self, dids: Sequence[str]) -> JobDetails:
-        return JobDetails(
-            dids=dids,
-            files=self._files(dids),
-            algorithm=self._algorithm(),
-            secret=self._secret(),
-        )
-
-    def _dids(self) -> Sequence[str]:
-        return loads(self._mapper.get(self._keys.DIDS, []))
-
-    def _files(self, dids: Optional[Sequence[str]]) -> Mapping[str, Sequence[Path]]:
-        """Iterate through the given DIDs and retrieve their respective filepaths
-
-        :param dids: dids to read the files from
-        :type dids: Optional[Sequence[str]]
-        :raises FileNotFoundError: if the DDO file does not exist
-        :return: _description_
-        :rtype: Mapping[str, Sequence[Path]]
-        """
-
-        files: Mapping[str, Sequence[Path]] = {}
-        for did in dids:
-            # For each given DID, check if the DDO file exists and read its metadata
-            ddo_path = Paths.DDOS / did
-            do(lambda: ddo_path.exists(), exc=FileNotFoundError("Missing DDO file"))
-
-            with open(ddo_path, "r") as f:
-                ddo = do(lambda: loads(f.read()), JSONDecodeError)
-            if not ddo:
-                continue
-
-            for service in do(lambda: ddo[DidKeys.SERVICES], KeyError, default=[]):
-                # if service[DidKeys.SERVICE_TYPE] != ServiceType.METADATA:
-                #     continue  # Only read the metadata of the services
-
-                files_n = do(lambda: len(_files_from_service(service)), KeyError)
-                ddo_path = Paths.INPUTS / did
-                files[did] = [ddo_path / str(idx) for idx in range(files_n)]
-        return files
-
-    def _algorithm(self) -> Optional[Algorithm]:
-        did = self._mapper.get(self._keys.ALGORITHM, None)
-        if not did:
-            return None
-
-        ddo = Paths.DDOS / did
-
-        return Algorithm(
-            did,
-            do(lambda: ddo.exists() and ddo, exc=FileNotFoundError("Missing DDO file")),
-        )
-
-    def _secret(self) -> Optional[str]:
-        return self._mapper.get(self._keys.SECRET, None)
oceanprotocol_job_details-0.0.12/oceanprotocol_job_details/loaders/impl/utils.py

@@ -1,57 +0,0 @@
-from logging import WARNING, getLogger
-from typing import Callable, TypeVar
-
-logger = getLogger(__name__)
-R = TypeVar("R")
-
-
-def do(
-    function: Callable[[], R],
-    exception: Exception = Exception,
-    *,
-    log_level=WARNING,
-    default: R = None,
-    exc=False,
-) -> R:
-    """Executes a function and logs the exception if it fails
-
-    :param function: function to call
-    :type function: Callable
-    :param exception: exception to catch
-    :type exception: Exception
-    :param log_level: logging level to use
-    :type log_level: int
-    :param default: default value to return if the function fails
-    :type default: R
-    :param exc: if the exception should be raised
-    :type exc: bool
-    :return: result of the function and if it was successful
-    :rtype: R
-    """
-
-    try:
-        return function()
-    except exception as e:
-        logger.log(log_level, e)
-        if exc:
-            if isinstance(exc, Exception):
-                raise exc from e
-            raise e
-        return default
-
-
-def execute_predicate(
-    function: Callable[[], R],
-    predicate: Callable[[], bool],
-) -> R | bool:
-    """Executes a function if the predicate is true"
-
-    :param function: function to call
-    :type function: Callable
-    :param predicate: predicate to check
-    :type predicate: Callable
-    :return: result of the function and if it was successful
-    :rtype: R | bool
-    """
-
-    return predicate() and function()
oceanprotocol_job_details-0.0.12/oceanprotocol_job_details/loaders/loader.py

@@ -1,15 +0,0 @@
-from abc import ABC, abstractmethod
-from typing import Generic, TypeVar
-
-
-T = TypeVar("T")
-
-
-class Loader(ABC, Generic[T]):
-    @abstractmethod
-    def load(self, *args, **kwargs) -> T:
-        """Load an instance of the given type"""
-        pass
-
-
-del T
File without changes
File without changes
File without changes
File without changes