oceanprotocol-job-details 0.3.12__tar.gz → 0.3.14__tar.gz
This diff covers the content of publicly available package versions released to a supported registry. It is provided for informational purposes only and reflects the changes between the two versions as they appear in the public registry.
- {oceanprotocol_job_details-0.3.12 → oceanprotocol_job_details-0.3.14}/PKG-INFO +5 -4
- {oceanprotocol_job_details-0.3.12 → oceanprotocol_job_details-0.3.14}/README.md +4 -3
- {oceanprotocol_job_details-0.3.12 → oceanprotocol_job_details-0.3.14}/oceanprotocol_job_details/di.py +3 -3
- oceanprotocol_job_details-0.3.14/oceanprotocol_job_details/domain/__init__.py +10 -0
- oceanprotocol_job_details-0.3.12/oceanprotocol_job_details/domain.py → oceanprotocol_job_details-0.3.14/oceanprotocol_job_details/domain/ddo.py +1 -64
- oceanprotocol_job_details-0.3.14/oceanprotocol_job_details/domain/derived.py +71 -0
- oceanprotocol_job_details-0.3.14/oceanprotocol_job_details/executors.py +40 -0
- {oceanprotocol_job_details-0.3.12 → oceanprotocol_job_details-0.3.14}/oceanprotocol_job_details/helpers.py +1 -1
- oceanprotocol_job_details-0.3.14/oceanprotocol_job_details/loaders/impl/ddo.py +26 -0
- {oceanprotocol_job_details-0.3.12 → oceanprotocol_job_details-0.3.14}/oceanprotocol_job_details/loaders/impl/files.py +1 -1
- oceanprotocol_job_details-0.3.14/oceanprotocol_job_details/loaders/impl/job_details.py +28 -0
- {oceanprotocol_job_details-0.3.12 → oceanprotocol_job_details-0.3.14}/oceanprotocol_job_details/ocean.py +16 -13
- {oceanprotocol_job_details-0.3.12 → oceanprotocol_job_details-0.3.14}/oceanprotocol_job_details/settings.py +3 -3
- {oceanprotocol_job_details-0.3.12 → oceanprotocol_job_details-0.3.14}/pyproject.toml +9 -2
- oceanprotocol_job_details-0.3.12/oceanprotocol_job_details/executors.py +0 -24
- oceanprotocol_job_details-0.3.12/oceanprotocol_job_details/loaders/impl/ddo.py +0 -24
- oceanprotocol_job_details-0.3.12/oceanprotocol_job_details/loaders/impl/job_details.py +0 -29
- {oceanprotocol_job_details-0.3.12 → oceanprotocol_job_details-0.3.14}/.gitignore +0 -0
- {oceanprotocol_job_details-0.3.12 → oceanprotocol_job_details-0.3.14}/LICENSE +0 -0
- {oceanprotocol_job_details-0.3.12 → oceanprotocol_job_details-0.3.14}/oceanprotocol_job_details/__init__.py +0 -0
- {oceanprotocol_job_details-0.3.12 → oceanprotocol_job_details-0.3.14}/oceanprotocol_job_details/loaders/__init__.py +0 -0
- {oceanprotocol_job_details-0.3.12 → oceanprotocol_job_details-0.3.14}/oceanprotocol_job_details/loaders/impl/__init__.py +0 -0
- {oceanprotocol_job_details-0.3.12 → oceanprotocol_job_details-0.3.14}/oceanprotocol_job_details/loaders/loader.py +0 -0
- {oceanprotocol_job_details-0.3.12 → oceanprotocol_job_details-0.3.14}/oceanprotocol_job_details/py.typed +0 -0

PKG-INFO

@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: oceanprotocol-job-details
-Version: 0.3.12
+Version: 0.3.14
 Summary: A Python package to get details from OceanProtocol jobs
 Project-URL: Homepage, https://github.com/AgrospAI/oceanprotocol-job-details
 Project-URL: Issues, https://github.com/AgrospAI/oceanprotocol-job-details/issues
@@ -72,8 +72,9 @@ class InputParameters(BaseModel):
 job_details = load_job_details({"base_dir": "...", "transformation_did": "..."}, InputParameters)
 
 # Usage
-job_details.input_parameters
-
+parameters = await job_details.input_parameters()
+parameters.foo
+parameters.foo.bar
 ```
 
 The values to fill the custom `InputParameters` will be parsed from the `algoCustomData.json` located next to the input data directories.
@@ -96,7 +97,7 @@ _, file_path = next(job_details.inputs())
 
 ```bash
 data # Root /data directory
-├── ddos # Contains the loaded dataset's DDO
+├── ddos # Contains the loaded dataset's DDO (metadata)
 │ ├── 17feb...e42 # DDO file
 │ └── ... # One DDO per loaded dataset
 ├── inputs # Datasets dir
README.md

@@ -46,8 +46,9 @@ class InputParameters(BaseModel):
 job_details = load_job_details({"base_dir": "...", "transformation_did": "..."}, InputParameters)
 
 # Usage
-job_details.input_parameters
-
+parameters = await job_details.input_parameters()
+parameters.foo
+parameters.foo.bar
 ```
 
 The values to fill the custom `InputParameters` will be parsed from the `algoCustomData.json` located next to the input data directories.
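Because `input_parameters()` is now a coroutine, the README snippet above awaits it. Below is a minimal sketch of driving that from a plain script, assuming a hypothetical `InputParameters` model with a nested `foo.bar` field; the import path for `load_job_details` is also an assumption, since the excerpt only shows the call.

```python
import asyncio

from pydantic import BaseModel

# Import path assumed for illustration; the README excerpt shows only the call.
from oceanprotocol_job_details import load_job_details


class Foo(BaseModel):
    bar: int = 0


class InputParameters(BaseModel):
    foo: Foo = Foo()


async def main() -> None:
    job_details = load_job_details(
        {"base_dir": "...", "transformation_did": "..."}, InputParameters
    )
    # The accessor returns None when no input_type was configured.
    parameters = await job_details.input_parameters()
    if parameters is not None:
        print(parameters.foo.bar)


asyncio.run(main())
```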
@@ -70,7 +71,7 @@ _, file_path = next(job_details.inputs())
 
 ```bash
 data # Root /data directory
-├── ddos # Contains the loaded dataset's DDO
+├── ddos # Contains the loaded dataset's DDO (metadata)
 │ ├── 17feb...e42 # DDO file
 │ └── ... # One DDO per loaded dataset
 ├── inputs # Datasets dir
oceanprotocol_job_details/di.py

@@ -8,7 +8,7 @@ from oceanprotocol_job_details.loaders.impl.job_details import JobDetailsLoader
 from oceanprotocol_job_details.domain import Paths
 
 
-InputParametersT = TypeVar("InputParametersT", BaseModel
+InputParametersT = TypeVar("InputParametersT", bound=BaseModel)
 
 
 class Container(containers.DeclarativeContainer, Generic[InputParametersT]):
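For context on this TypeVar change, which recurs in `helpers.py` and `ocean.py` below: `bound=BaseModel` admits any `BaseModel` subclass and lets a type checker carry the concrete subtype through, whereas the constrained form used in 0.3.12 (the removed loader had `TypeVar("T", BaseModel, None)`) only allows the listed types themselves. A small illustrative sketch; `MyParams` and `echo` are hypothetical names.

```python
from typing import TypeVar

from pydantic import BaseModel

BoundT = TypeVar("BoundT", bound=BaseModel)                    # any BaseModel subclass
ConstrainedT = TypeVar("ConstrainedT", BaseModel, type(None))  # only these exact types


class MyParams(BaseModel):
    name: str = "demo"


def echo(model: BoundT) -> BoundT:
    # Under bound=, echo(MyParams()) is inferred as MyParams, not plain BaseModel.
    return model


print(echo(MyParams()).name)
```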
@@ -26,7 +26,7 @@ class Container(containers.DeclarativeContainer, Generic[InputParametersT]):
 
     files = providers.Factory(lambda loader: loader.load(), loader=file_loader)
     ddo_loader = providers.Factory(DDOLoader, files=files)
-
+    metadata = providers.Factory(lambda loader: loader.load(), loader=ddo_loader)
 
     job_details_loader: providers.Factory[JobDetailsLoader[InputParametersT]] = (
         providers.Factory(
@@ -34,6 +34,6 @@ class Container(containers.DeclarativeContainer, Generic[InputParametersT]):
             files=files,
             secret=config.secret,
             paths=paths,
-
+            metadata=metadata,
         )
     )
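Taken together, the three `di.py` hunks replace the old `ddos` provider with a `metadata` provider and pass it into the `JobDetailsLoader` factory. Below is a standalone sketch of the same chained-`Factory` pattern with the `dependency-injector` library; `FakeFileLoader`, `FakeDDOLoader`, and `DemoContainer` are stand-ins rather than the package's real classes.

```python
from dependency_injector import containers, providers


class FakeFileLoader:
    def load(self) -> list[str]:
        return ["did-a", "did-b"]  # stand-in for the Files sequence


class FakeDDOLoader:
    def __init__(self, files: list[str]) -> None:
        self.files = files

    def load(self) -> dict[str, str]:
        return {did: f"ddo-for-{did}" for did in self.files}  # stand-in for DID -> DDO


class DemoContainer(containers.DeclarativeContainer):
    file_loader = providers.Factory(FakeFileLoader)
    # Each Factory resolves the previous provider and calls .load(), as in di.py.
    files = providers.Factory(lambda loader: loader.load(), loader=file_loader)
    ddo_loader = providers.Factory(FakeDDOLoader, files=files)
    metadata = providers.Factory(lambda loader: loader.load(), loader=ddo_loader)


container = DemoContainer()
print(container.metadata())  # {'did-a': 'ddo-for-did-a', 'did-b': 'ddo-for-did-b'}
```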
oceanprotocol_job_details/domain.py → oceanprotocol_job_details/domain/ddo.py (renamed)

@@ -1,7 +1,5 @@
 # mypy: disable-error-code=explicit-any
-from dataclasses import InitVar, dataclass, field
-from pathlib import Path
-from typing import Generator, List, Optional, Sequence, TypeAlias
+from typing import Optional
 
 from pydantic import BaseModel, ConfigDict, Field, JsonValue
 
@@ -124,64 +122,3 @@ class DDO(BaseModel):
     purgatory: Purgatory
 
     model_config = ConfigDict(populate_by_name=True)
-
-
-@dataclass(frozen=True)
-class DIDPaths:
-    did: str
-    ddo: Path = field(repr=False)
-
-    files: InitVar[Generator[Path, None, None]]
-
-    _input: List[Path] = field(init=False, repr=False)
-
-    def __post_init__(self, files: Generator[Path, None, None]) -> None:
-        assert self.ddo.exists(), f"DDO {self.ddo} does not exist"
-
-        object.__setattr__(self, "_input", list(files))
-
-    @property
-    def input_files(self) -> List[Path]:
-        return self._input
-
-    def __len__(self) -> int:
-        return len(self._input)
-
-
-Files: TypeAlias = Sequence[DIDPaths]
-
-
-@dataclass(frozen=True)
-class Paths:
-    """Configuration class for the Ocean Protocol Job Details"""
-
-    base_dir: InitVar[Path | None] = None
-
-    _base: Path = field(init=False, repr=False)
-
-    def __post_init__(self, base_dir: Path | None) -> None:
-        object.__setattr__(self, "_base", base_dir if base_dir else Path("/data"))
-
-    @property
-    def data(self) -> Path:
-        return self._base
-
-    @property
-    def inputs(self) -> Path:
-        return self.data / "inputs"
-
-    @property
-    def ddos(self) -> Path:
-        return self.data / "ddos"
-
-    @property
-    def outputs(self) -> Path:
-        return self.data / "outputs"
-
-    @property
-    def logs(self) -> Path:
-        return self.data / "logs"
-
-    @property
-    def algorithm_custom_parameters(self) -> Path:
-        return self.inputs / "algoCustomData.json"
oceanprotocol_job_details-0.3.14/oceanprotocol_job_details/domain/derived.py (new file)

@@ -0,0 +1,71 @@
+from dataclasses import InitVar, dataclass, field
+from pathlib import Path
+from typing import Dict, Generator, List, Sequence, TypeAlias
+
+from oceanprotocol_job_details.domain.ddo import DDO
+
+DID: TypeAlias = str
+
+
+@dataclass(frozen=True)
+class DIDPaths:
+    did: DID
+    ddo: Path = field(repr=False)
+
+    files: InitVar[Generator[Path, None, None]]
+
+    _input: List[Path] = field(init=False, repr=False)
+
+    def __post_init__(self, files: Generator[Path, None, None]) -> None:
+        assert self.ddo.exists(), f"DDO {self.ddo} does not exist"
+
+        object.__setattr__(self, "_input", list(files))
+
+    @property
+    def input_files(self) -> List[Path]:
+        return self._input
+
+    def __len__(self) -> int:
+        return len(self._input)
+
+
+Files: TypeAlias = Sequence[DIDPaths]
+
+
+@dataclass(frozen=True)
+class Paths:
+    """Configuration class for the Ocean Protocol Job Details"""
+
+    base_dir: InitVar[Path | None] = None
+
+    _base: Path = field(init=False, repr=False)
+
+    def __post_init__(self, base_dir: Path | None) -> None:
+        object.__setattr__(self, "_base", base_dir if base_dir else Path("/data"))
+
+    @property
+    def data(self) -> Path:
+        return self._base
+
+    @property
+    def inputs(self) -> Path:
+        return self.data / "inputs"
+
+    @property
+    def ddos(self) -> Path:
+        return self.data / "ddos"
+
+    @property
+    def outputs(self) -> Path:
+        return self.data / "outputs"
+
+    @property
+    def logs(self) -> Path:
+        return self.data / "logs"
+
+    @property
+    def algorithm_custom_parameters(self) -> Path:
+        return self.inputs / "algoCustomData.json"
+
+
+DDOMetadata: TypeAlias = Dict[DID, DDO]
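The relocated `Paths` dataclass is what resolves every directory the job touches. A short usage sketch follows; the `/tmp/data` base directory is illustrative, and `Paths` is imported from `oceanprotocol_job_details.domain`, the same path the loaders in this diff use.

```python
from pathlib import Path

from oceanprotocol_job_details.domain import Paths

paths = Paths(base_dir=Path("/tmp/data"))  # defaults to /data when base_dir is omitted
print(paths.inputs)                        # /tmp/data/inputs
print(paths.ddos)                          # /tmp/data/ddos
print(paths.outputs)                       # /tmp/data/outputs
print(paths.algorithm_custom_parameters)   # /tmp/data/inputs/algoCustomData.json
```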
oceanprotocol_job_details-0.3.14/oceanprotocol_job_details/executors.py (new file)

@@ -0,0 +1,40 @@
+# mypy: disable-error-code=explicit-any
+import asyncio
+import inspect
+from functools import partial
+from typing import Any, Callable, Coroutine, TypeGuard, TypeVar, cast
+
+T = TypeVar("T")
+
+
+def is_coro_function(
+    obj: Any,
+) -> TypeGuard[Callable[..., Coroutine[Any, Any, T]]]:
+    return inspect.iscoroutinefunction(obj)
+
+
+def is_coro(obj: Any) -> TypeGuard[Coroutine[Any, Any, T]]:
+    return inspect.iscoroutine(obj)
+
+
+async def run_in_executor(
+    obj: Callable[..., T]
+    | Callable[..., Coroutine[Any, Any, T]]
+    | Coroutine[Any, Any, T],
+    *args: Any,
+    **kwargs: Any,
+) -> T:
+    if is_coro_function(obj):
+        return await obj(*args, **kwargs)
+
+    if is_coro(obj):
+        return await obj
+
+    if callable(obj):
+        loop = asyncio.get_running_loop()
+
+        # just to comply with mypy
+        func = partial(obj, *args, **kwargs)
+        return await loop.run_in_executor(None, cast(Callable[[], T], func))
+
+    return cast(T, obj)
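The rewritten `run_in_executor` is now a coroutine itself and dispatches on what it receives: coroutine functions are awaited with their arguments, bare coroutines are awaited directly, and plain callables are handed to the default thread pool. A hypothetical demo of the three paths (`blocking_add`, `async_add`, and `main` are illustrative names):

```python
import asyncio

from oceanprotocol_job_details.executors import run_in_executor


def blocking_add(a: int, b: int) -> int:
    return a + b  # a sync callable, so it runs via loop.run_in_executor


async def async_add(a: int, b: int) -> int:
    return a + b


async def main() -> None:
    print(await run_in_executor(blocking_add, 1, 2))  # 3: thread-pool path
    print(await run_in_executor(async_add, 3, 4))     # 7: coroutine function, awaited with args
    print(await run_in_executor(async_add(5, 6)))     # 11: bare coroutine, awaited as-is


asyncio.run(main())
```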
oceanprotocol_job_details/helpers.py

@@ -6,7 +6,7 @@ from oceanprotocol_job_details.di import Container
 from oceanprotocol_job_details.ocean import JobDetails
 from oceanprotocol_job_details.settings import JobSettings
 
-InputParametersT = TypeVar("InputParametersT", BaseModel
+InputParametersT = TypeVar("InputParametersT", bound=BaseModel)
 
 
 def create_container(config: Dict[str, Any]) -> Container[InputParametersT]:  # type: ignore[explicit-any]
oceanprotocol_job_details-0.3.14/oceanprotocol_job_details/loaders/impl/ddo.py (new file)

@@ -0,0 +1,26 @@
+from dataclasses import InitVar, dataclass, field
+from pathlib import Path
+from typing import List, Tuple, final
+
+import orjson
+
+from oceanprotocol_job_details.domain import DDO, DID, DDOMetadata, Files
+
+
+@final
+@dataclass(frozen=True)
+class DDOLoader:
+    files: InitVar[Files]
+    """The files to load the DDOs from"""
+
+    _files: List[Tuple[DID, Path]] = field(init=False)
+
+    def __post_init__(self, files: Files) -> None:
+        assert files is not None and len(files) != 0, "Missing files"
+        object.__setattr__(self, "_files", [(f.did, f.ddo) for f in files])
+
+    def load(self) -> DDOMetadata:
+        return {
+            did: DDO.model_validate(orjson.loads(path.read_bytes()))
+            for did, path in self._files
+        }
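Unlike the 0.3.12 loader further down, which returned a flat `list[DDO]`, `load()` now keys each parsed DDO by the DID taken from its file, so callers can look metadata up directly. A schematic sketch of the returned shape; plain dicts stand in for validated `DDO` models and the DID key is a placeholder.

```python
from typing import Dict

# Stand-in for oceanprotocol_job_details.domain.DDOMetadata (Dict[DID, DDO]).
FakeDDOMetadata = Dict[str, dict]

metadata: FakeDDOMetadata = {
    "17feb...e42": {"note": "placeholder DDO body"},
}

print(metadata["17feb...e42"])  # direct lookup by DID instead of scanning a list
```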
oceanprotocol_job_details-0.3.14/oceanprotocol_job_details/loaders/impl/job_details.py (new file)

@@ -0,0 +1,28 @@
+from dataclasses import dataclass
+from typing import Generic, Type, TypeVar, final
+
+from pydantic import BaseModel, Secret
+
+from oceanprotocol_job_details.domain import DDOMetadata, Files, Paths
+from oceanprotocol_job_details.ocean import JobDetails
+
+InputParameterT = TypeVar("InputParameterT", bound=BaseModel)
+
+
+@final
+@dataclass(frozen=True)
+class JobDetailsLoader(Generic[InputParameterT]):
+    input_type: Type[InputParameterT] | None
+    files: Files
+    secret: Secret[str] | None
+    paths: Paths
+    metadata: DDOMetadata
+
+    def load(self) -> JobDetails[InputParameterT]:
+        return JobDetails[InputParameterT](
+            files=self.files,
+            secret=self.secret,
+            metadata=self.metadata,
+            paths=self.paths,
+            input_type=self.input_type,
+        )
oceanprotocol_job_details/ocean.py

@@ -1,29 +1,27 @@
-from __future__ import annotations
-
-import asyncio
-from functools import cached_property
 from pathlib import Path
 from typing import Generator, Generic, Tuple, Type, TypeVar, final
 
 import aiofiles
+import orjson
 from pydantic import BaseModel, ConfigDict, Secret
 
-from oceanprotocol_job_details.domain import
-from oceanprotocol_job_details.executors import run_in_executor
+from oceanprotocol_job_details.domain import DDOMetadata, Files, Paths
 
-InputParametersT = TypeVar("InputParametersT", BaseModel
+InputParametersT = TypeVar("InputParametersT", bound=BaseModel)
 
 
 @final
 class JobDetails(BaseModel, Generic[InputParametersT]):  # type: ignore[explicit-any]
     files: Files
-
+    metadata: DDOMetadata
     paths: Paths
     input_type: Type[InputParametersT] | None
-    secret: Secret[str] | None
+    secret: Secret[str] | None
 
     model_config = ConfigDict(arbitrary_types_allowed=True, frozen=True)
 
+    _input_parameters: InputParametersT | None = None
+
     def inputs(self) -> Generator[Tuple[int, Path], None, None]:
         yield from (
             (idx, file)
@@ -31,9 +29,14 @@ class JobDetails(BaseModel, Generic[InputParametersT]):  # type: ignore[explicit-any]
             for file in files.input_files
         )
 
-
-
-
+    async def input_parameters(self) -> InputParametersT | None:
+        current = self._input_parameters
+
+        if current is None:
+            current = await self.ainput_parameters()
+            object.__setattr__(self, "_input_parameters", current)
+
+        return current
 
     async def ainput_parameters(self) -> InputParametersT | None:
         if self.input_type is None:
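The new `input_parameters()` wrapper memoizes the awaited result on the frozen model through `object.__setattr__`, which sidesteps pydantic's frozen check for the private attribute. Below is a standalone sketch of that pattern, assuming pydantic v2; `CachedExample`, `value`, and `_value` are illustrative names, not the package's.

```python
import asyncio

from pydantic import BaseModel, ConfigDict


class CachedExample(BaseModel):
    model_config = ConfigDict(frozen=True)

    _value: int | None = None  # private attribute used as the cache slot

    async def value(self) -> int:
        current = self._value
        if current is None:
            current = await self._compute()
            object.__setattr__(self, "_value", current)  # same trick as JobDetails
        return current

    async def _compute(self) -> int:
        await asyncio.sleep(0)  # stands in for the aiofiles read plus validation
        return 42


async def main() -> None:
    example = CachedExample()
    print(await example.value())  # computed once
    print(await example.value())  # served from the cached attribute


asyncio.run(main())
```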
@@ -45,4 +48,4 @@ class JobDetails(BaseModel, Generic[InputParametersT]):  # type: ignore[explicit-any]
 
         raw = raw.strip()
         assert raw is not None, f"Empty file {path}"
-        return self.input_type.
+        return self.input_type.model_validate(orjson.loads(raw))
oceanprotocol_job_details/settings.py

@@ -4,7 +4,7 @@ from pathlib import Path
 from typing import Self
 
 import orjson
-from pydantic import Field, field_validator, model_validator
+from pydantic import Field, Secret, field_validator, model_validator
 from pydantic_settings import BaseSettings, SettingsConfigDict
 
 
@@ -12,7 +12,7 @@ class JobSettings(BaseSettings):  # type: ignore[explicit-any]
     base_dir: Path = Field(alias="BASE_DIR")
     dids: list[str] = Field(default_factory=list, alias="DIDS")
     transformation_did: str = Field(alias="TRANSFORMATION_DID")
-    secret: str | None = Field(default=None, alias="SECRET")
+    secret: Secret[str] | None = Field(default=None, alias="SECRET")
     logger: Logger = Field(default_factory=lambda: getLogger(__name__))
 
     model_config = SettingsConfigDict(
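Switching `secret` to `Secret[str]` keeps the value masked in reprs and logs; it is only revealed through an explicit `get_secret_value()` call. A quick standalone pydantic illustration (the `Demo` model is hypothetical, not `JobSettings`):

```python
from pydantic import BaseModel, Secret


class Demo(BaseModel):
    secret: Secret[str] | None = None


demo = Demo(secret="hunter2")
print(demo)  # the secret renders masked, e.g. secret=Secret('**********')

assert demo.secret is not None
print(demo.secret.get_secret_value())  # hunter2
```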
@@ -35,7 +35,7 @@ class JobSettings(BaseSettings):  # type: ignore[explicit-any]
 
     @model_validator(mode="after")
     def validate_dids(self) -> Self:
-        if
+        if len(self.dids) == 0:
             self.dids.extend(
                 [f.name for f in (self.base_dir / "ddos").glob("*") if f.is_file()]
             )
pyproject.toml

@@ -1,6 +1,6 @@
 [project]
 name = "oceanprotocol-job-details"
-version = "0.3.12"
+version = "0.3.14"
 description = "A Python package to get details from OceanProtocol jobs"
 authors = [
     { name = "Agrospai", email = "agrospai@udl.cat" },
@@ -27,6 +27,8 @@ Homepage = "https://github.com/AgrospAI/oceanprotocol-job-details"
 Issues = "https://github.com/AgrospAI/oceanprotocol-job-details/issues"
 
 [tool.pytest.ini_options]
+asyncio_mode = "auto"
+asyncio_default_fixture_loop_scope = "function"
 log_level = "INFO"
 pythonpath = "oceanprotocol_job_details"
 log_cli = true
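With `asyncio_mode = "auto"`, pytest-asyncio collects bare `async def` test functions without an explicit `@pytest.mark.asyncio` marker, which is what makes the new coroutine APIs easy to test. A hypothetical test sketch, not taken from the repository:

```python
from oceanprotocol_job_details.executors import run_in_executor


async def test_run_in_executor_wraps_sync_callables() -> None:
    # Collected as an asyncio test automatically under asyncio_mode = "auto".
    assert await run_in_executor(lambda: 21 * 2) == 42
```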
@@ -51,4 +53,9 @@ warn_return_any = true
 disallow_any_explicit = true
 
 [dependency-groups]
-dev = [
+dev = [
+    "mypy>=1.15.0",
+    "pytest>=8.3.4",
+    "pytest-asyncio>=1.3.0",
+    "types-aiofiles>=25.1.0.20251011",
+]
oceanprotocol_job_details-0.3.12/oceanprotocol_job_details/executors.py (removed)

@@ -1,24 +0,0 @@
-import asyncio
-import inspect
-from typing import Any, Callable, Coroutine, TypeVar
-
-T = TypeVar("T")
-
-
-def run_in_executor(obj: Callable[..., Any] | Coroutine[Any, Any, T]) -> T:
-    if callable(obj) and not inspect.iscoroutinefunction(obj):
-        return obj()
-
-    if inspect.iscoroutinefunction(obj):
-        obj = obj()
-
-    if not inspect.iscoroutine(obj):
-        return obj
-
-    try:
-        loop = asyncio.get_running_loop()
-    except RuntimeError:
-        return asyncio.run(obj)
-
-    future = asyncio.run_coroutine_threadsafe(obj, loop)
-    return future.result()
oceanprotocol_job_details-0.3.12/oceanprotocol_job_details/loaders/impl/ddo.py (removed)

@@ -1,24 +0,0 @@
-from __future__ import annotations
-
-from dataclasses import InitVar, dataclass, field
-from pathlib import Path
-from typing import final
-
-from oceanprotocol_job_details.domain import DDO, Files
-
-
-@final
-@dataclass(frozen=True)
-class DDOLoader:
-    files: InitVar[Files]
-    """The files to load the DDOs from"""
-
-    _ddo_paths: list[Path] = field(init=False)
-
-    def __post_init__(self, files: Files) -> None:
-        assert files is not None and len(files) != 0, "Missing files"
-
-        object.__setattr__(self, "_ddo_paths", [f.ddo for f in files])
-
-    def load(self) -> list[DDO]:
-        return [DDO.model_validate_json(p.read_text()) for p in self._ddo_paths]
oceanprotocol_job_details-0.3.12/oceanprotocol_job_details/loaders/impl/job_details.py (removed)

@@ -1,29 +0,0 @@
-from dataclasses import dataclass, field
-from types import NoneType
-from typing import Generic, Type, TypeVar, final
-
-from pydantic import BaseModel
-
-from oceanprotocol_job_details.domain import DDO, Files, Paths
-from oceanprotocol_job_details.ocean import JobDetails
-
-T = TypeVar("T", BaseModel, None)
-
-
-@final
-@dataclass(frozen=True)
-class JobDetailsLoader(Generic[T]):
-    input_type: Type[T] = field(repr=False)
-    files: Files
-    secret: str
-    paths: Paths
-    ddos: list[DDO]
-
-    def load(self) -> JobDetails[T]:
-        return JobDetails[T](
-            files=self.files,
-            secret=self.secret,
-            ddos=self.ddos,
-            paths=self.paths,
-            input_type=self.input_type,
-        )

The remaining files (.gitignore, LICENSE, the package __init__.py files, loaders/loader.py, and py.typed) are unchanged between the two versions.