ocean-runner 0.2.12__tar.gz → 0.2.14__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of ocean-runner might be problematic.
- {ocean_runner-0.2.12 → ocean_runner-0.2.14}/PKG-INFO +1 -1
- {ocean_runner-0.2.12 → ocean_runner-0.2.14}/ocean_runner/config.py +2 -7
- ocean_runner-0.2.14/ocean_runner/runner.py +172 -0
- {ocean_runner-0.2.12 → ocean_runner-0.2.14}/pyproject.toml +1 -1
- ocean_runner-0.2.12/ocean_runner/runner.py +0 -143
- {ocean_runner-0.2.12 → ocean_runner-0.2.14}/.gitignore +0 -0
- {ocean_runner-0.2.12 → ocean_runner-0.2.14}/LICENSE +0 -0
- {ocean_runner-0.2.12 → ocean_runner-0.2.14}/README.md +0 -0
- {ocean_runner-0.2.12 → ocean_runner-0.2.14}/ocean_runner/__init__.py +0 -0
{ocean_runner-0.2.12 → ocean_runner-0.2.14}/PKG-INFO

@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: ocean-runner
-Version: 0.2.12
+Version: 0.2.14
 Summary: A fluent API for OceanProtocol algorithms
 Project-URL: Homepage, https://github.com/AgrospAI/ocean-runner
 Project-URL: Issues, https://github.com/AgrospAI/ocean-runner/issues
{ocean_runner-0.2.12 → ocean_runner-0.2.14}/ocean_runner/config.py

@@ -2,7 +2,7 @@ import os
 from dataclasses import asdict, dataclass, field
 from logging import Logger
 from pathlib import Path
-from typing import
+from typing import Iterable, TypeVar

 T = TypeVar("T")

@@ -43,9 +43,6 @@ class Config:
     custom_input: T | None = None
     """Algorithm's custom input types, must be a dataclass_json"""

-    error_callback: Callable[[Exception], None] = None
-    """Callback to execute upon exceptions"""
-
     logger: Logger | None = None
     """Logger to use in the algorithm"""

@@ -54,7 +51,5 @@ class Config:
     )
     """Paths that should be included so the code executes correctly"""

-    environment: Environment = field(
-        default_factory=lambda: Environment(),
-    )
+    environment: Environment = field(default_factory=lambda: Environment())
     """Mock of environment data"""
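The substantive change in config.py is that `error_callback` no longer lives on `Config`: in 0.2.14 the callback is a field of the `Algorithm` runner itself (see the new runner.py below) and is registered with the `on_error` decorator. A minimal migration sketch, assuming the code runs inside an Ocean Protocol job environment where `JobDetails.load` can resolve its inputs; the handler body is illustrative:

from ocean_runner.config import Config
from ocean_runner.runner import Algorithm

app = Algorithm(Config())  # Config() no longer accepts an error_callback argument


@app.on_error
def handle_error(algorithm: Algorithm, exc: Exception) -> None:
    # Illustrative handler: log via the runner's logger, then re-raise
    algorithm.logger.error("Algorithm failed: %s", exc)
    raise exc

Note the signature change: the old `Config.error_callback` took only the exception (`Callable[[Exception], None]`), while the 0.2.14 callback also receives the `Algorithm` instance.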
ocean_runner-0.2.14/ocean_runner/runner.py

@@ -0,0 +1,172 @@
+from __future__ import annotations
+
+from dataclasses import InitVar, asdict, dataclass, field
+from logging import Logger
+from pathlib import Path
+from typing import Callable, Generic, Self, TypeVar
+
+from oceanprotocol_job_details import JobDetails
+
+from ocean_runner.config import Config
+
+JobDetailsT = TypeVar("JobDetailsT")
+ResultT = TypeVar("ResultT")
+
+
+def default_error_callback(_, e: Exception) -> None:
+    raise e
+
+
+def default_validation(algorithm: Algorithm) -> None:
+    algorithm.logger.info("Validating input using default validation")
+    assert algorithm.job_details.ddos, "DDOs missing"
+    assert algorithm.job_details.files, "Files missing"
+
+
+def default_save(*, result: ResultT, base: Path, algorithm: Algorithm) -> None:
+    algorithm.logger.info("Saving results using default save")
+    with open(base / "result.txt", "w+") as f:
+        f.write(str(result))
+
+
+@dataclass
+class Algorithm(Generic[JobDetailsT, ResultT]):
+    """
+    A configurable algorithm runner that behaves like a FastAPI app:
+    - You register `validate`, `run`, and `save_results` via decorators.
+    - You execute the full pipeline by calling `app()`.
+    """
+
+    config: InitVar[Config | None] = None
+    logger: Logger = field(init=False)
+    _job_details: JobDetails[JobDetailsT] = field(init=False)
+    _result: ResultT | None = field(default=None, init=False)
+
+    error_callback: Callable[[Algorithm, Exception], None] = default_error_callback
+
+    # Decorator-registered callbacks
+    _validate_fn: Callable[[Algorithm], None] | None = field(default=None, init=False)
+    _run_fn: Callable[[Algorithm], ResultT] | None = field(default=None, init=False)
+    _save_fn: Callable[[ResultT, Path, Algorithm], None] | None = field(
+        default=None, init=False
+    )
+
+    def __post_init__(self, config: Config | None) -> None:
+        config: Config = config or Config()
+
+        # Configure logger
+        if config.logger:
+            self.logger = config.logger
+        else:
+            import logging
+
+            logging.basicConfig(
+                level=logging.DEBUG,
+                format="%(asctime)s | %(levelname)-8s | %(name)s | %(message)s",
+                datefmt="%Y-%m-%d %H:%M:%S",
+            )
+            self.logger = logging.getLogger("ocean_runner")
+
+        # Normalize base_dir
+        if isinstance(config.environment.base_dir, str):
+            config.environment.base_dir = Path(config.environment.base_dir)
+
+        # Extend sys.path for custom imports
+        if config.source_paths:
+            import sys
+
+            sys.path.extend([str(path.absolute()) for path in config.source_paths])
+            self.logger.debug(f"Added [{len(config.source_paths)}] entries to PATH")
+
+        # Load job details
+        self._job_details = JobDetails.load(
+            _type=config.custom_input,
+            base_dir=config.environment.base_dir,
+            dids=config.environment.dids,
+            transformation_did=config.environment.transformation_did,
+            secret=config.environment.secret,
+        )
+
+        self.logger.info("Loaded JobDetails")
+        self.logger.debug(asdict(self.job_details))
+
+        self.config = config
+
+    class Error(RuntimeError): ...
+
+    @property
+    def job_details(self) -> JobDetails:
+        if not self._job_details:
+            raise Algorithm.Error("JobDetails not initialized or missing")
+        return self._job_details
+
+    @property
+    def result(self) -> ResultT:
+        if self._result is None:
+            raise Algorithm.Error("Result missing, run the algorithm first")
+        return self._result
+
+    # ---------------------------
+    # Decorators (FastAPI-style)
+    # ---------------------------
+
+    def validate(self, fn: Callable[[Self], None]) -> Callable[[Self], None]:
+        self._validate_fn = fn
+        return fn
+
+    def run(self, fn: Callable[[Self], ResultT]) -> Callable[[Self], ResultT]:
+        self._run_fn = fn
+        return fn
+
+    def save_results(self, fn: Callable[[ResultT, Path, Algorithm], None]) -> Callable:
+        self._save_fn = fn
+        return fn
+
+    def on_error(self, fn: Callable[[Algorithm, Exception], None]) -> Callable:
+        self.error_callback = fn
+        return fn
+
+    # ---------------------------
+    # Execution Pipeline
+    # ---------------------------
+
+    def __call__(self) -> ResultT | None:
+        """Executes the algorithm pipeline: validate → run → save_results."""
+        try:
+            # Validation step
+            if self._validate_fn:
+                self.logger.info("Running custom validation...")
+                self._validate_fn(self)
+            else:
+                self.logger.info("Running default validation...")
+                default_validation(self)
+
+            # Run step
+            if self._run_fn:
+                self.logger.info("Running algorithm...")
+                self._result = self._run_fn(self)
+            else:
+                self.logger.warning("No run() function defined. Skipping execution.")
+                self._result = None
+
+            # Save step
+            if self._save_fn:
+                self.logger.info("Saving results...")
+                self._save_fn(
+                    self._result,
+                    self.job_details.paths.outputs,
+                    self,
+                )
+            else:
+                self.logger.info("No save_results() defined. Using default.")
+                default_save(
+                    result=self._result,
+                    base=self.job_details.paths.outputs,
+                    algorithm=self,
+                )
+
+        except Exception as e:
+            self.logger.exception("Error during algorithm execution")
+            self.error_callback(self, e)
+
+        return self._result
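The new module replaces the chained calls of 0.2.12 with decorator registration plus a single callable pipeline. A usage sketch under the same assumptions as above (an Ocean Protocol job environment); the validation rule, computation, and output file name are illustrative:

from pathlib import Path

from ocean_runner.config import Config
from ocean_runner.runner import Algorithm

app = Algorithm(Config())  # JobDetails are loaded during __post_init__


@app.validate
def check(algorithm: Algorithm) -> None:
    # A registered validator replaces default_validation
    assert algorithm.job_details.files, "expected at least one input file"


@app.run
def compute(algorithm: Algorithm) -> str:
    # Illustrative computation over the job inputs
    return f"received {len(algorithm.job_details.files)} input file(s)"


@app.save_results
def save(result: str, base: Path, algorithm: Algorithm) -> None:
    # Called positionally with (result, output directory, algorithm)
    (base / "summary.txt").write_text(result)


if __name__ == "__main__":
    app()  # validate -> run -> save_results

If no `run` function is registered, the pipeline logs a warning and returns None; any exception is routed to `error_callback`, which re-raises by default.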
ocean_runner-0.2.12/ocean_runner/runner.py

@@ -1,143 +0,0 @@
-from __future__ import annotations
-
-from dataclasses import InitVar, asdict, dataclass, field
-from logging import Logger
-from pathlib import Path
-from typing import Callable, Generic, Self, TypeVar
-
-from oceanprotocol_job_details import JobDetails
-
-from ocean_runner.config import Config
-
-JobDetailsT = TypeVar(
-    "JobDetailsT",
-)
-ResultT = TypeVar("ResultT")
-
-
-def default_error_callback(_: Algorithm, e: Exception) -> None:
-    raise e
-
-
-def default_validation(algorithm: Algorithm) -> None:
-    algorithm.logger.info("Validating input using default validation")
-
-    assert algorithm.job_details.ddos, "DDOs missing"
-    assert algorithm.job_details.files, "Files missing"
-
-
-def default_save(*, result: ResultT, base: Path, algorithm: Algorithm) -> None:
-    algorithm.logger.info("Saving results using default save")
-
-    with open(base / "result.txt", "w+") as f:
-        f.write(str(result))
-
-
-@dataclass
-class Algorithm(Generic[JobDetailsT, ResultT]):
-
-    config: InitVar[Config | None] = None
-
-    # Load from config
-    logger: Logger = field(init=False)
-
-    _job_details: JobDetails[JobDetailsT] = field(init=False)
-    _result: ResultT | None = field(default=None, init=False)
-
-    error_callback = default_error_callback
-
-    def __post_init__(self, config: Config | None) -> None:
-        config: Config = config or Config()
-
-        if config.error_callback:
-            self.error_callback = config.error_callback
-
-        if config.logger:
-            self.logger = config.logger
-        else:
-            import logging
-
-            logging.basicConfig(
-                level=logging.DEBUG,
-                format="%(asctime)s | %(levelname)-8s | %(name)s | %(message)s",
-                datefmt="%Y-%m-%d %H:%M:%S",
-            )
-
-            self.logger = logging.getLogger("ocean_runner")
-
-        if isinstance(config.environment.base_dir, str):
-            config.environment.base_dir = Path(config.environment.base_dir)
-
-        if config.source_paths:
-            import sys
-
-            sys.path.extend([str(path.absolute()) for path in config.source_paths])
-            self.logger.debug(f"Added [{len(config.source_paths)}] entries to PATH")
-
-        self._job_details = JobDetails.load(
-            _type=config.custom_input,
-            base_dir=config.environment.base_dir,
-            dids=config.environment.dids,
-            transformation_did=config.environment.transformation_did,
-            secret=config.environment.secret,
-        )
-
-        self.logger.info("Loaded JobDetails")
-        self.logger.debug(asdict(self.job_details))
-
-    class Error(RuntimeError): ...
-
-    @property
-    def job_details(self) -> JobDetails:
-        if not self._job_details:
-            raise Algorithm.Error("JobDetails not initialized or missing")
-        return self._job_details
-
-    @property
-    def result(self) -> ResultT:
-        if self._result is None:
-            raise Algorithm.Error("Result missing, run the algorithm first")
-        return self._result
-
-    def validate(
-        self,
-        callback: Callable[[Self], None] = default_validation,
-    ) -> Self:
-        self.logger.info("Validating instance...")
-        try:
-            callback(self)
-        except Exception as e:
-            self.error_callback(e)
-
-        return self
-
-    def run(
-        self,
-        callable: Callable[[Self], ResultT],
-    ) -> Self:
-        self.logger.info("Running algorithm...")
-        try:
-            self._result = callable(self)
-        except Exception as e:
-            self.error_callback(e)
-
-        return self
-
-    def save_results(
-        self,
-        callable: Callable[[ResultT, Path, Algorithm], None] = default_save,
-        *,
-        override_path: Path | None = None,
-    ) -> None:
-        self.logger.info("Saving results...")
-        try:
-            callable(
-                results=self.result,
-                base_path=override_path or self.job_details.paths.outputs,
-                algorithm=self,
-            )
-        except Exception as e:
-            self.error_callback(e)
-
-
-__all__ = [Algorithm]
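For contrast, the deleted 0.2.12 module expected the caller to drive each stage explicitly on the instance. A sketch of that older style, assuming the same job environment; the compute and save callables are illustrative (an explicit save callable is shown because the keyword names the old `save_results` passed, `results` and `base_path`, do not match `default_save`'s parameters):

from pathlib import Path

from ocean_runner.config import Config
from ocean_runner.runner import Algorithm  # as shipped in 0.2.12


def compute(algorithm: Algorithm) -> str:
    # Illustrative computation
    return str(algorithm.job_details.files)


def save(*, results: str, base_path: Path, algorithm: Algorithm) -> None:
    # Matches the keyword arguments the old save_results passes through
    (base_path / "result.txt").write_text(results)


algorithm = Algorithm(Config())
algorithm.validate()          # default_validation unless a callback is passed
algorithm.run(compute)        # stores the return value as the result
algorithm.save_results(save)  # writes under job_details.paths.outputs by default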
Files without changes: .gitignore, LICENSE, README.md, ocean_runner/__init__.py