scientiflow-cli 0.4.13__py3-none-any.whl → 0.4.16__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- scientiflow_cli/pipeline/decode_and_execute.py +38 -5
- scientiflow_cli/services/executor.py +3 -3
- {scientiflow_cli-0.4.13.dist-info → scientiflow_cli-0.4.16.dist-info}/METADATA +1 -1
- {scientiflow_cli-0.4.13.dist-info → scientiflow_cli-0.4.16.dist-info}/RECORD +7 -7
- {scientiflow_cli-0.4.13.dist-info → scientiflow_cli-0.4.16.dist-info}/LICENSE.md +0 -0
- {scientiflow_cli-0.4.13.dist-info → scientiflow_cli-0.4.16.dist-info}/WHEEL +0 -0
- {scientiflow_cli-0.4.13.dist-info → scientiflow_cli-0.4.16.dist-info}/entry_points.txt +0 -0
scientiflow_cli/pipeline/decode_and_execute.py

@@ -116,7 +116,7 @@ def execute_background_command_standalone(command: str, log_file_path: str):
         return False
 
 
 class PipelineExecutor:
-    def __init__(self, base_dir: str, project_id: int, project_job_id: int, project_title: str, job_dir_name: str, nodes: List[Dict[str, Any]], edges: List[Dict[str, str]], environment_variables: Dict[str, str], start_node: str = None, end_node: str = None, job_status: str = None, current_node_from_config: str = None):
+    def __init__(self, base_dir: str, project_id: int, project_job_id: int, project_title: str, job_dir_name: str, nodes: List[Dict[str, Any]], edges: List[Dict[str, str]], environment_variables: Dict[str, str], start_node: str = None, end_node: str = None, job_status: str = None, current_node_from_config: str = None, is_cloud: bool = False):
         self.base_dir = base_dir
         self.project_id = project_id
         self.project_job_id = project_job_id
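The only change in this hunk is the new is_cloud keyword on PipelineExecutor.__init__. It defaults to False, so callers that predate 0.4.16 keep their existing behaviour; everything that follows builds on this flag.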
@@ -130,6 +130,11 @@ class PipelineExecutor:
         self.current_node = None
         self.job_status = job_status
         self.current_node_from_config = current_node_from_config
+        self.is_cloud = is_cloud
+        self.background_executors = []  # Keep track of background executors
+        self.background_jobs_count = 0  # Track number of active background jobs
+        self.background_jobs_completed = 0  # Track completed background jobs
+        self.background_jobs_lock = threading.Lock()  # Thread-safe counter updates
 
         # For resuming: flag to track if we've reached the resume point
         self.resume_mode = (job_status == "running" and current_node_from_config is not None)
@@ -153,6 +158,18 @@ class PipelineExecutor:
         # Initialize log file
         self.init_log()
 
+    # ✅ Helper method to conditionally notify cloud manager
+    def _notify_cloud(self, completion_type: str):
+        """Triggers scientiflow-cloud commands if in cloud mode."""
+        if not self.is_cloud:
+            return
+        try:
+            cmd = ["scientiflow-cloud", f"--completed-job-{completion_type}", str(self.project_job_id)]
+            subprocess.run(cmd, check=False)
+            logger.info(f"Sent {completion_type} signal to cloud manager.")
+        except Exception as e:
+            print(f"[ERROR] Cloud notification failed: {e}")
+
     def init_log(self):
         """Initialize the log file."""
         try:
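Spelled out, the helper can only produce two commands, since the later hunks call it with completion_type set to "fully" or "partially". A quick sketch of the argument vectors it builds (the job id 1234 is a placeholder for self.project_job_id):

# The f-string format comes directly from the hunk above; 1234 is a placeholder.
for completion_type in ("fully", "partially"):
    cmd = ["scientiflow-cloud", f"--completed-job-{completion_type}", "1234"]
    print(" ".join(cmd))
# scientiflow-cloud --completed-job-fully 1234
# scientiflow-cloud --completed-job-partially 1234

Note the failure handling: check=False means a non-zero exit status from scientiflow-cloud is ignored, and the surrounding try/except swallows a missing binary, so a failed notification never aborts the pipeline itself.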
@@ -185,6 +202,8 @@ class PipelineExecutor:
             body = {"project_job_id": self.project_job_id, "terminal_output": terminal_output}
             make_auth_request(endpoint="/agent-application/update-terminal-output", method="POST", data=body, error_message="Unable to update terminal output!")
             printer.print_message("[+] Terminal output updated successfully.", style="bold green")
+            # ✅ TRIGGER: FULL COMPLETION
+            self._notify_cloud("fully")
         except Exception as e:
             print(f"[ERROR] Failed to update terminal output: {e}")
 
@@ -241,6 +260,20 @@ class PipelineExecutor:
             raise SystemExit("[ERROR] Pipeline execution terminated due to an unexpected error.")
 
 
+    def wait_for_background_jobs(self):
+        """Wait for all background jobs to complete."""
+        import time
+        if self.background_jobs_count > 0:
+            printer.print_message(f"[INFO] Waiting for {self.background_jobs_count} background job(s) to complete...", style="bold yellow")
+            # ✅ TRIGGER: PARTIAL COMPLETION (Frees GPU/Primary CPU while waiting for cleanup)
+            self._notify_cloud("partially")
+            while True:
+                with self.background_jobs_lock:
+                    if self.background_jobs_completed >= self.background_jobs_count:
+                        break
+                time.sleep(0.5)  # Check every 500ms
+
+            printer.print_message("[INFO] All background jobs completed.", style="bold green")
 
     def dfs(self, node: str):
         """Perform Depth-First Search (DFS) for executing pipeline nodes."""
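These hunks only show the consumer side of the background-job counters; the code that increments background_jobs_completed is not part of the diff. A minimal sketch of what the producer side plausibly looks like, with a hypothetical wrapper named run_in_background (the attribute names come from the constructor hunk above; everything else is assumed):

import threading

def run_in_background(executor, command_fn):
    # Hypothetical worker wrapper: runs command_fn in a daemon thread and,
    # when it finishes, bumps the completed counter under the executor's lock
    # so wait_for_background_jobs() can observe it and exit its poll loop.
    def worker():
        try:
            command_fn()
        finally:
            with executor.background_jobs_lock:
                executor.background_jobs_completed += 1

    executor.background_jobs_count += 1  # registered on the main thread, before the worker exists
    thread = threading.Thread(target=worker, daemon=True)
    executor.background_executors.append(thread)
    thread.start()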
@@ -353,7 +386,6 @@ class PipelineExecutor:
         """Start executing the pipeline."""
         # Use job status from configuration instead of API call
         current_status = self.job_status
-
         if current_status == "running":
             # Job is already running, resume from start but skip until current node
             current_node_id = self.current_node_from_config
@@ -384,7 +416,8 @@
         self.update_terminal_output()
 
 # External function to initiate the pipeline execution
-def decode_and_execute_pipeline(base_dir: str, project_id: int, project_job_id: int, project_title: str, job_dir_name: str, nodes: List[Dict[str, Any]], edges: List[Dict[str, str]], environment_variables: Dict[str, str], start_node: str = None, end_node: str = None, job_status: str = None, current_node_from_config: str = None):
+def decode_and_execute_pipeline(base_dir: str, project_id: int, project_job_id: int, project_title: str, job_dir_name: str, nodes: List[Dict[str, Any]], edges: List[Dict[str, str]], environment_variables: Dict[str, str], start_node: str = None, end_node: str = None, job_status: str = None, current_node_from_config: str = None, is_cloud: bool = False):
     """Initialize and execute the pipeline."""
-    executor = PipelineExecutor(base_dir, project_id, project_job_id, project_title, job_dir_name, nodes, edges, environment_variables, start_node, end_node, job_status, current_node_from_config)
-    executor.decode_and_execute_pipeline()
+    executor = PipelineExecutor(base_dir, project_id, project_job_id, project_title, job_dir_name, nodes, edges, environment_variables, start_node, end_node, job_status, current_node_from_config, is_cloud)
+    executor.decode_and_execute_pipeline()
+
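For callers, the module-level entry point mirrors the constructor. A minimal sketch of invoking it with the new flag; every value below is a placeholder, and only the parameter names come from the signature above:

from scientiflow_cli.pipeline.decode_and_execute import decode_and_execute_pipeline

decode_and_execute_pipeline(
    base_dir="/home/user/scientiflow",  # placeholder path
    project_id=7,
    project_job_id=1234,
    project_title="7_demo",
    job_dir_name="job_1234",
    nodes=[],                           # placeholder pipeline graph
    edges=[],
    environment_variables={},
    is_cloud=True,                      # new in 0.4.16; omit it to keep the old behaviour
)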
scientiflow_cli/services/executor.py

@@ -37,7 +37,7 @@ def execute_jobs(job_ids: list[int] = None, parallel: bool = False, is_cloud: bool = False):
     if matching_jobs:
         if is_cloud:
             for job in matching_jobs:
-                job['project_title'] = job['project']['id'] + '_' + job['project']['project_title']
+                job['project']['project_title'] = str(job['project']['id']) + '_' + job['project']['project_title']
                 if 'server' not in job or job['server'] is None:
                     job['server'] = {'base_directory':None}
                 job['server']['base_directory'] = get_base_directory()
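This one-line change fixes two problems at once: job['project']['id'] is an integer, so the 0.4.13 code raised TypeError before the title was ever prefixed, and the result was written to the wrong key (a top-level job['project_title'] rather than the nested job['project']['project_title']). A minimal reproduction with a hypothetical job payload:

# Hypothetical payload shaped like the job dicts iterated above.
job = {'project': {'id': 7, 'project_title': 'demo'}}

# 0.4.13 behaviour: TypeError: unsupported operand type(s) for +: 'int' and 'str'
# job['project_title'] = job['project']['id'] + '_' + job['project']['project_title']

# 0.4.16 behaviour: cast the id and update the nested title in place.
job['project']['project_title'] = str(job['project']['id']) + '_' + job['project']['project_title']
assert job['project']['project_title'] == '7_demo'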
@@ -172,8 +172,8 @@ def execute_single_job(job: dict, is_cloud: bool = False) -> None:
             printer.print_success(f"[+] Resuming execution for job ID: {project_job_id}")
         else:
             printer.print_success(f"[+] Starting execution for job ID: {project_job_id}")
-
-        decode_and_execute_pipeline(base_dir, project_id, project_job_id, project_title, job_dir_name, nodes, edges, environment_variables, start_node=start_node, end_node=end_node, job_status=job_status, current_node_from_config=current_node_from_config)
+
+        decode_and_execute_pipeline(base_dir, project_id, project_job_id, project_title, job_dir_name, nodes, edges, environment_variables, start_node=start_node, end_node=end_node, job_status=job_status, current_node_from_config=current_node_from_config, is_cloud=is_cloud)
         printer.print_success(f"[+] Execution completed for job ID: {project_job_id}")
 
     except ValueError as value_err:
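Together with the pipeline-side changes above, this completes the plumbing: the is_cloud flag accepted by execute_jobs and execute_single_job is now forwarded into PipelineExecutor, which is what arms the _notify_cloud helper.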
{scientiflow_cli-0.4.13.dist-info → scientiflow_cli-0.4.16.dist-info}/RECORD

@@ -7,12 +7,12 @@ scientiflow_cli/cli/logout.py,sha256=EzpFPA1ENoXqLvduo6rxaVF09GqgO5GCRvnGMDr5BEw
 scientiflow_cli/main.py,sha256=AliEFU3bebJV2tbvxQYbiMY1bqVGqq3-a6dzkVUXong,6982
 scientiflow_cli/pipeline/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 scientiflow_cli/pipeline/container_manager.py,sha256=KAnE5AvsSg4cewLc_v4gWCqhKGoc7ycHOtl_HYxUK7E,8444
-scientiflow_cli/pipeline/decode_and_execute.py,sha256=…
+scientiflow_cli/pipeline/decode_and_execute.py,sha256=-y52feHSuRTaNZO5Hxg0iOEapwuMFgQ6i9q_HIU0Uo0,20761
 scientiflow_cli/pipeline/get_jobs.py,sha256=69jOIVwXd8j2lAYy28r2QcsjFT4yRpXNOqsfopiZhFs,1498
 scientiflow_cli/services/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 scientiflow_cli/services/auth_service.py,sha256=lknARfVTjRCH-4yWVPKiS330NIVHxFw3HlQdN2Lw3Og,3343
 scientiflow_cli/services/base_directory.py,sha256=2dUvGYk4enLe3Cagcs_bfv2kNuHg1ws-dtMDEW_tccI,2726
-scientiflow_cli/services/executor.py,sha256=…
+scientiflow_cli/services/executor.py,sha256=RuLB2vt4msI8QenA-lMPuVfDn7fLvfYrALpWdN6ZmuA,11967
 scientiflow_cli/services/modes.py,sha256=-Bk1CJO0vgc8v_rXktfKAyHSF6cr5bGbufSGa_DtvY4,1241
 scientiflow_cli/services/request_handler.py,sha256=CjJqEjXt8AOzyZq6UwUBzRBhahqWHj1_OKUyPG3PpFs,1335
 scientiflow_cli/services/rich_printer.py,sha256=5ORAaZOa_84m6vP-novpPOI70UPxt0pEvmRq9999Ifg,2129

@@ -24,8 +24,8 @@ scientiflow_cli/utils/file_manager.py,sha256=KLdJlIzFng_BfKHHZzQNp35hXsFMWfgy4OU
 scientiflow_cli/utils/logger.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 scientiflow_cli/utils/mock.py,sha256=UZ9cN2Qx3EAXcxnLQY5z4DQWy4jflnWFnfNTsuvnFH8,11237
 scientiflow_cli/utils/singularity.py,sha256=jy8ep7Xa1Eg4fptNjyXLPuVN2KA8l4dFil-w-KaVNkw,4956
-scientiflow_cli-0.4.13.dist-info/LICENSE.md,sha256=…
-scientiflow_cli-0.4.13.dist-info/METADATA,sha256=…
-scientiflow_cli-0.4.13.dist-info/WHEEL,sha256=…
-scientiflow_cli-0.4.13.dist-info/entry_points.txt,sha256=…
-scientiflow_cli-0.4.13.dist-info/RECORD,,
+scientiflow_cli-0.4.16.dist-info/LICENSE.md,sha256=nb6GGGYuS_KXe33mSNwcEW-QzvwM475NQ4cNE7KBb34,425
+scientiflow_cli-0.4.16.dist-info/METADATA,sha256=XJKJK1LBn0GLwpKdNbzMTcR29Cht6pQoDe_-CXJZbvk,2492
+scientiflow_cli-0.4.16.dist-info/WHEEL,sha256=sP946D7jFCHeNz5Iq4fL4Lu-PrWrFsgfLXbbkciIZwg,88
+scientiflow_cli-0.4.16.dist-info/entry_points.txt,sha256=0lq2mjcG5hGfODrQodeMSAy9RfE2EX1MZSHRpfSncxc,61
+scientiflow_cli-0.4.16.dist-info/RECORD,,
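The RECORD changes are consistent with the summary at the top: only decode_and_execute.py, executor.py, and METADATA carry new hashes, while LICENSE.md, WHEEL, and entry_points.txt are unchanged between 0.4.13 and 0.4.16.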