scientiflow-cli 0.4.13__tar.gz → 0.4.16__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (30)
  1. {scientiflow_cli-0.4.13 → scientiflow_cli-0.4.16}/PKG-INFO +1 -1
  2. {scientiflow_cli-0.4.13 → scientiflow_cli-0.4.16}/pyproject.toml +1 -1
  3. {scientiflow_cli-0.4.13 → scientiflow_cli-0.4.16}/scientiflow_cli/pipeline/decode_and_execute.py +38 -5
  4. {scientiflow_cli-0.4.13 → scientiflow_cli-0.4.16}/scientiflow_cli/services/executor.py +3 -3
  5. {scientiflow_cli-0.4.13 → scientiflow_cli-0.4.16}/LICENSE.md +0 -0
  6. {scientiflow_cli-0.4.13 → scientiflow_cli-0.4.16}/README.md +0 -0
  7. {scientiflow_cli-0.4.13 → scientiflow_cli-0.4.16}/scientiflow_cli/__init__.py +0 -0
  8. {scientiflow_cli-0.4.13 → scientiflow_cli-0.4.16}/scientiflow_cli/__main__.py +0 -0
  9. {scientiflow_cli-0.4.13 → scientiflow_cli-0.4.16}/scientiflow_cli/cli/__init__.py +0 -0
  10. {scientiflow_cli-0.4.13 → scientiflow_cli-0.4.16}/scientiflow_cli/cli/auth_utils.py +0 -0
  11. {scientiflow_cli-0.4.13 → scientiflow_cli-0.4.16}/scientiflow_cli/cli/login.py +0 -0
  12. {scientiflow_cli-0.4.13 → scientiflow_cli-0.4.16}/scientiflow_cli/cli/logout.py +0 -0
  13. {scientiflow_cli-0.4.13 → scientiflow_cli-0.4.16}/scientiflow_cli/main.py +0 -0
  14. {scientiflow_cli-0.4.13 → scientiflow_cli-0.4.16}/scientiflow_cli/pipeline/__init__.py +0 -0
  15. {scientiflow_cli-0.4.13 → scientiflow_cli-0.4.16}/scientiflow_cli/pipeline/container_manager.py +0 -0
  16. {scientiflow_cli-0.4.13 → scientiflow_cli-0.4.16}/scientiflow_cli/pipeline/get_jobs.py +0 -0
  17. {scientiflow_cli-0.4.13 → scientiflow_cli-0.4.16}/scientiflow_cli/services/__init__.py +0 -0
  18. {scientiflow_cli-0.4.13 → scientiflow_cli-0.4.16}/scientiflow_cli/services/auth_service.py +0 -0
  19. {scientiflow_cli-0.4.13 → scientiflow_cli-0.4.16}/scientiflow_cli/services/base_directory.py +0 -0
  20. {scientiflow_cli-0.4.13 → scientiflow_cli-0.4.16}/scientiflow_cli/services/modes.py +0 -0
  21. {scientiflow_cli-0.4.13 → scientiflow_cli-0.4.16}/scientiflow_cli/services/request_handler.py +0 -0
  22. {scientiflow_cli-0.4.13 → scientiflow_cli-0.4.16}/scientiflow_cli/services/rich_printer.py +0 -0
  23. {scientiflow_cli-0.4.13 → scientiflow_cli-0.4.16}/scientiflow_cli/services/status_updater.py +0 -0
  24. {scientiflow_cli-0.4.13 → scientiflow_cli-0.4.16}/scientiflow_cli/utils/__init__.py +0 -0
  25. {scientiflow_cli-0.4.13 → scientiflow_cli-0.4.16}/scientiflow_cli/utils/config.py +0 -0
  26. {scientiflow_cli-0.4.13 → scientiflow_cli-0.4.16}/scientiflow_cli/utils/encryption.py +0 -0
  27. {scientiflow_cli-0.4.13 → scientiflow_cli-0.4.16}/scientiflow_cli/utils/file_manager.py +0 -0
  28. {scientiflow_cli-0.4.13 → scientiflow_cli-0.4.16}/scientiflow_cli/utils/logger.py +0 -0
  29. {scientiflow_cli-0.4.13 → scientiflow_cli-0.4.16}/scientiflow_cli/utils/mock.py +0 -0
  30. {scientiflow_cli-0.4.13 → scientiflow_cli-0.4.16}/scientiflow_cli/utils/singularity.py +0 -0
{scientiflow_cli-0.4.13 → scientiflow_cli-0.4.16}/PKG-INFO
@@ -1,6 +1,6 @@
  Metadata-Version: 2.1
  Name: scientiflow-cli
- Version: 0.4.13
+ Version: 0.4.16
  Summary: CLI tool for scientiflow. This application runs on the client side, decodes pipelines, and executes them in the configured order!
  License: Proprietary
  Author: ScientiFlow
{scientiflow_cli-0.4.13 → scientiflow_cli-0.4.16}/pyproject.toml
@@ -3,7 +3,7 @@ mode = "prod"
  
  [tool.poetry]
  name = "scientiflow-cli"
- version = "0.4.13"
+ version = "0.4.16"
  description = "CLI tool for scientiflow. This application runs on the client side, decodes pipelines, and executes them in the configured order!"
  authors = ["ScientiFlow <scientiflow@gmail.com>"]
  license = "Proprietary"
{scientiflow_cli-0.4.13 → scientiflow_cli-0.4.16}/scientiflow_cli/pipeline/decode_and_execute.py
@@ -116,7 +116,7 @@ def execute_background_command_standalone(command: str, log_file_path: str):
      return False
  
  class PipelineExecutor:
-     def __init__(self, base_dir: str, project_id: int, project_job_id: int, project_title: str, job_dir_name: str, nodes: List[Dict[str, Any]], edges: List[Dict[str, str]], environment_variables: Dict[str, str], start_node: str = None, end_node: str = None, job_status: str = None, current_node_from_config: str = None):
+     def __init__(self, base_dir: str, project_id: int, project_job_id: int, project_title: str, job_dir_name: str, nodes: List[Dict[str, Any]], edges: List[Dict[str, str]], environment_variables: Dict[str, str], start_node: str = None, end_node: str = None, job_status: str = None, current_node_from_config: str = None, is_cloud: bool = False):
          self.base_dir = base_dir
          self.project_id = project_id
          self.project_job_id = project_job_id
@@ -130,6 +130,11 @@ class PipelineExecutor:
          self.current_node = None
          self.job_status = job_status
          self.current_node_from_config = current_node_from_config
+         self.is_cloud = is_cloud
+         self.background_executors = []  # Keep track of background executors
+         self.background_jobs_count = 0  # Track number of active background jobs
+         self.background_jobs_completed = 0  # Track completed background jobs
+         self.background_jobs_lock = threading.Lock()  # Thread-safe counter updates
  
          # For resuming: flag to track if we've reached the resume point
          self.resume_mode = (job_status == "running" and current_node_from_config is not None)
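The new attributes set up a thread-safe completion counter for background steps (exercised end to end in the demo after the wait_for_background_jobs hunk below): worker threads increment background_jobs_completed while holding background_jobs_lock, and the main thread later compares it against background_jobs_count. A minimal sketch of the increment side, assuming a hypothetical _on_background_job_done callback that is not part of the package:

    import threading

    class CounterSketch:
        """Illustrative only: mirrors the executor's new counter fields."""
        def __init__(self):
            self.background_jobs_count = 0
            self.background_jobs_completed = 0
            self.background_jobs_lock = threading.Lock()

        def _on_background_job_done(self):
            # Hypothetical worker-thread callback; holding the lock makes the
            # read-modify-write increment atomic across threads.
            with self.background_jobs_lock:
                self.background_jobs_completed += 1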
@@ -153,6 +158,18 @@ class PipelineExecutor:
          # Initialize log file
          self.init_log()
  
+     # ✅ Helper method to conditionally notify cloud manager
+     def _notify_cloud(self, completion_type: str):
+         """Triggers scientiflow-cloud commands if in cloud mode."""
+         if not self.is_cloud:
+             return
+         try:
+             cmd = ["scientiflow-cloud", f"--completed-job-{completion_type}", str(self.project_job_id)]
+             subprocess.run(cmd, check=False)
+             logger.info(f"Sent {completion_type} signal to cloud manager.")
+         except Exception as e:
+             print(f"[ERROR] Cloud notification failed: {e}")
+ 
      def init_log(self):
          """Initialize the log file."""
          try:
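The two completion_type values used in this release are "fully" and "partially". Taking project_job_id = 42 as an invented example, the helper's subprocess calls are equivalent to running scientiflow-cloud --completed-job-fully 42 or scientiflow-cloud --completed-job-partially 42 from a shell:

    import subprocess

    # What _notify_cloud("fully") / _notify_cloud("partially") run for job 42.
    subprocess.run(["scientiflow-cloud", "--completed-job-fully", "42"], check=False)
    subprocess.run(["scientiflow-cloud", "--completed-job-partially", "42"], check=False)

Because check=False, a non-zero exit status from scientiflow-cloud is ignored rather than raised; only a failure to launch the process at all (for example, the binary missing from PATH) raises and reaches the except branch.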
@@ -185,6 +202,8 @@ class PipelineExecutor:
              body = {"project_job_id": self.project_job_id, "terminal_output": terminal_output}
              make_auth_request(endpoint="/agent-application/update-terminal-output", method="POST", data=body, error_message="Unable to update terminal output!")
              printer.print_message("[+] Terminal output updated successfully.", style="bold green")
+             # ✅ TRIGGER: FULL COMPLETION
+             self._notify_cloud("fully")
          except Exception as e:
              print(f"[ERROR] Failed to update terminal output: {e}")
  
@@ -241,6 +260,20 @@ class PipelineExecutor:
              raise SystemExit("[ERROR] Pipeline execution terminated due to an unexpected error.")
  
  
+     def wait_for_background_jobs(self):
+         """Wait for all background jobs to complete."""
+         import time
+         if self.background_jobs_count > 0:
+             printer.print_message(f"[INFO] Waiting for {self.background_jobs_count} background job(s) to complete...", style="bold yellow")
+             # ✅ TRIGGER: PARTIAL COMPLETION (Frees GPU/Primary CPU while waiting for cleanup)
+             self._notify_cloud("partially")
+             while True:
+                 with self.background_jobs_lock:
+                     if self.background_jobs_completed >= self.background_jobs_count:
+                         break
+                 time.sleep(0.5)  # Check every 500ms
+ 
+         printer.print_message("[INFO] All background jobs completed.", style="bold green")
  
      def dfs(self, node: str):
          """Perform Depth-First Search (DFS) for executing pipeline nodes."""
@@ -353,7 +386,6 @@ class PipelineExecutor:
          """Start executing the pipeline."""
          # Use job status from configuration instead of API call
          current_status = self.job_status
- 
          if current_status == "running":
              # Job is already running, resume from start but skip until current node
              current_node_id = self.current_node_from_config
@@ -384,7 +416,8 @@ class PipelineExecutor:
          self.update_terminal_output()
  
  # External function to initiate the pipeline execution
- def decode_and_execute_pipeline(base_dir: str, project_id: int, project_job_id: int, project_title: str, job_dir_name: str, nodes: List[Dict[str, Any]], edges: List[Dict[str, str]], environment_variables: Dict[str, str], start_node: str = None, end_node: str = None, job_status: str = None, current_node_from_config: str = None):
+ def decode_and_execute_pipeline(base_dir: str, project_id: int, project_job_id: int, project_title: str, job_dir_name: str, nodes: List[Dict[str, Any]], edges: List[Dict[str, str]], environment_variables: Dict[str, str], start_node: str = None, end_node: str = None, job_status: str = None, current_node_from_config: str = None, is_cloud: bool = False):
      """Initialize and execute the pipeline."""
-     executor = PipelineExecutor(base_dir, project_id, project_job_id, project_title, job_dir_name, nodes, edges, environment_variables, start_node, end_node, job_status, current_node_from_config)
-     executor.decode_and_execute_pipeline()
+     executor = PipelineExecutor(base_dir, project_id, project_job_id, project_title, job_dir_name, nodes, edges, environment_variables, start_node, end_node, job_status, current_node_from_config, is_cloud)
+     executor.decode_and_execute_pipeline()
+ 
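is_cloud defaults to False at every layer, so existing callers of decode_and_execute_pipeline are unaffected and cloud runs opt in explicitly. An illustrative call with placeholder values (all arguments here are invented):

    decode_and_execute_pipeline(
        base_dir="/data/agent",       # placeholder path
        project_id=7,
        project_job_id=42,
        project_title="7_demo",
        job_dir_name="job_42",
        nodes=nodes,                  # pipeline graph from the job payload
        edges=edges,
        environment_variables={},
        job_status="pending",
        is_cloud=True,                # enables the _notify_cloud triggers above
    )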
{scientiflow_cli-0.4.13 → scientiflow_cli-0.4.16}/scientiflow_cli/services/executor.py
@@ -37,7 +37,7 @@ def execute_jobs(job_ids: list[int] = None, parallel: bool = False, is_cloud: bo
      if matching_jobs:
          if is_cloud:
              for job in matching_jobs:
-                 job['project_title'] = job['project']['id'] + '_' + job['project']['project_title']
+                 job['project']['project_title'] = str(job['project']['id']) + '_' + job['project']['project_title']
                  if 'server' not in job or job['server'] is None:
                      job['server'] = {'base_directory':None}
                  job['server']['base_directory'] = get_base_directory()
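The replaced line had two bugs: it concatenated job['project']['id'] (an integer, judging by the added str() cast) directly onto a string, which raises TypeError, and it stored the prefixed title under a new top-level job['project_title'] key rather than updating the nested job['project']['project_title'] field in place, which is what the fix now does. A minimal reproduction with an invented job dict:

    job = {'project': {'id': 7, 'project_title': 'demo'}}  # invented sample

    # Old: TypeError: unsupported operand type(s) for +: 'int' and 'str',
    # and the result targeted the wrong (top-level) key anyway.
    # job['project_title'] = job['project']['id'] + '_' + job['project']['project_title']

    # New: cast the id and update the nested key in place.
    job['project']['project_title'] = str(job['project']['id']) + '_' + job['project']['project_title']
    assert job['project']['project_title'] == '7_demo'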
@@ -172,8 +172,8 @@ def execute_single_job(job: dict, is_cloud: bool = False) -> None:
              printer.print_success(f"[+] Resuming execution for job ID: {project_job_id}")
          else:
              printer.print_success(f"[+] Starting execution for job ID: {project_job_id}")
- 
-         decode_and_execute_pipeline(base_dir, project_id, project_job_id, project_title, job_dir_name, nodes, edges, environment_variables, start_node=start_node, end_node=end_node, job_status=job_status, current_node_from_config=current_node_from_config)
+ 
+         decode_and_execute_pipeline(base_dir, project_id, project_job_id, project_title, job_dir_name, nodes, edges, environment_variables, start_node=start_node, end_node=end_node, job_status=job_status, current_node_from_config=current_node_from_config, is_cloud=is_cloud)
          printer.print_success(f"[+] Execution completed for job ID: {project_job_id}")
  
      except ValueError as value_err: