bfabric-web-apps 0.1.4__py3-none-any.whl → 0.1.5__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -19,42 +19,39 @@ from .utils.create_app_in_bfabric import create_app_in_bfabric
19
19
  from .utils.callbacks import (
20
20
  process_url_and_token,
21
21
  submit_bug_report,
22
- populate_workunit_details
22
+ populate_workunit_details,
23
+ get_redis_queue_layout
23
24
  )
24
25
 
25
- from .utils import defaults
26
-
27
- from bfabric_web_apps.utils.resource_utilities import create_workunit, create_resource, create_workunits, create_resources
28
- HOST = os.getenv("HOST", defaults.HOST)
29
- PORT = int(os.getenv("PORT", defaults.PORT)) # Convert to int since env variables are strings
30
- DEV = os.getenv("DEV", str(defaults.DEV)).lower() in ["true", "1", "yes"] # Convert to bool
31
- CONFIG_FILE_PATH = os.getenv("CONFIG_FILE_PATH", defaults.CONFIG_FILE_PATH)
32
-
33
- DEVELOPER_EMAIL_ADDRESS = os.getenv("DEVELOPER_EMAIL_ADDRESS", defaults.DEVELOPER_EMAIL_ADDRESS)
34
- BUG_REPORT_EMAIL_ADDRESS = os.getenv("BUG_REPORT_EMAIL_ADDRESS", defaults.BUG_REPORT_EMAIL_ADDRESS)
35
-
36
-
37
- # Define __all__ for controlled imports
38
- __all__ = [
39
- "BfabricInterface",
40
- "Logger",
41
- "components",
42
- "get_static_layout",
43
- "create_app",
44
- "process_url_and_token",
45
- "submit_bug_report",
46
- 'get_logger',
47
- 'get_power_user_wrapper',
48
- 'HOST',
49
- 'PORT',
50
- 'DEV',
51
- 'CONFIG_FILE_PATH',
52
- 'DEVELOPER_EMAIL_ADDRESS',
53
- 'BUG_REPORT_EMAIL_ADDRESS',
54
- 'create_app_in_bfabric',
55
- 'create_workunit',
56
- 'create_resource',
57
- 'create_workunits',
58
- 'create_resources',
59
- 'populate_workunit_details',
60
- ]
26
+ from .utils.config import settings as config
27
+
28
+ from .utils.run_main_pipeline import run_main_job
29
+
30
+ from .utils.resource_utilities import (
31
+ create_workunit,
32
+ create_resource,
33
+ create_workunits,
34
+ create_resources
35
+ )
36
+
37
+ from .utils.redis_worker_init import run_worker, test_job
38
+ from .utils.redis_queue import q
39
+
40
+ REDIS_HOST = config.REDIS_HOST
41
+ REDIS_PORT = config.REDIS_PORT
42
+
43
+ HOST = config.HOST
44
+ PORT = config.PORT
45
+ DEV = config.DEV
46
+ DEBUG = config.DEBUG
47
+
48
+ CONFIG_FILE_PATH = config.CONFIG_FILE_PATH
49
+
50
+ DEVELOPER_EMAIL_ADDRESS = config.DEVELOPER_EMAIL_ADDRESS
51
+ BUG_REPORT_EMAIL_ADDRESS = config.BUG_REPORT_EMAIL_ADDRESS
52
+
53
+ GSTORE_REMOTE_PATH = config.GSTORE_REMOTE_PATH
54
+ SCRATCH_PATH = config.SCRATCH_PATH
55
+ TRX_LOGIN = config.TRX_LOGIN
56
+ TRX_SSH_KEY = config.TRX_SSH_KEY
57
+ URL = config.URL
@@ -2,7 +2,7 @@ from dash import html, dcc
2
2
  import dash_bootstrap_components as dbc
3
3
  import bfabric_web_apps
4
4
 
5
- def get_static_layout(base_title=None, main_content=None, documentation_content=None):
5
+ def get_static_layout(base_title=None, main_content=None, documentation_content=None, layout_config={}):
6
6
  """
7
7
  Returns a layout with static tabs for Main, Documentation, and Report a Bug.
8
8
  The main content is customizable, while the other tabs are generic.
@@ -11,10 +11,24 @@ def get_static_layout(base_title=None, main_content=None, documentation_content=
11
11
  base_title (str): The main title to be displayed in the banner.
12
12
  main_content (html.Div): Content to be displayed in the "Main" tab.
13
13
  documentation_content (html.Div): Content for the "Documentation" tab.
14
+ layout_config (dict): Configuration for the layout, determining which tabs are shown.
14
15
 
15
16
  Returns:
16
17
  html.Div: The complete static layout of the web app.
17
18
  """
19
+
20
+ tab_list = [
21
+ dbc.Tab(main_content, label="Main", tab_id="main"),
22
+ dbc.Tab(dcc.Loading(get_documentation_tab(documentation_content)), label="Documentation", tab_id="documentation"),
23
+ ]
24
+
25
+ if layout_config.get("workunits", False):
26
+ tab_list.append(dbc.Tab(dcc.Loading(get_workunits_tab()), label="Workunits", tab_id="workunits"))
27
+ if layout_config.get("queue", False):
28
+ tab_list.append(dbc.Tab(get_queue_tab(), label="Queue", tab_id="queue"))
29
+ if layout_config.get("bug", False):
30
+ tab_list.append(dbc.Tab(dcc.Loading(get_report_bug_tab()), label="Report a Bug", tab_id="report-bug"))
31
+
18
32
  return html.Div(
19
33
  children=[
20
34
  dcc.Location(id='url', refresh=False),
@@ -142,12 +156,7 @@ def get_static_layout(base_title=None, main_content=None, documentation_content=
142
156
 
143
157
  # Tabs Section
144
158
  dbc.Tabs(
145
- [
146
- dbc.Tab(main_content, label="Main", tab_id="main"),
147
- dbc.Tab(dcc.Loading(get_documentation_tab(documentation_content)), label="Documentation", tab_id="documentation"),
148
- dbc.Tab(dcc.Loading(get_workunits_tab()), label="Workunits", tab_id="workunits"),
149
- dbc.Tab(dcc.Loading(get_report_bug_tab()), label="Report a Bug", tab_id="report-bug"),
150
- ],
159
+ tab_list,
151
160
  id="tabs",
152
161
  active_tab="main",
153
162
  ),
@@ -305,4 +314,38 @@ def get_workunits_tab():
305
314
  ),
306
315
  ],
307
316
  style={"margin-top": "0px", "min-height": "40vh"},
317
+ )
318
+
319
+
320
+ def get_queue_tab():
321
+
322
+ return dbc.Row(
323
+ id="page-content-queue",
324
+ children=[
325
+ dbc.Col(
326
+ html.Div(
327
+ id="page-content-queue-children",
328
+ children=[],
329
+ style={
330
+ "margin-top": "2vh",
331
+ "margin-left": "2vw",
332
+ "font-size": "20px",
333
+ "padding-right": "40px",
334
+ "overflow-y": "scroll",
335
+ "max-height": "65vh",
336
+ },
337
+ ),
338
+ ),
339
+ dbc.Col(
340
+ children = [
341
+ dcc.Interval(
342
+ id="queue-interval",
343
+ interval=5 * 1000, # in milliseconds
344
+ n_intervals=0,
345
+ ),
346
+ ],
347
+ style={"display": "none"}
348
+ )
349
+ ],
350
+ style={"margin-top": "0px", "min-height": "40vh"},
308
351
  )
@@ -4,6 +4,10 @@ import json
4
4
  import dash_bootstrap_components as dbc
5
5
  from datetime import datetime as dt
6
6
  from bfabric_web_apps.utils.get_logger import get_logger
7
+ from rq import Queue
8
+ from .redis_connection import redis_conn
9
+ from rq.registry import StartedJobRegistry, FailedJobRegistry, FinishedJobRegistry
10
+
7
11
 
8
12
  def process_url_and_token(url_params):
9
13
  """
@@ -181,7 +185,7 @@ def populate_workunit_details(token_data):
181
185
 
182
186
  environment_urls = {
183
187
  "Test": "https://fgcz-bfabric-test.uzh.ch/bfabric/workunit/show.html?id=",
184
- "Prod": "https://fgcz-bfabric.uzh.ch/bfabric/workunit/show.html?id="
188
+ "Production": "https://fgcz-bfabric.uzh.ch/bfabric/workunit/show.html?id="
185
189
  }
186
190
 
187
191
  if token_data:
@@ -228,4 +232,107 @@ def populate_workunit_details(token_data):
228
232
 
229
233
  return dbc.Container(wu_cards, style={"display": "flex", "flex-wrap": "wrap"})
230
234
  else:
231
- return html.Div()
235
+ return html.Div()
236
+
237
+ def get_redis_queue_layout():
238
+ # Get all queues dynamically
239
+ queues = Queue.all(connection=redis_conn)
240
+
241
+ queue_cards = []
242
+
243
+ print("QUEUES", queues)
244
+
245
+ for queue in queues:
246
+ queue_name = queue.name
247
+
248
+ # Get queue stats
249
+ started_registry = StartedJobRegistry(queue_name, connection=redis_conn)
250
+ failed_registry = FailedJobRegistry(queue_name, connection=redis_conn)
251
+ finished_registry = FinishedJobRegistry(queue_name, connection=redis_conn)
252
+
253
+ stats = {
254
+ "Jobs in queue": queue.count,
255
+ "Running": started_registry.count,
256
+ "Failed": failed_registry.count,
257
+ "Completed": finished_registry.count,
258
+ }
259
+
260
+ print("STAT", stats)
261
+
262
+ stats_row = dbc.Row([
263
+ dbc.Col([
264
+ html.P([html.B("Jobs in queue: "), f"{queue.count}"]),
265
+ html.P([html.B("Running: "), f"{started_registry.count}"]),
266
+ ],width=6),
267
+ dbc.Col([
268
+ html.P([html.B("Failed: "), f"{failed_registry.count}"]),
269
+ html.P([html.B("Completed: "), f"{finished_registry.count}"]),
270
+ ], width=6)
271
+ ])
272
+
273
+ # Fetch job details
274
+ job_cards = []
275
+ for job_id in started_registry.get_job_ids():
276
+ job = queue.fetch_job(job_id)
277
+ if job:
278
+ job_cards.append(
279
+ dbc.Card(
280
+ dbc.CardBody([
281
+ html.H6(f"Job ID: {job.id}", className="card-title"),
282
+ html.P(f"Function: {job.func_name}", className="card-text"),
283
+ html.P(f"Status: Running", className="text-success"),
284
+ ]),
285
+ style={"maxWidth": "36vw", "backgroundColor": "#d4edda"}, className="mb-2"
286
+ )
287
+ )
288
+
289
+ for job_id in failed_registry.get_job_ids():
290
+ job = queue.fetch_job(job_id)
291
+ if job:
292
+ job_cards.append(
293
+ dbc.Card(
294
+ dbc.CardBody([
295
+ html.H6(f"Job ID: {job.id}", className="card-title"),
296
+ html.P(f"Function: {job.func_name}", className="card-text"),
297
+ html.P(f"Status: Failed", className="text-danger"),
298
+ ]),
299
+ style={"maxWidth": "36vw", "backgroundColor": "#f8d7da"}, className="mb-2"
300
+ )
301
+ )
302
+
303
+ for job_id in finished_registry.get_job_ids():
304
+ job = queue.fetch_job(job_id)
305
+ if job:
306
+ finished_time = job.ended_at.strftime("%Y-%m-%d %H:%M:%S") if job.ended_at else "Unknown"
307
+ job_cards.append(
308
+ dbc.Card(
309
+ dbc.CardBody([
310
+ html.H6(f"Job ID: {job.id}", className="card-title"),
311
+ html.P(f"Function: {job.func_name}", className="card-text"),
312
+ html.P(f"Status: Completed", className="text-primary"),
313
+ html.P(f"Finished at: {finished_time}", className="text-muted"),
314
+ ]),
315
+ style={"maxWidth": "36vw", "backgroundColor": "#d1ecf1"}, className="mb-2"
316
+ )
317
+ )
318
+
319
+ # Create queue card
320
+ queue_card = dbc.Col([
321
+ dbc.Card(
322
+ [
323
+ dbc.CardHeader(html.H5(f"Queue: {queue_name}")),
324
+ dbc.CardBody([
325
+ stats_row,  # queue-level stats summary row
326
+ html.Hr(),
327
+ *job_cards # Add job sub-cards
328
+ ], style={"maxHeight": "58vh", "overflow-y": "scroll"})
329
+ ],
330
+ style={"maxWidth": "36vw", "backgroundColor": "#f8f9fa", "max-height":"60vh"}, className="mb-4"
331
+ )
332
+ ])
333
+
334
+ queue_cards.append(queue_card)
335
+
336
+ container_children = dbc.Row(queue_cards)
337
+
338
+ return dbc.Container(container_children, className="mt-4")
@@ -0,0 +1,38 @@
1
+ from pydantic_settings import BaseSettings
2
+ from pydantic import EmailStr
3
+
4
+ class Settings(BaseSettings):
5
+
6
+ REDIS_HOST: str = "localhost"
7
+ REDIS_PORT: int = 6379
8
+
9
+ CONFIG_FILE_PATH: str = "~/.bfabricpy.yml"
10
+
11
+ HOST: str = "127.0.0.1"
12
+ PORT: int = 8050
13
+
14
+ DEV: bool = False
15
+ DEBUG: bool = False
16
+
17
+ DEVELOPER_EMAIL_ADDRESS: EmailStr = "griffin@gwcustom.com"
18
+ BUG_REPORT_EMAIL_ADDRESS: EmailStr = "gwtools@fgcz.system"
19
+
20
+ #Run main pipeline config (only FGCZ specific)
21
+ GSTORE_REMOTE_PATH: str = "/path/to/remote/gstore"
22
+ SCRATCH_PATH: str = "/scratch/folder"
23
+ TRX_LOGIN: str = "trxcopy@fgcz-server.uzh.ch"
24
+ TRX_SSH_KEY: str = "/home/user/.ssh/your_ssh_key"
25
+ URL: str = "https://fgcz/dummy/url"
26
+
27
+ class Config:
28
+
29
+ env_file = ".env"
30
+
31
+ # NOTE(review): intended to disable env-variable reading, but pydantic-settings v2 never calls this v1-style hook — use settings_customise_sources instead; verify
32
+ @classmethod
33
+ def customise_sources(cls, init_settings, env_settings, file_secret_settings):
34
+ return file_secret_settings, init_settings
35
+
36
+ # Instantiate settings
37
+ settings = Settings()
38
+
@@ -0,0 +1,6 @@
1
+
2
+ from .config import settings as config
3
+
4
+ from redis import Redis
5
+
6
+ redis_conn = Redis(host=config.REDIS_HOST, port=config.REDIS_PORT)
@@ -0,0 +1,6 @@
1
+ from rq import Queue
2
+ from .redis_connection import redis_conn as conn
3
+
4
+
5
+ def q(queue_name):
6
+ return Queue(name=queue_name, connection=conn, default_timeout=10000000)
@@ -0,0 +1,28 @@
1
+ import redis
2
+ from rq import Worker, Queue, Connection
3
+ import time
4
+
5
+ def test_job():
6
+
7
+ """
8
+ A test job that prints a message to the console.
9
+ """
10
+ print("Hello, this is a test job!")
11
+ time.sleep(10)
12
+ print("Test job finished!")
13
+ return
14
+
15
+
16
+ def run_worker(host, port, queue_names):
17
+ """
18
+ Provides internal interface for running workers on a specified host and port.
19
+
20
+ Args:
21
+ host (str): The host to run
22
+ port (int): The port to run
23
+ queue_names (list): A list of queue names to listen to
24
+ """
25
+ conn = redis.Redis(host=host, port=port)
26
+ with Connection(conn):
27
+ worker = Worker(map(Queue, queue_names))
28
+ worker.work()
@@ -16,7 +16,7 @@ def create_workunit(token_data, application_name, application_description, appli
16
16
  container_id (int): Container ID (Order ID).
17
17
 
18
18
  Returns:
19
- int: Created workunit ID or None if creation fails.
19
+ obj: Created workunit object or None if creation fails.
20
20
  """
21
21
  L = get_logger(token_data)
22
22
  wrapper = bfabric_interface.get_wrapper()
@@ -50,7 +50,7 @@ def create_workunit(token_data, application_name, application_description, appli
50
50
  params=None,
51
51
  flush_logs=True
52
52
  )
53
- return workunit_id
53
+ return workunit_response[0]
54
54
 
55
55
  except Exception as e:
56
56
  L.log_operation(
@@ -75,82 +75,95 @@ def create_workunits(token_data, application_name, application_description, appl
75
75
  container_ids (list): List of container IDs.
76
76
 
77
77
  Returns:
78
- list: List of created workunit IDs.
78
+ list[obj]: List of created workunit objects.
79
79
  """
80
80
  if not isinstance(container_ids, list):
81
81
  container_ids = [container_ids] # Ensure it's a list
82
82
 
83
- workunit_ids = [
83
+ workunits = [
84
84
  create_workunit(token_data, application_name, application_description, application_id, container_id)
85
85
  for container_id in container_ids
86
86
  ]
87
87
 
88
- return [wu_id for wu_id in workunit_ids if wu_id is not None] # Filter out None values
88
+ return [wu_id for wu_id in workunits if wu_id is not None] # Filter out None values
89
89
 
90
90
 
91
- def create_resource(token_data, workunit_id, gz_file_path):
91
+ from pathlib import Path
92
+
93
+ def create_resource(token_data, workunit_id, file_path, storage_id="20"): # GWC Server is storage id 20.
92
94
  """
93
- Upload a single .gz resource to an existing B-Fabric workunit.
95
+ Attach a single file as a resource to an existing B-Fabric workunit.
94
96
 
95
97
  Args:
96
98
  token_data (dict): Authentication token data.
97
99
  workunit_id (int): ID of the workunit to associate the resource with.
98
- gz_file_path (str): Full path to the .gz file to upload.
100
+ file_path (str): Full path to the file to attach.
99
101
 
100
102
  Returns:
101
- int: Resource ID if successful, None otherwise.
103
+ obj: Resource object if successful, None otherwise.
102
104
  """
103
105
  L = get_logger(token_data)
104
106
  wrapper = get_power_user_wrapper(token_data)
105
107
 
106
108
  try:
107
- file_path = Path(gz_file_path)
108
-
109
- # Upload the resource
110
- print("Uploading:", file_path, "to workunit:", workunit_id)
111
- result = bfabric_upload_resource(wrapper, file_path, workunit_id)
109
+ file_path = Path(file_path)
110
+
111
+ # Attaching the resource
112
+ print(f"Attaching: {file_path.name} to workunit: {workunit_id}")
113
+
114
+ result = wrapper.save(
115
+ endpoint="resource",
116
+ obj={
117
+ "workunitid": str(workunit_id),
118
+ "name": file_path.name,
119
+ "description": f"Resource attached to workunit {workunit_id}",
120
+ "relativepath": file_path.name,
121
+ "storageid": str(storage_id),
122
+ }
123
+ )
112
124
 
113
125
  if result:
114
- print(f"Resource uploaded: {file_path.name}")
126
+ resource_id = result[0].get("id")
127
+ print(f"Resource attached: {file_path.name} (ID: {resource_id})")
115
128
  L.log_operation(
116
- "upload_resource",
117
- f"Resource uploaded successfully: {file_path.name}",
129
+ "Attach_resource",
130
+ f"Resource attached successfully: {file_path.name}",
118
131
  params=None,
119
132
  flush_logs=True,
120
133
  )
121
- return result
134
+ return result[0]
122
135
  else:
123
- raise ValueError(f"Failed to upload resource: {file_path.name}")
136
+ raise ValueError(f"Failed to attach resource: {file_path.name}")
124
137
 
125
138
  except Exception as e:
126
139
  L.log_operation(
127
140
  "error",
128
- f"Failed to upload resource: {e}",
141
+ f"Failed to attach resource: {e}",
129
142
  params=None,
130
143
  flush_logs=True,
131
144
  )
132
- print(f"Failed to upload resource: {e}")
145
+ print(f"Failed to attach resource: {e}")
133
146
  return None
134
147
 
135
148
 
136
- def create_resources(token_data, workunit_id, gz_file_paths):
149
+ def create_resources(token_data, workunit_id, file_paths):
137
150
  """
138
- Upload multiple .gz resources to an existing B-Fabric workunit.
151
+ Attach multiple files as resources to an existing B-Fabric workunit.
139
152
 
140
153
  Args:
141
154
  token_data (dict): Authentication token data.
142
155
  workunit_id (int): ID of the workunit to associate the resources with.
143
- gz_file_paths (list): List of full paths to .gz files to upload.
156
+ file_paths (list): List of full paths to files to attach.
144
157
 
145
158
  Returns:
146
- list: List of successfully uploaded resource IDs.
159
+ list[obj]: List of successfully attached resource objects.
147
160
  """
148
- if not isinstance(gz_file_paths, list):
149
- gz_file_paths = [gz_file_paths] # Ensure it's a list
161
+ if not isinstance(file_paths, list):
162
+ file_paths = [file_paths] # Ensure it's a list
150
163
 
151
- resource_ids = [
152
- create_resource(token_data, workunit_id, gz_file_path)
153
- for gz_file_path in gz_file_paths
164
+ resources = [
165
+ create_resource(token_data, workunit_id, file_path)
166
+ for file_path in file_paths
154
167
  ]
155
168
 
156
- return [res_id for res_id in resource_ids if res_id is not None] # Filter out None values
169
+ return [res_id for res_id in resources if res_id is not None] # Filter out None values
@@ -0,0 +1,414 @@
1
+ import redis
2
+ from rq import Queue
3
+ import os
4
+ import shutil
5
+ import subprocess
6
+ from pathlib import Path
7
+
8
+ from .get_logger import get_logger
9
+ from .get_power_user_wrapper import get_power_user_wrapper
10
+ from .callbacks import process_url_and_token
11
+ from bfabric_web_apps.objects import BfabricInterface
12
+ from .resource_utilities import (
13
+ create_workunit,
14
+ create_resource,
15
+ create_workunits,
16
+ create_resources
17
+ )
18
+
19
+ from .config import settings as config
20
+ from datetime import datetime as dt
21
+
22
+ GSTORE_REMOTE_PATH = config.GSTORE_REMOTE_PATH
23
+ SCRATCH_PATH = config.SCRATCH_PATH
24
+ TRX_LOGIN = config.TRX_LOGIN
25
+ TRX_SSH_KEY = config.TRX_SSH_KEY
26
+ URL = config.URL
27
+
28
+ def run_main_job(files_as_byte_strings: dict,
29
+ bash_commands: list[str],
30
+ resource_paths: dict,
31
+ attachment_paths: dict,
32
+ token: str):
33
+ """
34
+ Main function to handle:
35
+ 1) Save Files on Server
36
+ 2) Execute local bash commands
37
+ 3) Create workunits in B-Fabric
38
+ 4) Register resources in B-Fabric
39
+ 5) Attach additional gstore files (logs/reports/etc.) to entities in B-Fabric
40
+
41
+ :param files_as_byte_strings: {destination_path: file as byte strings}
42
+ :param bash_commands: List of bash commands to execute
43
+ :param resource_paths: dict, {resource_path: container_id}
44
+ :param attachment_paths: Dictionary mapping source file paths to their corresponding file names ({"path/test.txt": "name.txt"})
45
+ for attachment to a B-Fabric entity (e.g., logs, final reports, etc.)
46
+ :param token: Authentication token
47
+
48
+
49
+ Dev Notes:
50
+ !!! All exceptions get logged (make sure to log the exception message i.e. "except Exception as e: log(e)") !!!
51
+ !!! If an exception doesn't occur, log that some step ran successfully to the job object !!!
52
+ """
53
+
54
+ # STEP 0: Parse token, logger, etc.
55
+ token, token_data, entity_data, app_data, page_title, session_details, job_link = process_url_and_token(token)
56
+
57
+ if token is None:
58
+ raise ValueError("Error: 'token' is None")
59
+ if token_data is None:
60
+ raise ValueError("Error: 'token_data' is None")
61
+ if entity_data is None:
62
+ raise ValueError("Error: 'entity_data' is None")
63
+ if app_data is None:
64
+ raise ValueError("Error: 'app_data' is None")
65
+
66
+
67
+ L = get_logger(token_data)
68
+ print("Token Data:", token_data)
69
+ print("Entity Data:", entity_data)
70
+ print("App Data:", app_data)
71
+
72
+
73
+ # Step 1: Save files to the server
74
+ try:
75
+ summary = save_files_from_bytes(files_as_byte_strings, L)
76
+ L.log_operation("Success", f"File copy summary: {summary}", params=None, flush_logs=True)
77
+ print("Summary:", summary)
78
+ except Exception as e:
79
+ # If something unexpected blows up the entire process
80
+ L.log_operation("Error", f"Failed to copy files: {e}", params=None, flush_logs=True)
81
+ print("Error copying files:", e)
82
+
83
+
84
+ # STEP 2: Execute bash commands
85
+ try:
86
+ bash_log = execute_and_log_bash_commands(bash_commands, L)
87
+ L.log_operation("Success", f"Bash commands executed successfully:\n{bash_log}",
88
+ params=None, flush_logs=True)
89
+ except Exception as e:
90
+ L.log_operation("Error", f"Failed to execute bash commands: {e}",
91
+ params=None, flush_logs=True)
92
+ print("Error executing bash commands:", e)
93
+
94
+
95
+ # STEP 3: Create Workunits
96
+ try:
97
+ workunit_map = create_workunits_step(token_data, app_data, resource_paths, L)
98
+ except Exception as e:
99
+ L.log_operation("Error", f"Failed to create workunits in B-Fabric: {e}",
100
+ params=None, flush_logs=True)
101
+ print("Error creating workunits:", e)
102
+ workunit_map = {}
103
+
104
+ # STEP 4: Register Resources (Refactored)
105
+ try:
106
+ attach_resources_to_workunits(token_data, L, workunit_map)
107
+ except Exception as e:
108
+ L.log_operation("Error", f"Failed to register resources: {e}", params=None, flush_logs=True)
109
+ print("Error registering resources:", e)
110
+
111
+ # STEP 5: Attach gstore files (logs, reports, etc.) to B-Fabric entity as a Link
112
+ try:
113
+ attach_gstore_files_to_entities_as_link(token_data, L, attachment_paths)
114
+ print("Attachment Paths:", attachment_paths)
115
+ except Exception as e:
116
+ L.log_operation("Error", f"Failed to attach extra files: {e}", params=None, flush_logs=True)
117
+ print("Error attaching extra files:", e)
118
+
119
+
120
+
121
+ #---------------------------------------------------------------------------------------------------------------------
122
+ #---------------------------------------------------------------------------------------------------------------------
123
+
124
+
125
+ # -----------------------------------------------------------------------------
126
+ # Step 1: Save Files from bytes
127
+ # -----------------------------------------------------------------------------
128
+
129
+ import os
130
+
131
+ def save_files_from_bytes(files_as_byte_strings: dict, logger):
132
+ """
133
+ Saves byte string files to their respective paths.
134
+
135
+ :param files_as_byte_strings: Dictionary where keys are destination paths and values are byte strings
136
+ :param logger: Logging instance
137
+ :return: Summary indicating how many files succeeded vs. failed
138
+ """
139
+ results = {} # Store results: (destination) -> True (if success) or error message (if failure)
140
+
141
+ # First pass: attempt to write all files
142
+ for destination, file_bytes in files_as_byte_strings.items():
143
+ try:
144
+ # Ensure the directory exists
145
+ os.makedirs(os.path.dirname(destination), exist_ok=True)
146
+
147
+ # Write file from byte string
148
+ with open(destination, "wb") as f:
149
+ f.write(file_bytes)
150
+ logger.log_operation("Files saved", "All files saved successfully.", params=None, flush_logs=True)
151
+ return "All files saved successfully."
152
+
153
+ except Exception as e:
154
+ error_msg = f"Error saving file: {destination}, Error: {str(e)}"
155
+ logger.log_operation("Error", error_msg, params=None, flush_logs=True)
156
+ print(error_msg)
157
+ raise RuntimeError(error_msg)
158
+
159
+
160
+ # -----------------------------------------------------------------------------
161
+ # Step 2: Execute Bash Commands
162
+ # -----------------------------------------------------------------------------
163
+
164
+ def execute_and_log_bash_commands(bash_commands: list[str], logger):
165
+ """
166
+ Executes a list of bash commands locally, logs and returns the output.
167
+
168
+ :param bash_commands: List of commands to execute
169
+ :param logger: Logging instance
170
+ :return: A single string containing logs for all commands
171
+ """
172
+ logstring = ""
173
+
174
+ for cmd in bash_commands:
175
+ logstring += "---------------------------------------------------------\n"
176
+ logstring += f"Executing Command: {cmd}\n"
177
+
178
+ try:
179
+ # Execute the command and capture both stdout and stderr
180
+ result = subprocess.run(cmd, shell=True, text=True, capture_output=True)
181
+ output = result.stdout.strip()
182
+ error_output = result.stderr.strip()
183
+
184
+ # Check if command executed successfully
185
+ if result.returncode == 0:
186
+ status = "SUCCESS"
187
+ log_entry = f"Command: {cmd}\nStatus: {status}\nOutput:\n{output}\n"
188
+ logger.log_operation("Info", log_entry, params=None, flush_logs=True)
189
+ else:
190
+ status = "FAILURE"
191
+ log_entry = f"Command: {cmd}\nStatus: {status}\nError Output:\n{error_output}\n"
192
+ logger.log_operation("Error", log_entry, params=None, flush_logs=True)
193
+
194
+ logstring += log_entry
195
+ print(log_entry)
196
+
197
+ except Exception as e:
198
+ logstring += f"Command: {cmd}\nStatus: ERROR\nException: {str(e)}\n"
199
+ logger.log_operation("Error", f"Command: {cmd} failed with Exception: {str(e)}",
200
+ params=None, flush_logs=True)
201
+
202
+ return logstring
203
+
204
+
205
+ # -----------------------------------------------------------------------------
206
+ # Step 3: Create Workunits in B-Fabric
207
+ # -----------------------------------------------------------------------------
208
+
209
+ def create_workunits_step(token_data, app_data, resource_paths, logger):
210
+ """
211
+ Creates multiple workunits in B-Fabric based on unique order IDs found in resource_paths.
212
+
213
+ :param token_data: dict with token/auth info
214
+ :param app_data: dict with fields like {"id": <app_id>} or other app info
215
+ :param resource_paths: Dictionary {file_path: container_id}
216
+ :param logger: a logger instance
217
+ :return: A dictionary mapping file_paths to workunit IDs {file_path: workunit_id}
218
+ """
219
+ app_id = app_data["id"] # Extract the application ID
220
+
221
+ # Extract unique order IDs from resource_paths
222
+ container_ids = list(set(resource_paths.values()))
223
+
224
+ if not container_ids:
225
+ raise ValueError("No order IDs found in resource_paths; cannot create workunits.")
226
+
227
+ # Create all workunits in one API call
228
+ created_workunits = create_workunits(
229
+ token_data=token_data,
230
+ application_name="Test Workunit",
231
+ application_description="Workunits for batch processing",
232
+ application_id=app_id,
233
+ container_ids=container_ids
234
+ )
235
+
236
+ if not created_workunits or len(created_workunits) != len(container_ids):
237
+ raise ValueError(f"Mismatch in workunit creation: Expected {len(container_ids)} workunits, got {len(created_workunits)}.")
238
+
239
+ workunit_map = {
240
+ file_path: wu["id"]
241
+ for wu in created_workunits
242
+ for file_path, container_id in resource_paths.items()
243
+ if container_id == wu["container"]["id"]
244
+ }
245
+
246
+ logger.log_operation("Success", f"Total created Workunits: {list(workunit_map.values())}", params=None, flush_logs=True)
247
+ print(f"Total created Workunits: {list(workunit_map.values())}")
248
+
249
+ print(workunit_map)
250
+ return workunit_map # Returning {file_path: workunit_id}
251
+
252
+
253
+
254
+ # -----------------------------------------------------------------------------
255
+ # Step 4: Attach Resources in B-Fabric
256
+ # -----------------------------------------------------------------------------
257
+
258
+ def attach_resources_to_workunits(token_data, logger, workunit_map):
259
+ """
260
+ Attaches each file to its corresponding workunit.
261
+
262
+ Uses `create_resource` to upload files one by one.
263
+
264
+ :param token_data: B-Fabric token data
265
+ :param logger: Logger instance
266
+ :param workunit_map: Dictionary mapping file_path to workunit_id {file_path: workunit_id}
267
+ """
268
+ if not workunit_map:
269
+ logger.log_operation("Info", "No workunits found, skipping resource registration.",
270
+ params=None, flush_logs=True)
271
+ print("No workunits found, skipping resource registration.")
272
+ return
273
+
274
+ print("Workunit Map:", workunit_map)
275
+
276
+ for file_path, workunit_id in workunit_map.items():
277
+ print(f"Processing file: {file_path}, Workunit ID: {workunit_id}") # Corrected print statement
278
+ # Upload the file as a resource
279
+ resource = create_resource(token_data, workunit_id, file_path)
280
+ resource_id = resource.get("id") if resource else None
281
+ print("Resource ID:", resource_id)
282
+
283
+ if resource_id:
284
+ logger.log_operation("Success", f"Resource {resource_id} attached to Workunit {workunit_id}",
285
+ params=None, flush_logs=True)
286
+ print(f"Resource {resource_id} attached to Workunit {workunit_id}")
287
+ else:
288
+ logger.log_operation("Error", f"Failed to attach resource {file_path} for Workunit {workunit_id}",
289
+ params=None, flush_logs=True)
290
+ print(f"Failed to attach resource {file_path} for Workunit {workunit_id}")
291
+
292
+
293
+
294
+ # -----------------------------------------------------------------------------
295
+ # Step 5: Attachments of gstore in B-Fabric as a Link
296
+ # -----------------------------------------------------------------------------
297
+
298
+ def attach_gstore_files_to_entities_as_link(token_data, logger, attachment_paths: dict):
299
+
300
+
301
+ """
302
+ Attaches files to a B-Fabric entity by copying them to the FGCZ storage and creating an API link.
303
+
304
+ Args:
305
+ token_data (dict): Authentication token data.
306
+ logger: Logger instance for logging operations.
307
+ attachment_paths (dict): Dictionary mapping source file paths to their corresponding file names.
308
+
309
+ Returns:
310
+ None
311
+ """
312
+
313
+ # Extract entity details from token data
314
+ entity_class = token_data.get("entityClass_data", None)
315
+ entity_id = token_data.get("entity_id_data", None)
316
+
317
+ # Check if we have access to the FGCZ server
318
+ local = local_access(GSTORE_REMOTE_PATH)
319
+
320
+ # Process each attachment
321
+ for source_path, file_name in attachment_paths.items():
322
+ if not source_path or not file_name:
323
+ logger.log_operation("Error", f"Missing required attachment details: {source_path} -> {file_name}", params=None, flush_logs=True)
324
+ print(f"Error: Missing required attachment details: {source_path} -> {file_name}")
325
+ continue
326
+
327
+ try:
328
+ # Define entity folder
329
+ entity_folder = f"{entity_class}_{entity_id}" if entity_class and entity_id else "unknown_entity"
330
+ final_remote_path = f"{GSTORE_REMOTE_PATH}/{entity_folder}/"
331
+
332
+ print("local access:", local)
333
+ print("source path:", source_path)
334
+ print("file name:", file_name)
335
+ print("final remote path:", final_remote_path)
336
+
337
+ if local: # We have direct access → Copy directly
338
+ g_req_copy(source_path, final_remote_path)
339
+
340
+ else: # We don't have direct access → Send to migration folder first
341
+ remote_tmp_path = f"{SCRATCH_PATH}/{file_name}"
342
+ scp_copy(source_path, TRX_LOGIN, TRX_SSH_KEY, remote_tmp_path)
343
+
344
+ # Move to final location
345
+ ssh_move(TRX_LOGIN, TRX_SSH_KEY, remote_tmp_path, final_remote_path)
346
+
347
+ # Log success
348
+ success_msg = f"Successfully attached '{file_name}' to {entity_class} (ID={entity_id})"
349
+ logger.log_operation("Success", success_msg, params=None, flush_logs=True)
350
+ print(success_msg)
351
+
352
+ # Step 3: Create API link
353
+ create_api_link(token_data, logger, entity_class, entity_id, file_name, entity_folder)
354
+
355
+ except Exception as e:
356
+ error_msg = f"Exception while processing '{file_name}': {e}"
357
+ logger.log_operation("Error", error_msg, params=None, flush_logs=True)
358
+ print(error_msg)
359
+
360
+ def local_access(remote_path):
361
+ """Checks if the remote gstore path (i.e. /srv/gstore/projects/) exists locally"""
362
+ result = os.path.exists(remote_path)
363
+ print("Remote Path Exists:", result)
364
+ return result
365
+
366
+
367
+ def scp_copy(source_path, ssh_user, ssh_key, remote_path):
368
+ """Copies a file to a remote location using SCP with the correct FGCZ server address."""
369
+ cmd = ["scp", "-i", ssh_key, source_path, f"{ssh_user}:{remote_path}"]
370
+ subprocess.run(cmd, check=True)
371
+ print(f"Copied {source_path} to {remote_path}")
372
+
373
+
374
+ def ssh_move(ssh_user, ssh_key, remote_tmp_path, final_remote_path):
375
+ """Moves a file on the remote server to its final location using SSH."""
376
+ cmd = ["ssh", "-i", ssh_key, ssh_user, f"/usr/local/ngseq/bin/g-req copynow -f {remote_tmp_path} {final_remote_path}"]
377
+
378
+ subprocess.run(cmd, check=True)
379
+ print(f"Moved {remote_tmp_path} to {final_remote_path}")
380
+
381
+
382
+ def g_req_copy(source_path, destination_path):
383
+ """Copies a file using g-req command when direct access is available."""
384
+ cmd = ["/usr/local/ngseq/bin/g-req", "copynow", "-f", source_path, destination_path]
385
+ subprocess.run(cmd, check=True)
386
+ print(f"Copied {source_path} using g-req")
387
+
388
+
389
+ def create_api_link(token_data, logger, entity_class, entity_id, file_name, folder_name):
390
+ """Creates an API link in B-Fabric for the attached file."""
391
+ wrapper = get_power_user_wrapper(token_data)
392
+ url = f"{URL}/{folder_name}/{file_name}"
393
+ timestamped_filename = f"{dt.now().strftime('%Y-%m-%d_%H:%M:%S')}_{file_name}"
394
+
395
+ data = {
396
+ "name": timestamped_filename,
397
+ "parentclassname": entity_class,
398
+ "parentid": entity_id,
399
+ "url": url
400
+ }
401
+
402
+ try:
403
+ link_result = wrapper.save("link", data)
404
+ if link_result:
405
+ success_msg = f"API link created for '{file_name}': {url}"
406
+ logger.log_operation("Success", success_msg, params=None, flush_logs=True)
407
+ print(success_msg)
408
+ else:
409
+ raise ValueError("API link creation failed")
410
+ except Exception as e:
411
+ error_msg = f"Failed to create API link for '{file_name}': {e}"
412
+ logger.log_operation("Error", error_msg, params=None, flush_logs=True)
413
+ print(error_msg)
414
+
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.1
2
2
  Name: bfabric-web-apps
3
- Version: 0.1.4
3
+ Version: 0.1.5
4
4
  Summary: A package containing handy boilerplate utilities for developing bfabric web-applications
5
5
  Author: Marc Zuber, Griffin White, GWC GmbH
6
6
  Requires-Python: >=3.8,<4.0
@@ -9,3 +9,6 @@ Classifier: Programming Language :: Python :: 3.8
9
9
  Classifier: Programming Language :: Python :: 3.9
10
10
  Classifier: Programming Language :: Python :: 3.10
11
11
  Classifier: Programming Language :: Python :: 3.11
12
+ Requires-Dist: pydantic-settings (>=2.8.1,<3.0.0)
13
+ Requires-Dist: pydantic[email] (>=2.10.6,<3.0.0)
14
+ Requires-Dist: rq (==1.15.1)
@@ -0,0 +1,20 @@
1
+ bfabric_web_apps/__init__.py,sha256=D2jTCEYedVnp6yQxNGLRsjihupDTQxNkPVFQAUYXQys,1409
2
+ bfabric_web_apps/layouts/layouts.py,sha256=z8gL4n4wwLdpLGomO9CftBLnGpc3r6OpmUc2-wBg8uo,14661
3
+ bfabric_web_apps/objects/BfabricInterface.py,sha256=wmcL9JuSC0QEopgImvkZxmtCIS7izt6bwb6y_ch0zus,10178
4
+ bfabric_web_apps/objects/Logger.py,sha256=62LC94xhm7YG5LUw3yH46NqvJQsAX7wnc9D4zbY16rA,5224
5
+ bfabric_web_apps/utils/app_init.py,sha256=RCdpCXp19cF74bouYJLPe-KSETZ0Vwqtd02Ta2VXEF8,428
6
+ bfabric_web_apps/utils/callbacks.py,sha256=m5d6IPiYX77-kJN8I2OptZN-GPxZgrI76o1DGFxjpPU,12686
7
+ bfabric_web_apps/utils/components.py,sha256=V7ECGmF2XYy5O9ciDJVH1nofJYP2a_ELQF3z3X_ADbo,844
8
+ bfabric_web_apps/utils/config.py,sha256=i93fe49Ak4Z7cm_G80m2cBCPp-5qCYLAJEtEr-mYSwQ,1044
9
+ bfabric_web_apps/utils/create_app_in_bfabric.py,sha256=eVk3cQDXxW-yo9b9n_zzGO6kLg_SLxYbIDECyvEPJXU,2752
10
+ bfabric_web_apps/utils/get_logger.py,sha256=0Y3SrXW93--eglS0_ZOc34NOriAt6buFPik5n0ltzRA,434
11
+ bfabric_web_apps/utils/get_power_user_wrapper.py,sha256=T33z64XjmJ0KSlmfEmrEP8eYpbpINCVD6Xld_V7PR2g,1027
12
+ bfabric_web_apps/utils/redis_connection.py,sha256=qXSPxW6m55Ogv44BhmPCl9ACuvzmpfZNU73UJhHRXL4,133
13
+ bfabric_web_apps/utils/redis_queue.py,sha256=MCx7z_I2NusJ4P42mcLvV7STtXBFMIIvun83fM8zOGI,168
14
+ bfabric_web_apps/utils/redis_worker_init.py,sha256=9SUc9bbgBeMbUdqJD9EkWPA4wcJjvyX6Tzanv5JfqEg,691
15
+ bfabric_web_apps/utils/resource_utilities.py,sha256=cJTak0sXAiMSQ7VwJ4ImDUCmW8tAKGObBZCSr5uARBg,5931
16
+ bfabric_web_apps/utils/run_main_pipeline.py,sha256=1YSbk3uP_T3tL6mZZXGv7a7FJc8exro_Eb49gnJjdrs,16864
17
+ bfabric_web_apps-0.1.5.dist-info/LICENSE,sha256=k0O_i2k13i9e35aO-j7FerJafAqzzu8x0kkBs0OWF3c,1065
18
+ bfabric_web_apps-0.1.5.dist-info/METADATA,sha256=Cg8oOpeNHTs2EpD_CLp5LoKmomHml-ermak1AzF14tA,608
19
+ bfabric_web_apps-0.1.5.dist-info/WHEEL,sha256=d2fvjOD7sXsVzChCqf0Ty0JbHKBaLYwDbGQDwQTnJ50,88
20
+ bfabric_web_apps-0.1.5.dist-info/RECORD,,
@@ -1,11 +0,0 @@
1
- # defaults.py
2
- CONFIG_FILE_PATH = "~/.bfabricpy.yml"
3
-
4
- # Default values for application settings
5
- HOST = "0.0.0.0"
6
- PORT = 8050
7
- DEV = False
8
-
9
- # Developer and bug report email addresses
10
- DEVELOPER_EMAIL_ADDRESS = "griffin@gwcustom.com"
11
- BUG_REPORT_EMAIL_ADDRESS = "gwtools@fgcz.system"
@@ -1,16 +0,0 @@
1
- bfabric_web_apps/__init__.py,sha256=jU5o22wl7kXHNJVCH6aqW0llZLfxeQssCIeX1OerQfI,1790
2
- bfabric_web_apps/layouts/layouts.py,sha256=fmv_QTJeAmiOxreAwx14QojzyRV_8RHu1c4sCPN5r5U,13382
3
- bfabric_web_apps/objects/BfabricInterface.py,sha256=wmcL9JuSC0QEopgImvkZxmtCIS7izt6bwb6y_ch0zus,10178
4
- bfabric_web_apps/objects/Logger.py,sha256=62LC94xhm7YG5LUw3yH46NqvJQsAX7wnc9D4zbY16rA,5224
5
- bfabric_web_apps/utils/app_init.py,sha256=RCdpCXp19cF74bouYJLPe-KSETZ0Vwqtd02Ta2VXEF8,428
6
- bfabric_web_apps/utils/callbacks.py,sha256=XbRMK2sL55twtR6IWGAf5B1m2fnMTOpkhyR55-76nes,8444
7
- bfabric_web_apps/utils/components.py,sha256=V7ECGmF2XYy5O9ciDJVH1nofJYP2a_ELQF3z3X_ADbo,844
8
- bfabric_web_apps/utils/create_app_in_bfabric.py,sha256=eVk3cQDXxW-yo9b9n_zzGO6kLg_SLxYbIDECyvEPJXU,2752
9
- bfabric_web_apps/utils/defaults.py,sha256=B82j3JEbysLEU9JDZgoDBTX7WGvW3Hn5YMZaWAcjZew,278
10
- bfabric_web_apps/utils/get_logger.py,sha256=0Y3SrXW93--eglS0_ZOc34NOriAt6buFPik5n0ltzRA,434
11
- bfabric_web_apps/utils/get_power_user_wrapper.py,sha256=T33z64XjmJ0KSlmfEmrEP8eYpbpINCVD6Xld_V7PR2g,1027
12
- bfabric_web_apps/utils/resource_utilities.py,sha256=q0gC_Lr5GQlMBU0_gLm48zjq3XlXbT4QArqzJcmxrTo,5476
13
- bfabric_web_apps-0.1.4.dist-info/LICENSE,sha256=k0O_i2k13i9e35aO-j7FerJafAqzzu8x0kkBs0OWF3c,1065
14
- bfabric_web_apps-0.1.4.dist-info/METADATA,sha256=5TKRMRQB4an34gV7b4_hValbQEvuLvOmJpB9DsKBFR4,480
15
- bfabric_web_apps-0.1.4.dist-info/WHEEL,sha256=d2fvjOD7sXsVzChCqf0Ty0JbHKBaLYwDbGQDwQTnJ50,88
16
- bfabric_web_apps-0.1.4.dist-info/RECORD,,