bfabric-web-apps 0.1.6__py3-none-any.whl → 0.2.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -2,6 +2,7 @@ import os
  
  # Export objects and classes
  from bfabric_web_apps.objects import BfabricInterface, Logger
+ from bfabric_web_apps.objects.BfabricInterface import bfabric_interface
  
  # Export components
  from .utils import components
@@ -14,6 +15,10 @@ from .utils.app_init import create_app
  from .utils.get_logger import get_logger
  from .utils.get_power_user_wrapper import get_power_user_wrapper
  from .utils.create_app_in_bfabric import create_app_in_bfabric
+ from .utils.dataset_utils import (
+     dataset_to_dictionary,
+     dictionary_to_dataset
+ )
  
  # Export callbacks
  from .utils.callbacks import (
@@ -25,7 +30,7 @@ from .utils.callbacks import (
  
  from .utils.config import settings as config
  
- from .utils.run_main_pipeline import run_main_job
+ from .utils.run_main_pipeline import run_main_job, read_file_as_bytes
  
  from .utils.resource_utilities import (
      create_workunit,
@@ -34,6 +39,7 @@ from .utils.resource_utilities import (
      create_resources
  )
  
+ from .utils.charging import create_charge
  from .utils.redis_worker_init import run_worker, test_job
  from .utils.redis_queue import q
  
@@ -54,4 +60,7 @@ GSTORE_REMOTE_PATH = config.GSTORE_REMOTE_PATH
  SCRATCH_PATH = config.SCRATCH_PATH
  TRX_LOGIN = config.TRX_LOGIN
  TRX_SSH_KEY = config.TRX_SSH_KEY
- URL = config.URL
+ URL = config.URL
+ 
+ SERVICE_ID = config.SERVICE_ID
+ DATASET_TEMPLATE_ID = config.DATASET_TEMPLATE_ID
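
With the re-exports above, downstream apps can pull the 0.2.0 additions straight from the package root. A minimal sketch (illustrative only, assuming the package is installed):

from bfabric_web_apps import (
    create_charge,            # new charging helper
    dataset_to_dictionary,    # dataset <-> dict conversion helpers
    dictionary_to_dataset,
    read_file_as_bytes,       # now exported alongside run_main_job
    SERVICE_ID,               # new config values surfaced at package level
    DATASET_TEMPLATE_ID,
)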
@@ -67,7 +67,7 @@ class BfabricInterface( Bfabric ):
  
          validation_url = VALIDATION_URL + token
          res = requests.get(validation_url, headers={"Host": HOST})
- 
+ 
          if res.status_code != 200:
              res = requests.get(validation_url)
  
@@ -83,11 +83,9 @@ class BfabricInterface( Bfabric ):
          userinfo = json.loads(res.text)
          expiry_time = userinfo['expiryDateTime']
          current_time = datetime.datetime.now()
-         five_minutes_later = current_time + datetime.timedelta(minutes=5)
- 
          # Comparing the parsed expiry time with the five minutes later time
  
-         if not five_minutes_later <= datetime.datetime.strptime(expiry_time, "%Y-%m-%d %H:%M:%S"):
+         if current_time > datetime.datetime.strptime(expiry_time, "%Y-%m-%d %H:%M:%S") + datetime.timedelta(days=7):
              return "EXPIRED"
  
          environment_dict = {"Production":"https://fgcz-bfabric.uzh.ch/bfabric","Test":"https://fgcz-bfabric-test.uzh.ch/bfabric"}
@@ -104,10 +102,18 @@ class BfabricInterface( Bfabric ):
              userWsPassword = userinfo['userWsPassword'],
              jobId = userinfo['jobId']
          )
- 
          # Initialize the wrapper right after validating the token
          self._initialize_wrapper(token_data)
  
+         # Log the token validation process
+         L = get_logger(token_data)
+         L.log_operation(
+             operation="Authentication Process",
+             message=f"Token validated successfully. User {token_data.get('user_data')} authenticated.",
+             params=None,
+             flush_logs=True
+         )
+ 
          return json.dumps(token_data)
  
  
@@ -152,7 +158,10 @@ class BfabricInterface( Bfabric ):
              "Project": "container",
              "Order": "container",
              "Container": "container",
-             "Plate": "plate"
+             "Plate": "plate",
+             "Workunit": "workunit",
+             "Resource": "resource",
+             "Dataset": "dataset"
          }
  
          if not token_data:
@@ -176,7 +185,7 @@ class BfabricInterface( Bfabric ):
              obj={"id": entity_id},
              max_results=None,
              params=None,
-             flush_logs=True
+             flush_logs=False
          )[0]
  
  
@@ -186,6 +195,7 @@ class BfabricInterface( Bfabric ):
              "createdby": entity_data_dict.get("createdby"),
              "created": entity_data_dict.get("created"),
              "modified": entity_data_dict.get("modified"),
+             "full_api_response": entity_data_dict,
          })
          return json_data
      else:
@@ -246,7 +256,7 @@ class BfabricInterface( Bfabric ):
              obj={"id": app_id}, # Query using the App ID
              max_results=None,
              params=None,
-             flush_logs=True
+             flush_logs=False
          )
  
          # If API call fails, return empty JSON
@@ -335,4 +335,4 @@ def get_redis_queue_layout():
  
      container_children = dbc.Row(queue_cards)
  
-     return dbc.Container(container_children, className="mt-4")
+     return dbc.Container(container_children, className="mt-4")
@@ -0,0 +1,40 @@
+ 
+ from bfabric_web_apps.utils.get_logger import get_logger
+ from bfabric_web_apps.utils.get_power_user_wrapper import get_power_user_wrapper
+ 
+ def create_charge(token_data, container_id, service_id):
+     """
+     Create a charge in B-Fabric.
+ 
+     Args:
+         token_data (dict): Authentication token data.
+         container_id (int): Container ID (Order ID).
+         service_id (int): Service ID.
+ 
+     Returns:
+         list[dict]: List of charge data.
+     """
+ 
+     # Get a logger and an api wrapper
+     L = get_logger(token_data)
+     wrapper = get_power_user_wrapper(token_data)
+ 
+     # Get the user ID from the token data to assign a charger
+     usr_id = wrapper.read("user", {"login": token_data.get("user_data")})[0]['id']
+ 
+     charge_data = {
+         "serviceid": service_id,
+         "containerid": container_id,
+         "chargerid": usr_id
+     }
+ 
+     # Create and log the charge
+     charge = L.logthis(
+         api_call=wrapper.save,
+         endpoint="charge",
+         obj=charge_data,
+         params=None,
+         flush_logs=True
+     )
+ 
+     return charge
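
For orientation, a hedged usage sketch of the new create_charge helper; the token_data dict and the IDs below are placeholders, not values taken from this release:

# Illustrative only: token_data would come from a validated B-Fabric token.
charges = create_charge(token_data, container_id=12345, service_id=1)
print(charges[0].get("id"))  # ID of the newly created charge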
@@ -1,4 +1,5 @@
  from dash import html
+ import dash_daq as daq
  
  DEVELOPER_EMAIL = "gwhite@fgcz.ethz.ch"
  
@@ -19,4 +20,9 @@ auth = [html.Div(id="auth-div")]
  no_auth = [
      html.P("You are not currently logged into an active session. Please log into bfabric to continue:"),
      html.A('Login to Bfabric', href='https://fgcz-bfabric.uzh.ch/bfabric/')
- ]
+ ]
+ 
+ charge_switch = [
+     daq.BooleanSwitch(id='charge_run', on=True, label="Charge project for run"),
+     html.Br()
+ ]
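
A hedged sketch of how an app could drop the new charge_switch into a Dash layout and read its state; the app wiring and every ID other than charge_run are illustrative, not part of the package:

import dash
from dash import Input, Output, html
from bfabric_web_apps.utils.components import charge_switch

app = dash.Dash(__name__)
app.layout = html.Div(charge_switch + [html.Div(id="charge-status")])

@app.callback(Output("charge-status", "children"), Input("charge_run", "on"))
def show_charge_state(on):
    # True when the run should be charged to the project/container
    return f"Charging enabled: {on}"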
@@ -24,6 +24,12 @@ class Settings(BaseSettings):
      TRX_SSH_KEY: str = "/home/user/.ssh/your_ssh_key"
      URL: str = "https:/fgcz/dummy/url"
  
+     # Which service id to use for the charge
+     SERVICE_ID: int = 0
+ 
+     # Which dataset template id to use for dataset creation
+     DATASET_TEMPLATE_ID: int = 0
+ 
      class Config:
  
          env_file = ".env"
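
Since Settings is a pydantic BaseSettings class reading from .env, the two new fields default to 0 and can be overridden there; a minimal sketch with made-up IDs:

# .env (illustrative): SERVICE_ID=123 and DATASET_TEMPLATE_ID=456
import bfabric_web_apps
print(bfabric_web_apps.SERVICE_ID, bfabric_web_apps.DATASET_TEMPLATE_ID)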
@@ -11,7 +11,7 @@ def get_user_input():
              "2": "Proteomics",
              "4": "Metabolomics / Biophysics",
              "6": "General",
-             "10": "New Tech"
+             "10": "Bioinformatics"
          },
          "PRODUCTION": {
              "1": "Genomics / Transcriptomics",
@@ -0,0 +1,93 @@
+ 
+ def is_numeric(value):
+     try:
+         float(value) # or int(value) if you only want integers
+         return True
+     except ValueError:
+         return False
+ 
+ 
+ def dataset_to_dictionary(dataset):
+ 
+     """
+     Convert B-Fabric API Dataset Response
+     to a dictionary. The dictionary will have the attribute names as keys and the field values as lists,
+     so that it can be easily converted to a pandas dataframe.
+ 
+     Args:
+         dataset (dict): B-Fabric API Dataset Response
+ 
+     Returns:
+         dict: A dictionary where the keys are the attribute names and the values are lists of field values, ready to become a pandas dataframe.
+     """
+ 
+     # Check if the dataset is empty
+     if not dataset:
+         return {}
+ 
+     attributes = dataset.get("attribute", [])
+     items = [elt.get("field") for elt in dataset.get("item", [])]
+ 
+     position_map = {str(elt.get("position")): elt.get("name") for elt in attributes} # Create a mapping of attribute positions to names
+     df_dict = {elt : [] for elt in position_map.values()} # Create a dictionary to hold the dataframe data
+ 
+     for item in items:
+         for field in item:
+             attribute_position = field.get("attributeposition")
+             df_dict[position_map.get(attribute_position)].append(field.get("value")) # Append the field value to the corresponding attribute name in the dictionary
+ 
+     # Create a dataframe from the dictionary
+     return df_dict
+ 
+ 
+ def dictionary_to_dataset(dictionary, dataset_name, containerid, dataset_template_id=0, linked_workunit_id=0):
+ 
+     """
+     Convert a dictionary to a B-Fabric API Dataset
+ 
+     Args:
+         dictionary (dict): A dictionary where the keys are the attribute names and the values are lists of field values.
+ 
+     Returns:
+         dict: A B-Fabric API Dataset ready to be sent to the API.
+     """
+ 
+     if not isinstance(dictionary, dict):
+         raise ValueError("Input must be a dictionary.")
+ 
+     if not isinstance(dataset_name, str):
+         raise ValueError("Dataset name must be a string.")
+ 
+     if not is_numeric(containerid):
+         raise ValueError("Container ID must be a numeric string or integer.")
+ 
+     if not isinstance(dataset_template_id, int):
+         raise ValueError("Dataset template ID must be an integer.")
+ 
+     if not isinstance(linked_workunit_id, int):
+         raise ValueError("Linked workunit ID must be an integer.")
+ 
+     # Check if the dictionary is empty
+     if not dictionary:
+         return {}
+ 
+     # Create a list of attributes
+     attributes = [{"name": name, "position": str(i+1)} for i, name in enumerate(dictionary.keys())]
+ 
+     # Create a list of items
+     items = []
+     for i in range(len(next(iter(dictionary.values())))): # Get the length of the first value list
+         item = [{"attributeposition": str(j+1), "value": dictionary[name][i]} for j, name in enumerate(dictionary.keys())]
+         items.append({"field": item, "position": str(i+1)})
+ 
+     to_return = {"attribute": attributes, "item": items, "name": dataset_name, "containerid": containerid}
+ 
+     if dataset_template_id:
+         # Add the dataset template ID to the dataset
+         to_return["datasettemplateid"] = dataset_template_id
+ 
+     if linked_workunit_id:
+         # Add the linked workunit ID to the dataset
+         to_return["workunitid"] = linked_workunit_id
+ 
+     return to_return
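
To show the shape these helpers exchange, a small illustrative round trip (the field names and container ID are made up):

table = {"Sample": ["A", "B"], "File": ["a.raw", "b.raw"]}
payload = dictionary_to_dataset(table, "Demo dataset", containerid=3000)
# payload["attribute"] -> [{"name": "Sample", "position": "1"}, {"name": "File", "position": "2"}]
# payload["item"][0]   -> {"field": [{"attributeposition": "1", "value": "A"},
#                                    {"attributeposition": "2", "value": "a.raw"}], "position": "1"}
assert dataset_to_dictionary(payload) == table  # converts back to the column dictionary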
@@ -1,28 +1,38 @@
- import redis
+ import redis
  from rq import Worker, Queue, Connection
  import time
+ import threading
  
- def test_job():
- 
-     """
-     A test job that prints a message to the console.
-     """
+ def test_job():
      print("Hello, this is a test job!")
-     time.sleep(10)
+     time.sleep(10)
      print("Test job finished!")
      return
  
- 
- def run_worker(host, port, queue_names):
+ def keepalive_ping(conn, interval=60):
      """
-     Provides internal interface for running workers on a specified host and port.
+     Periodically ping Redis to keep the TCP connection alive on platforms like Azure.
+     """
+     while True:
+         try:
+             conn.ping()
+         except Exception as e:
+             print("Redis keepalive ping failed:", e)
+         time.sleep(interval)
  
-     Args:
-         host (str): The host to run
-         port (int): The port to run
-         queue_names (list): A list of queue names to listen to
+ def run_worker(host, port, queue_names):
+     """
+     Starts an RQ worker with a background Redis keepalive thread to prevent Azure from dropping idle connections.
      """
-     conn = redis.Redis(host=host, port=port)
-     with Connection(conn):
-         worker = Worker(map(Queue, queue_names))
-         worker.work()
+     conn = redis.Redis(
+         host=host,
+         port=port,
+         socket_keepalive=True
+     )
+ 
+     # Start Redis keepalive thread
+     threading.Thread(target=keepalive_ping, args=(conn,), daemon=True).start()
+ 
+     with Connection(conn):
+         worker = Worker(map(Queue, queue_names))
+         worker.work(logging_level="INFO")
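
A hedged sketch of a worker entry point using the updated function; the host, port, and queue names are placeholders:

from bfabric_web_apps import run_worker

if __name__ == "__main__":
    # Listens on the given queues; keepalive_ping() pings Redis every 60 s in the background.
    run_worker(host="localhost", port=6379, queue_names=["light", "heavy"])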
@@ -22,8 +22,8 @@ def create_workunit(token_data, application_name, application_description, appli
      wrapper = bfabric_interface.get_wrapper()
  
      workunit_data = {
-         "name": f"{application_name} - Order {container_id}",
-         "description": f"{application_description} for Order {container_id}",
+         "name": f"Workunit - {application_name} - Container {container_id}",
+         "description": f"{application_description} for Container {container_id}",
          "applicationid": int(application_id),
          "containerid": container_id,
      }
@@ -54,7 +54,7 @@ def create_workunit(token_data, application_name, application_description, appli
  
      except Exception as e:
          L.log_operation(
-             "Error",
+             "Error | ORIGIN: run_main_job function",
              f"Failed to create workunit for Order {container_id}: {e}",
              params=None,
              flush_logs=True,
@@ -85,7 +85,7 @@ def create_workunits(token_data, application_name, application_description, appl
          for container_id in container_ids
      ]
  
-     return [wu_id for wu_id in workunits if wu_id is not None] # Filter out None values
+     return [wu for wu in workunits if wu is not None] # Filter out None values
  
  
  from pathlib import Path
@@ -117,7 +117,7 @@ def create_resource(token_data, workunit_id, file_path, storage_id="20"): # GWC
              "workunitid": str(workunit_id),
              "name": file_path.name,
              "description": f"Resource attached to workunit {workunit_id}",
-             "relativepath": file_path.name,
+             "relativepath": file_path,
              "storageid": str(storage_id),
          }
      )
@@ -125,19 +125,13 @@ def create_resource(token_data, workunit_id, file_path, storage_id="20"): # GWC
          if result:
              resource_id = result[0].get("id")
              print(f"Resource attached: {file_path.name} (ID: {resource_id})")
-             L.log_operation(
-                 "Attach_resource",
-                 f"Resource attached successfully: {file_path.name}",
-                 params=None,
-                 flush_logs=True,
-             )
              return result[0]
          else:
              raise ValueError(f"Failed to attach resource: {file_path.name}")
  
      except Exception as e:
          L.log_operation(
-             "error",
+             "error | ORIGIN: run_main_job function",
              f"Failed to attach resource: {e}",
              params=None,
              flush_logs=True,
@@ -4,6 +4,8 @@ import os
  import shutil
  import subprocess
  from pathlib import Path
+ import time
+ from collections import defaultdict
  
  from .get_logger import get_logger
  from .get_power_user_wrapper import get_power_user_wrapper
@@ -15,6 +17,12 @@ from .resource_utilities import (
      create_workunits,
      create_resources
  )
+ from .dataset_utils import (
+     dataset_to_dictionary,
+     dictionary_to_dataset
+ )
+ 
+ from .charging import create_charge
  
  from .config import settings as config
  from datetime import datetime as dt
@@ -24,19 +32,29 @@ SCRATCH_PATH = config.SCRATCH_PATH
  TRX_LOGIN = config.TRX_LOGIN
  TRX_SSH_KEY = config.TRX_SSH_KEY
  URL = config.URL
+ DATASET_TEMPLATE_ID = config.DATASET_TEMPLATE_ID
+ 
+ def run_main_job(
+     files_as_byte_strings: dict,
+     bash_commands: list[str],
+     resource_paths: dict,
+     attachment_paths: list[dict],
+     token: str,
+     service_id: int = 0,
+     charge: list[int] = [],
+     dataset_dict: dict = {}
+ ):
+ 
  
- def run_main_job(files_as_byte_strings: dict,
-                  bash_commands: list[str],
-                  resource_paths: dict,
-                  attachment_paths: list[dict],
-                  token: str):
      """
      Main function to handle:
          1) Save Files on Server
          2) Execute local bash commands
          3) Create workunits in B-Fabric
-         4) Register resources in B-Fabric
-         5) Attach additional gstore files (logs/reports/etc.) to entities in B-Fabric
+         4) Create Dataset in B-Fabric
+         5) Register resources in B-Fabric
+         6) Attach additional gstore files (logs/reports/etc.) to entities in B-Fabric
+         7) Automatically charge the relevant container for the service
  
      :param files_as_byte_strings: {destination_path: file as byte strings}
      :param bash_commands: List of bash commands to execute
@@ -44,9 +62,13 @@ def run_main_job(files_as_byte_strings: dict,
      :param attachment_paths: Dictionary mapping source file paths to their corresponding file names ({"path/test.txt": "name.txt"})
                               for attachment to a B-Fabric entity (e.g., logs, final reports, etc.)
      :param token: Authentication token
+     :param service_id: ID of the service to charge
+     :param charge: A list of container IDs to be charged.
+     :param dataset_dict: A dictionary to create a dataset in B-Fabric. keys are container IDs and values are dictionaries whose keys are field names and values are lists of values.
+ 
  
  
- Dev Notes:
+ Dev Notes:
  !!! All exceptions get logged (make sure to log the exception message i.e. "except Exception as e: log(e)") !!!
  !!! If an exception doesn't occur, log that some step ran successfully to the job object !!!
      """
@@ -73,50 +95,98 @@ Dev Notes:
      # Step 1: Save files to the server
      try:
          summary = save_files_from_bytes(files_as_byte_strings, L)
-         L.log_operation("Success", f"File copy summary: {summary}", params=None, flush_logs=True)
+         L.log_operation("Success | ORIGIN: run_main_job function", f"File copy summary: {summary}", params=None, flush_logs=True)
          print("Summary:", summary)
+ 
      except Exception as e:
          # If something unexpected blows up the entire process
-         L.log_operation("Error", f"Failed to copy files: {e}", params=None, flush_logs=True)
+         L.log_operation("Error | ORIGIN: run_main_job function", f"Failed to copy files: {e}", params=None, flush_logs=True)
          print("Error copying files:", e)
  
  
      # STEP 2: Execute bash commands
      try:
-         bash_log = execute_and_log_bash_commands(bash_commands, L)
-         L.log_operation("Success", f"Bash commands executed successfully:\n{bash_log}",
+         bash_log = execute_and_log_bash_commands(bash_commands)
+         L.log_operation("Success | ORIGIN: run_main_job function", f"Bash commands executed success | origin: run_main_job functionfully:\n{bash_log}",
                          params=None, flush_logs=True)
      except Exception as e:
-         L.log_operation("Error", f"Failed to execute bash commands: {e}",
+         L.log_operation("Error | ORIGIN: run_main_job function", f"Failed to execute bash commands: {e}",
                          params=None, flush_logs=True)
          print("Error executing bash commands:", e)
  
  
      # STEP 3: Create Workunits
      try:
-         workunit_map = create_workunits_step(token_data, app_data, resource_paths, L)
+         workunit_map, workunit_container_map = create_workunits_step(token_data, app_data, resource_paths, L)
      except Exception as e:
-         L.log_operation("Error", f"Failed to create workunits in B-Fabric: {e}",
+         L.log_operation("Error | ORIGIN: run_main_job function", f"Failed to create workunits in B-Fabric: {e}",
                          params=None, flush_logs=True)
          print("Error creating workunits:", e)
          workunit_map = []
  
-     # STEP 4: Register Resources (Refactored)
+ 
+     # STEP 4: Create Dataset
+     if dataset_dict:
+         for container_id, dataset_data in dataset_dict.items():
+ 
+             dataset_name = f'Dataset - {str(app_data.get("name", "Unknown App"))} - Container {container_id}'
+             linked_workunit_id = workunit_container_map.get(str(container_id), None)
+ 
+             try:
+                 dataset = dictionary_to_dataset(dataset_data, dataset_name, container_id, DATASET_TEMPLATE_ID, linked_workunit_id)
+                 dataset = create_dataset(token_data, dataset)
+                 L.log_operation("Success | ORIGIN: run_main_job function", f'Dataset {dataset.get("id", "Null")} created successfully for container {container_id}', params=None, flush_logs=True)
+                 print(f"Dataset created successfully for container {container_id}")
+             except Exception as e:
+                 L.log_operation("Error | ORIGIN: run_main_job function", f"Failed to create dataset for container {container_id}: {e}", params=None, flush_logs=True)
+                 print(f"Error creating dataset for container {container_id}:", e)
+     else:
+         L.log_operation("Info | ORIGIN: run_main_job function", "No dataset creation requested.", params=None, flush_logs=True)
+         print("No dataset creation requested.")
+ 
+ 
+     # STEP 5: Register Resources (Refactored)
      try:
          attach_resources_to_workunits(token_data, L, workunit_map)
      except Exception as e:
-         L.log_operation("Error", f"Failed to register resources: {e}", params=None, flush_logs=True)
+         L.log_operation("Error | ORIGIN: run_main_job function", f"Failed to register resources: {e}", params=None, flush_logs=True)
          print("Error registering resources:", e)
  
-     # STEP 5: Attach gstore files (logs, reports, etc.) to B-Fabric entity as a Link
+     # STEP 6: Attach gstore files (logs, reports, etc.) to B-Fabric entity as a Link
      try:
          attach_gstore_files_to_entities_as_link(token_data, L, attachment_paths)
          print("Attachment Paths:", attachment_paths)
      except Exception as e:
-         L.log_operation("Error", f"Failed to attach extra files: {e}", params=None, flush_logs=True)
+         L.log_operation("Error | ORIGIN: run_main_job function", f"Failed to attach extra files: {e}", params=None, flush_logs=True)
          print("Error attaching extra files:", e)
  
  
+     # STEP 7: Charge the container for the service
+     if charge:
+ 
+         if service_id == 0:
+             print("Service ID not provided. Skipping charge creation.")
+             L.log_operation("Info | ORIGIN: run_main_job function", "Service ID not provided. Skipping charge creation.", params=None, flush_logs=True)
+         else:
+             container_ids = charge
+             print("Container IDs to charge:", container_ids)
+             if not container_ids:
+                 L.log_operation("Error | ORIGIN: run_main_job function", "No container IDs found for charging.", params=None, flush_logs=True)
+                 print("Error: No container IDs found for charging.")
+                 return
+             for container_id in container_ids:
+                 charges = create_charge(token_data, container_id, service_id)
+                 charge_id = charges[0].get("id")
+                 L.log_operation("Success | ORIGIN: run_main_job function", f"Charge created for container {container_id} with service ID {service_id} and charge id {charge_id}", params=None, flush_logs=False)
+                 print(f"Charge created with id {charge_id} for container {container_id} with service ID {service_id}")
+             L.flush_logs()
+     else:
+         L.log_operation("Info | ORIGIN: run_main_job function", "Charge creation skipped.", params=None, flush_logs=True)
+         print("Charge creation skipped.")
+ 
+     # Final log message
+     L.log_operation("Success | ORIGIN: run_main_job function", "All steps completed successfully.", params=None, flush_logs=True)
+     print("All steps completed successfully.")
  
  #---------------------------------------------------------------------------------------------------------------------
  #---------------------------------------------------------------------------------------------------------------------
@@ -126,8 +196,6 @@ Dev Notes:
  # Step 1: Save Files from bytes
  # -----------------------------------------------------------------------------
  
- import os
- 
  def save_files_from_bytes(files_as_byte_strings: dict, logger):
      """
      Saves byte string files to their respective paths.
@@ -136,44 +204,42 @@ def save_files_from_bytes(files_as_byte_strings: dict, logger):
      :param logger: Logging instance
      :return: Summary indicating how many files succeeded vs. failed
      """
-     results = {} # Store results: (destination) -> True (if success) or error message (if failure)
+ 
+     message = "All files saved successfully."
  
      # First pass: attempt to write all files
      for destination, file_bytes in files_as_byte_strings.items():
          try:
-             # Ensure the directory exists
-             os.makedirs(os.path.dirname(destination), exist_ok=True)
- 
              # Write file from byte string
-             with open(destination, "wb") as f:
+             with open(destination, "+wb") as f:
                  f.write(file_bytes)
-             logger.log_operation("Files saved", "All files saved successfully.", params=None, flush_logs=True)
-             return "All files saved successfully."
+             logger.log_operation("File saved | ORIGIN: run_main_job function", f"File {destination} saved successfully.", params=None, flush_logs=True)
  
          except Exception as e:
              error_msg = f"Error saving file: {destination}, Error: {str(e)}"
-             logger.log_operation("Error", error_msg, params=None, flush_logs=True)
+             logger.log_operation("Error | ORIGIN: run_main_job function", error_msg, params=None, flush_logs=True)
              print(error_msg)
-             raise RuntimeError(error_msg)
+             message = f"Error saving some files."
+ 
+     return message
  
  
  # -----------------------------------------------------------------------------
  # Step 2: Execute Bash Commands
  # -----------------------------------------------------------------------------
  
- def execute_and_log_bash_commands(bash_commands: list[str], logger):
+ def execute_and_log_bash_commands(bash_commands: list[str]):
      """
      Executes a list of bash commands locally, logs and returns the output.
  
      :param bash_commands: List of commands to execute
-     :param logger: Logging instance
      :return: A single string containing logs for all commands
      """
      logstring = ""
  
      for cmd in bash_commands:
          logstring += "---------------------------------------------------------\n"
-         logstring += f"Executing Command: {cmd}\n"
+         logstring += f"Executing Command\n"
  
          try:
              # Execute the command and capture both stdout and stderr
@@ -185,19 +251,15 @@ def execute_and_log_bash_commands(bash_commands: list[str], logger):
              if result.returncode == 0:
                  status = "SUCCESS"
                  log_entry = f"Command: {cmd}\nStatus: {status}\nOutput:\n{output}\n"
-                 logger.log_operation("Info", log_entry, params=None, flush_logs=True)
              else:
                  status = "FAILURE"
                  log_entry = f"Command: {cmd}\nStatus: {status}\nError Output:\n{error_output}\n"
-                 logger.log_operation("Error", log_entry, params=None, flush_logs=True)
  
              logstring += log_entry
              print(log_entry)
  
          except Exception as e:
              logstring += f"Command: {cmd}\nStatus: ERROR\nException: {str(e)}\n"
-             logger.log_operation("Error", f"Command: {cmd} failed with Exception: {str(e)}",
-                                  params=None, flush_logs=True)
  
      return logstring
  
@@ -212,17 +274,33 @@ def create_workunits_step(token_data, app_data, resource_paths, logger):
  
      :param token_data: dict with token/auth info
      :param app_data: dict with fields like {"id": <app_id>} or other app info
-     :param resource_paths: Dictionary {file_path: container_id}
+     :param resource_paths: Dictionary {file_path or dir_path: container_id}
      :param logger: a logger instance
-     :return: A dictionary mapping file_paths to workunit objects {file_path: workunit}
+     :return: A tuple containing:
+         A dictionary mapping file_paths to workunit objects {file_path: workunit_id}
+         A dictionary mapping container_ids to workunit objects {container_id: workunit_id}
      """
      app_id = app_data["id"] # Extract the application ID
  
-     # Extract unique order IDs from resource_paths
-     container_ids = list(set(resource_paths.values()))
+     # Expand any directories into individual files
+     expanded_paths = {}
+ 
+     for path_str, container_id in resource_paths.items():
+         path = Path(path_str)
+         if path.is_file():
+             expanded_paths[str(path)] = int(container_id)
+         elif path.is_dir():
+             for file in path.rglob("*"): # is a method that returns all files and folders in the directory and its subdirectories
+                 if file.is_file():
+                     expanded_paths[str(file)] = int(container_id)
+         else:
+             logger.log_operation("Warning | ORIGIN: run_main_job function", f"Path {path_str} does not exist.", flush_logs=True)
+             print(f"Warning: Path {path_str} does not exist or is not accessible.")
+ 
+     if not expanded_paths:
+         raise ValueError("No valid file paths found in resource_paths.")
  
-     if not container_ids:
-         raise ValueError("No order IDs found in resource_paths; cannot create workunits.")
+     container_ids = list(set(expanded_paths.values()))
  
      # Create all workunits in one API call
      created_workunits = create_workunits(
@@ -239,20 +317,45 @@ def create_workunits_step(token_data, app_data, resource_paths, logger):
      workunit_map = {
          file_path: wu["id"]
          for wu in created_workunits
-         for file_path, container_id in resource_paths.items()
+         for file_path, container_id in expanded_paths.items()
          if container_id == wu["container"]["id"]
      }
  
-     logger.log_operation("Success", f"Total created Workunits: {list(workunit_map.values())}", params=None, flush_logs=True)
-     print(f"Total created Workunits: {list(workunit_map.values())}")
+     workunit_container_map = {
+         str(wu["container"]["id"]): wu["id"]
+         for wu in created_workunits
+     }
  
+     workunit_ids = [wu.get("id") for wu in created_workunits]
+     logger.log_operation("Success | ORIGIN: run_main_job function", f"Total created Workunits: {workunit_ids}", params=None, flush_logs=True)
+     print(f"Total created Workunits: {workunit_ids}")
      print(workunit_map)
-     return workunit_map # Returning {file_path: workunit}
+ 
+     return workunit_map, workunit_container_map # Returning {file_path: workunit}
  
  
  
  # -----------------------------------------------------------------------------
- # Step 4: Attach Resources in B-Fabric
+ # Step 4: Create Dataset in B-Fabric
+ # -----------------------------------------------------------------------------
+ def create_dataset(token_data, dataset_data):
+ 
+     """
+     Creates a dataset in B-Fabric using the provided dataset data.
+     :param dataset_data: Dictionary containing dataset information
+     :param token_data: B-Fabric token data
+     :return: The created dataset object
+     """
+ 
+     wrapper = get_power_user_wrapper(token_data)
+     dataset = wrapper.save("dataset", dataset_data) # Save the dataset
+ 
+     return dataset[0]
+ 
+ 
+ 
+ # -----------------------------------------------------------------------------
+ # Step 5: Attach Resources in B-Fabric
  # -----------------------------------------------------------------------------
  
  def attach_resources_to_workunits(token_data, logger, workunit_map):
@@ -266,33 +369,45 @@ def attach_resources_to_workunits(token_data, logger, workunit_map):
      :param workunit_map: Dictionary mapping file_path to workunit_id {file_path: workunit_id}
      """
      if not workunit_map:
-         logger.log_operation("Info", "No workunits found, skipping resource registration.",
+         logger.log_operation("Info | ORIGIN: run_main_job function", "No workunits found, skipping resource registration.",
                               params=None, flush_logs=True)
          print("No workunits found, skipping resource registration.")
          return
- 
+ 
      print("Workunit Map:", workunit_map)
  
+     # Dictionary to count successfully created resources per workunit
+     # defaultdict(int) automatically starts each new key with a value of 0
+     workunit_resource_count = defaultdict(int)
+ 
      for file_path, workunit_id in workunit_map.items():
-         print(f"Processing file: {file_path}, Workunit ID: {workunit_id}") # Corrected print statement
+         print(f"Processing file: {file_path}, Workunit ID: {workunit_id}")
          # Upload the file as a resource
          resource = create_resource(token_data, workunit_id, file_path)
          resource_id = resource.get("id")
          print("Resource ID:", resource_id)
- 
          if resource_id:
-             logger.log_operation("Success", f"Resource {resource_id} attached to Workunit {workunit_id}",
-                                  params=None, flush_logs=True)
+             workunit_resource_count[workunit_id] += 1
              print(f"Resource {resource_id} attached to Workunit {workunit_id}")
          else:
-             logger.log_operation("Error", f"Failed to attach resource {file_path} for Workunit {workunit_id}",
+             logger.log_operation("Error | ORIGIN: run_main_job function", f"Failed to attach resource {file_path} for Workunit {workunit_id}",
                                   params=None, flush_logs=True)
              print(f"Failed to attach resource {file_path} for Workunit {workunit_id}")
  
+     # Log a summary per workunit
+     for workunit_id, count in workunit_resource_count.items():
+         logger.log_operation(
+             "Success | ORIGIN: run_main_job function",
+             f"Created {count} resource(s) for Workunit ID {workunit_id}",
+             params=None,
+             flush_logs=True
+         )
+         print(f"Created {count} resource(s) for Workunit ID {workunit_id}")
+ 
  
  
  # -----------------------------------------------------------------------------
- # Step 5: Attachments of gstore in B-Fabric as a Link
+ # Step 6: Attachments of gstore in B-Fabric as a Link
  # -----------------------------------------------------------------------------
  
  def attach_gstore_files_to_entities_as_link(token_data, logger, attachment_paths: dict):
@@ -320,7 +435,7 @@ def attach_gstore_files_to_entities_as_link(token_data, logger, attachment_paths
      # Process each attachment
      for source_path, file_name in attachment_paths.items():
          if not source_path or not file_name:
-             logger.log_operation("Error", f"Missing required attachment details: {source_path} -> {file_name}", params=None, flush_logs=True)
+             logger.log_operation("Error | ORIGIN: run_main_job function", f"Missing required attachment details: {source_path} -> {file_name}", params=None, flush_logs=True)
              print(f"Error: Missing required attachment details: {source_path} -> {file_name}")
              continue
  
@@ -340,21 +455,22 @@ def attach_gstore_files_to_entities_as_link(token_data, logger, attachment_paths
              else: # We don't have direct access → Send to migration folder first
                  remote_tmp_path = f"{SCRATCH_PATH}/{file_name}"
                  scp_copy(source_path, TRX_LOGIN, TRX_SSH_KEY, remote_tmp_path)
+                 print("scp copy done:")
  
                  # Move to final location
                  ssh_move(TRX_LOGIN, TRX_SSH_KEY, remote_tmp_path, final_remote_path)
+                 print("ssh move done:")
  
              # Log success
              success_msg = f"Successfully attached '{file_name}' to {entity_class} (ID={entity_id})"
-             logger.log_operation("Success", success_msg, params=None, flush_logs=True)
              print(success_msg)
  
              # Step 3: Create API link
-             create_api_link(token_data, logger, entity_class, entity_id, file_name, entity_folder)
+             create_attachment_link(token_data, logger, entity_class, entity_id, file_name, entity_folder)
  
          except Exception as e:
              error_msg = f"Exception while processing '{file_name}': {e}"
-             logger.log_operation("Error", error_msg, params=None, flush_logs=True)
+             logger.log_operation("Error | ORIGIN: run_main_job function", error_msg, params=None, flush_logs=True)
              print(error_msg)
  
  def local_access(remote_path):
@@ -366,8 +482,11 @@ def local_access(remote_path):
  
  def scp_copy(source_path, ssh_user, ssh_key, remote_path):
      """Copies a file to a remote location using SCP with the correct FGCZ server address."""
+     print("SCP Copying...")
      cmd = ["scp", "-i", ssh_key, source_path, f"{ssh_user}:{remote_path}"]
+     print("SCP Command:")
      subprocess.run(cmd, check=True)
+     print("SCP Command Executed:", cmd)
      print(f"Copied {source_path} to {remote_path}")
  
  
@@ -377,6 +496,9 @@ def ssh_move(ssh_user, ssh_key, remote_tmp_path, final_remote_path):
  
      subprocess.run(cmd, check=True)
      print(f"Moved {remote_tmp_path} to {final_remote_path}")
+ 
+     # Wait 10 second before next move
+     time.sleep(10)
  
  
  def g_req_copy(source_path, destination_path):
@@ -386,8 +508,8 @@ def g_req_copy(source_path, destination_path):
      print(f"Copied {source_path} using g-req")
  
  
- def create_api_link(token_data, logger, entity_class, entity_id, file_name, folder_name):
-     """Creates an API link in B-Fabric for the attached file."""
+ def create_attachment_link(token_data, logger, entity_class, entity_id, file_name, folder_name):
+     """Creates an attachment link in B-Fabric for the attached file."""
      wrapper = get_power_user_wrapper(token_data)
      url = f"{URL}/{folder_name}/{file_name}"
      timestamped_filename = f"{dt.now().strftime('%Y-%m-%d_%H:%M:%S')}_{file_name}"
@@ -402,13 +524,24 @@ def create_api_link(token_data, logger, entity_class, entity_id, file_name, fold
      try:
          link_result = wrapper.save("link", data)
          if link_result:
-             success_msg = f"API link created for '{file_name}': {url}"
-             logger.log_operation("Success", success_msg, params=None, flush_logs=True)
+             success_msg = f"Attachment link created for '{file_name}': {url}"
+             logger.log_operation("Success | ORIGIN: run_main_job function", success_msg, params=None, flush_logs=True)
              print(success_msg)
          else:
-             raise ValueError("API link creation failed")
+             raise ValueError("Attachment link creation failed")
      except Exception as e:
-         error_msg = f"Failed to create API link for '{file_name}': {e}"
-         logger.log_operation("Error", error_msg, params=None, flush_logs=True)
+         error_msg = f"Failed to create attachment link for '{file_name}': {e}"
+         logger.log_operation("Error | ORIGIN: run_main_job function", error_msg, params=None, flush_logs=True)
          print(error_msg)
  
+ 
+ def read_file_as_bytes(file_path, max_size_mb=400):
+     """Reads any file type and stores it as a byte string in a dictionary."""
+     file_size_mb = os.path.getsize(file_path) / (1024 * 1024) # Convert bytes to MB
+     if file_size_mb > max_size_mb:
+         raise ValueError(f"File {file_path} exceeds {max_size_mb}MB limit ({file_size_mb:.2f}MB).")
+ 
+     with open(file_path, "rb") as f: # Read as bytes
+         file_as_bytes = f.read()
+ 
+     return file_as_bytes
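
Putting the new pieces together, a hedged end-to-end sketch of a run_main_job call with the 0.2.0 parameters; every path, ID, and the token below is a placeholder, and in practice the call is typically enqueued on the package's Redis queue rather than invoked directly:

from bfabric_web_apps import run_main_job, read_file_as_bytes

token = "<raw B-Fabric token string>"  # placeholder
files = {"/scratch/output/report.html": read_file_as_bytes("local/report.html")}

run_main_job(
    files_as_byte_strings=files,
    bash_commands=["echo 'processing done'"],
    resource_paths={"/scratch/output": 3000},               # file or directory -> container ID
    attachment_paths={"local/report.html": "report.html"},
    token=token,
    service_id=1,                                            # 0 would skip charge creation
    charge=[3000],                                           # container IDs to charge
    dataset_dict={3000: {"Sample": ["A", "B"], "File": ["a.raw", "b.raw"]}},
)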
@@ -1,6 +1,6 @@
  Metadata-Version: 2.1
  Name: bfabric-web-apps
- Version: 0.1.6
+ Version: 0.2.0
  Summary: A package containing handy boilerplate utilities for developing bfabric web-applications
  Author: Marc Zuber, Griffin White, GWC GmbH
  Requires-Python: >=3.10,<4.0
@@ -10,6 +10,7 @@ Classifier: Programming Language :: Python :: 3.11
  Requires-Dist: bfabric (>=1.13.23,<2.0.0)
  Requires-Dist: dash (>=3.0.2,<4.0.0)
  Requires-Dist: dash-bootstrap-components (>=2.0.0,<3.0.0)
+ Requires-Dist: dash-daq (>=0.6.0,<0.7.0)
  Requires-Dist: pydantic-settings (>=2.8.1,<3.0.0)
  Requires-Dist: pydantic[email] (>=2.10.6,<3.0.0)
  Requires-Dist: rq (==1.15.1)
@@ -0,0 +1,22 @@
+ bfabric_web_apps/__init__.py,sha256=eRYBvXrDM8Bhdm7cbI80L1hjtbRFqUasJiBW52Hx3TI,1715
+ bfabric_web_apps/layouts/layouts.py,sha256=z8gL4n4wwLdpLGomO9CftBLnGpc3r6OpmUc2-wBg8uo,14661
+ bfabric_web_apps/objects/BfabricInterface.py,sha256=cEUcAdr4iUH8aS2VGe3CFnAbNQiTj13hYuSVcDg725A,10646
+ bfabric_web_apps/objects/Logger.py,sha256=62LC94xhm7YG5LUw3yH46NqvJQsAX7wnc9D4zbY16rA,5224
+ bfabric_web_apps/utils/app_init.py,sha256=RCdpCXp19cF74bouYJLPe-KSETZ0Vwqtd02Ta2VXEF8,428
+ bfabric_web_apps/utils/callbacks.py,sha256=tB1xtHl_ePY6KJWNz3erkrZw3HFhRneewGqZm9xIYtI,12687
+ bfabric_web_apps/utils/charging.py,sha256=oNNazH59SFkbxJKPvCel0IxdsRHC8xpJ0AXCLvI88FI,1069
+ bfabric_web_apps/utils/components.py,sha256=X3NRnv--LsHWMtWL83Pzr2whOZLSEJIwXTklQdAQpZE,984
+ bfabric_web_apps/utils/config.py,sha256=F4EExu7EkY7upOnxk6BU6zTLt9eU6_iy2y8esIlxTSc,1209
+ bfabric_web_apps/utils/create_app_in_bfabric.py,sha256=Z7puke8QB4SBuDJ9x3_OjgApzovKu0Nt1g8EqkOHJpc,2758
+ bfabric_web_apps/utils/dataset_utils.py,sha256=p_UtoOl1kJpSm2BGdg31Ji0C7ctst40wp4LX1tUe4tI,3360
+ bfabric_web_apps/utils/get_logger.py,sha256=0Y3SrXW93--eglS0_ZOc34NOriAt6buFPik5n0ltzRA,434
+ bfabric_web_apps/utils/get_power_user_wrapper.py,sha256=T33z64XjmJ0KSlmfEmrEP8eYpbpINCVD6Xld_V7PR2g,1027
+ bfabric_web_apps/utils/redis_connection.py,sha256=qXSPxW6m55Ogv44BhmPCl9ACuvzmpfZNU73UJhHRXL4,133
+ bfabric_web_apps/utils/redis_queue.py,sha256=MCx7z_I2NusJ4P42mcLvV7STtXBFMIIvun83fM8zOGI,168
+ bfabric_web_apps/utils/redis_worker_init.py,sha256=wtjQL48PLNXD1s-5s3Oq5EC8BmcfKcd7IhUbTH_EYz8,1014
+ bfabric_web_apps/utils/resource_utilities.py,sha256=N4EiUkxXHZ18jnU2OuRqaGSroCZ73Ogb9lkeA21Kvq4,5716
+ bfabric_web_apps/utils/run_main_pipeline.py,sha256=whmUbO9mMom9voOCv7iS51wh6St4DfawCD337BuPvtY,23112
+ bfabric_web_apps-0.2.0.dist-info/LICENSE,sha256=k0O_i2k13i9e35aO-j7FerJafAqzzu8x0kkBs0OWF3c,1065
+ bfabric_web_apps-0.2.0.dist-info/METADATA,sha256=vhqne76HPRsHA9uFhTPgP7wudTKYJq74V7GEQYR9S1I,687
+ bfabric_web_apps-0.2.0.dist-info/WHEEL,sha256=d2fvjOD7sXsVzChCqf0Ty0JbHKBaLYwDbGQDwQTnJ50,88
+ bfabric_web_apps-0.2.0.dist-info/RECORD,,
@@ -1,20 +0,0 @@
- bfabric_web_apps/__init__.py,sha256=D2jTCEYedVnp6yQxNGLRsjihupDTQxNkPVFQAUYXQys,1409
- bfabric_web_apps/layouts/layouts.py,sha256=z8gL4n4wwLdpLGomO9CftBLnGpc3r6OpmUc2-wBg8uo,14661
- bfabric_web_apps/objects/BfabricInterface.py,sha256=wmcL9JuSC0QEopgImvkZxmtCIS7izt6bwb6y_ch0zus,10178
- bfabric_web_apps/objects/Logger.py,sha256=62LC94xhm7YG5LUw3yH46NqvJQsAX7wnc9D4zbY16rA,5224
- bfabric_web_apps/utils/app_init.py,sha256=RCdpCXp19cF74bouYJLPe-KSETZ0Vwqtd02Ta2VXEF8,428
- bfabric_web_apps/utils/callbacks.py,sha256=m5d6IPiYX77-kJN8I2OptZN-GPxZgrI76o1DGFxjpPU,12686
- bfabric_web_apps/utils/components.py,sha256=V7ECGmF2XYy5O9ciDJVH1nofJYP2a_ELQF3z3X_ADbo,844
- bfabric_web_apps/utils/config.py,sha256=i93fe49Ak4Z7cm_G80m2cBCPp-5qCYLAJEtEr-mYSwQ,1044
- bfabric_web_apps/utils/create_app_in_bfabric.py,sha256=eVk3cQDXxW-yo9b9n_zzGO6kLg_SLxYbIDECyvEPJXU,2752
- bfabric_web_apps/utils/get_logger.py,sha256=0Y3SrXW93--eglS0_ZOc34NOriAt6buFPik5n0ltzRA,434
- bfabric_web_apps/utils/get_power_user_wrapper.py,sha256=T33z64XjmJ0KSlmfEmrEP8eYpbpINCVD6Xld_V7PR2g,1027
- bfabric_web_apps/utils/redis_connection.py,sha256=qXSPxW6m55Ogv44BhmPCl9ACuvzmpfZNU73UJhHRXL4,133
- bfabric_web_apps/utils/redis_queue.py,sha256=MCx7z_I2NusJ4P42mcLvV7STtXBFMIIvun83fM8zOGI,168
- bfabric_web_apps/utils/redis_worker_init.py,sha256=9SUc9bbgBeMbUdqJD9EkWPA4wcJjvyX6Tzanv5JfqEg,691
- bfabric_web_apps/utils/resource_utilities.py,sha256=4LnV_eQjKkcpZJBsWFx--dmASyE7jfJfktk2hdHn5Fk,5856
- bfabric_web_apps/utils/run_main_pipeline.py,sha256=1YSbk3uP_T3tL6mZZXGv7a7FJc8exro_Eb49gnJjdrs,16864
- bfabric_web_apps-0.1.6.dist-info/LICENSE,sha256=k0O_i2k13i9e35aO-j7FerJafAqzzu8x0kkBs0OWF3c,1065
- bfabric_web_apps-0.1.6.dist-info/METADATA,sha256=bFw2hFwCuonWxGFxyCSj6y_l2rIjKfxbzXCAGK3O0Ek,646
- bfabric_web_apps-0.1.6.dist-info/WHEEL,sha256=d2fvjOD7sXsVzChCqf0Ty0JbHKBaLYwDbGQDwQTnJ50,88
- bfabric_web_apps-0.1.6.dist-info/RECORD,,