bfabric-web-apps 0.1.7__py3-none-any.whl → 0.2.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- bfabric_web_apps/__init__.py +7 -1
- bfabric_web_apps/objects/BfabricInterface.py +17 -7
- bfabric_web_apps/utils/config.py +3 -0
- bfabric_web_apps/utils/create_app_in_bfabric.py +1 -1
- bfabric_web_apps/utils/dataset_utils.py +93 -0
- bfabric_web_apps/utils/redis_worker_init.py +28 -18
- bfabric_web_apps/utils/resource_utilities.py +6 -12
- bfabric_web_apps/utils/run_main_pipeline.py +156 -65
- {bfabric_web_apps-0.1.7.dist-info → bfabric_web_apps-0.2.0.dist-info}/METADATA +1 -1
- bfabric_web_apps-0.2.0.dist-info/RECORD +22 -0
- bfabric_web_apps-0.1.7.dist-info/RECORD +0 -21
- {bfabric_web_apps-0.1.7.dist-info → bfabric_web_apps-0.2.0.dist-info}/LICENSE +0 -0
- {bfabric_web_apps-0.1.7.dist-info → bfabric_web_apps-0.2.0.dist-info}/WHEEL +0 -0
bfabric_web_apps/__init__.py
CHANGED
@@ -2,6 +2,7 @@ import os
 
 # Export objects and classes
 from bfabric_web_apps.objects import BfabricInterface, Logger
+from bfabric_web_apps.objects.BfabricInterface import bfabric_interface
 
 # Export components
 from .utils import components
@@ -14,6 +15,10 @@ from .utils.app_init import create_app
 from .utils.get_logger import get_logger
 from .utils.get_power_user_wrapper import get_power_user_wrapper
 from .utils.create_app_in_bfabric import create_app_in_bfabric
+from .utils.dataset_utils import (
+    dataset_to_dictionary,
+    dictionary_to_dataset
+)
 
 # Export callbacks
 from .utils.callbacks import (
@@ -57,4 +62,5 @@ TRX_LOGIN = config.TRX_LOGIN
 TRX_SSH_KEY = config.TRX_SSH_KEY
 URL = config.URL
 
-SERVICE_ID = config.SERVICE_ID
+SERVICE_ID = config.SERVICE_ID
+DATASET_TEMPLATE_ID = config.DATASET_TEMPLATE_ID
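Since 0.2.0 re-exports the dataset helpers and the new DATASET_TEMPLATE_ID constant at the package root, both can be imported directly; a minimal sketch (assuming an installed, configured package):

    # New in 0.2.0: dataset helpers and the template ID are package-level exports.
    from bfabric_web_apps import dataset_to_dictionary, dictionary_to_dataset, DATASET_TEMPLATE_ID
    from bfabric_web_apps import bfabric_interface  # shared interface instance, also newly exported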
bfabric_web_apps/objects/BfabricInterface.py
CHANGED
@@ -83,11 +83,9 @@ class BfabricInterface( Bfabric ):
         userinfo = json.loads(res.text)
         expiry_time = userinfo['expiryDateTime']
         current_time = datetime.datetime.now()
-        five_minutes_later = current_time + datetime.timedelta(minutes=5)
-
         # Comparing the parsed expiry time with the five minutes later time
 
-        if
+        if current_time > datetime.datetime.strptime(expiry_time, "%Y-%m-%d %H:%M:%S") + datetime.timedelta(days=7):
             return "EXPIRED"
 
         environment_dict = {"Production":"https://fgcz-bfabric.uzh.ch/bfabric","Test":"https://fgcz-bfabric-test.uzh.ch/bfabric"}
@@ -104,10 +102,18 @@ class BfabricInterface( Bfabric ):
             userWsPassword = userinfo['userWsPassword'],
             jobId = userinfo['jobId']
         )
-
         # Initialize the wrapper right after validating the token
         self._initialize_wrapper(token_data)
 
+        # Log the token validation process
+        L = get_logger(token_data)
+        L.log_operation(
+            operation="Authentication Process",
+            message=f"Token validated successfully. User {token_data.get('user_data')} authenticated.",
+            params=None,
+            flush_logs=True
+        )
+
         return json.dumps(token_data)
 
 
@@ -152,7 +158,10 @@ class BfabricInterface( Bfabric ):
             "Project": "container",
             "Order": "container",
             "Container": "container",
-            "Plate": "plate"
+            "Plate": "plate",
+            "Workunit": "workunit",
+            "Resource": "resource",
+            "Dataset": "dataset"
         }
 
         if not token_data:
@@ -176,7 +185,7 @@ class BfabricInterface( Bfabric ):
             obj={"id": entity_id},
             max_results=None,
             params=None,
-            flush_logs=
+            flush_logs=False
         )[0]
 
 
@@ -186,6 +195,7 @@ class BfabricInterface( Bfabric ):
             "createdby": entity_data_dict.get("createdby"),
             "created": entity_data_dict.get("created"),
             "modified": entity_data_dict.get("modified"),
+            "full_api_response": entity_data_dict,
         })
         return json_data
     else:
@@ -246,7 +256,7 @@ class BfabricInterface( Bfabric ):
             obj={"id": app_id}, # Query using the App ID
             max_results=None,
             params=None,
-            flush_logs=
+            flush_logs=False
         )
 
         # If API call fails, return empty JSON
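The token check above replaces the old five-minute look-ahead with a seven-day grace period after the token's expiry timestamp. A standalone sketch of just that comparison, with an example timestamp (the 'expiryDateTime' string format is taken from the diff):

    import datetime

    expiry_time = "2024-01-01 12:00:00"  # example value of userinfo['expiryDateTime']
    current_time = datetime.datetime.now()

    # 0.2.0 rule: report EXPIRED only once the token has been expired for over 7 days.
    if current_time > datetime.datetime.strptime(expiry_time, "%Y-%m-%d %H:%M:%S") + datetime.timedelta(days=7):
        print("EXPIRED")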
bfabric_web_apps/utils/dataset_utils.py
ADDED
@@ -0,0 +1,93 @@
+
+def is_numeric(value):
+    try:
+        float(value) # or int(value) if you only want integers
+        return True
+    except ValueError:
+        return False
+
+
+def dataset_to_dictionary(dataset):
+
+    """
+    Convert B-Fabric API Dataset Response
+    to a dictionary. The dictionary will have the attribute names as keys and the field values as lists,
+    so that it can be easily converted to a pandas dataframe.
+
+    Args:
+        dataset (dict): B-Fabric API Dataset Response
+
+    Returns:
+        dict: A dictionary where the keys are the attribute names and the values are lists of field values, ready to become a pandas dataframe.
+    """
+
+    # Check if the dataset is empty
+    if not dataset:
+        return {}
+
+    attributes = dataset.get("attribute", [])
+    items = [elt.get("field") for elt in dataset.get("item", [])]
+
+    position_map = {str(elt.get("position")): elt.get("name") for elt in attributes} # Create a mapping of attribute positions to names
+    df_dict = {elt: [] for elt in position_map.values()} # Create a dictionary to hold the dataframe data
+
+    for item in items:
+        for field in item:
+            attribute_position = field.get("attributeposition")
+            df_dict[position_map.get(attribute_position)].append(field.get("value")) # Append the field value to the corresponding attribute name in the dictionary
+
+    # Create a dataframe from the dictionary
+    return df_dict
+
+
+def dictionary_to_dataset(dictionary, dataset_name, containerid, dataset_template_id=0, linked_workunit_id=0):
+
+    """
+    Convert a dictionary to a B-Fabric API Dataset
+
+    Args:
+        dictionary (dict): A dictionary where the keys are the attribute names and the values are lists of field values.
+
+    Returns:
+        dict: A B-Fabric API Dataset ready to be sent to the API.
+    """
+
+    if not isinstance(dictionary, dict):
+        raise ValueError("Input must be a dictionary.")
+
+    if not isinstance(dataset_name, str):
+        raise ValueError("Dataset name must be a string.")
+
+    if not is_numeric(containerid):
+        raise ValueError("Container ID must be a numeric string or integer.")
+
+    if not isinstance(dataset_template_id, int):
+        raise ValueError("Dataset template ID must be an integer.")
+
+    if not isinstance(linked_workunit_id, int):
+        raise ValueError("Linked workunit ID must be an integer.")
+
+    # Check if the dictionary is empty
+    if not dictionary:
+        return {}
+
+    # Create a list of attributes
+    attributes = [{"name": name, "position": str(i+1)} for i, name in enumerate(dictionary.keys())]
+
+    # Create a list of items
+    items = []
+    for i in range(len(next(iter(dictionary.values())))): # Get the length of the first value list
+        item = [{"attributeposition": str(j+1), "value": dictionary[name][i]} for j, name in enumerate(dictionary.keys())]
+        items.append({"field": item, "position": str(i+1)})
+
+    to_return = {"attribute": attributes, "item": items, "name": dataset_name, "containerid": containerid}
+
+    if dataset_template_id:
+        # Add the dataset template ID to the dataset
+        to_return["datasettemplateid"] = dataset_template_id
+
+    if linked_workunit_id:
+        # Add the linked workunit ID to the dataset
+        to_return["workunitid"] = linked_workunit_id
+
+    return to_return
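To illustrate the two new helpers, a small round-trip sketch; the column names, container, template, and workunit IDs are invented:

    from bfabric_web_apps.utils.dataset_utils import dataset_to_dictionary, dictionary_to_dataset

    table = {"Sample": ["A", "B"], "Count": ["10", "20"]}

    # Build a dataset payload for (hypothetical) container 1234, template 5, workunit 99.
    payload = dictionary_to_dataset(table, "My Dataset", 1234,
                                    dataset_template_id=5, linked_workunit_id=99)
    # payload["attribute"] == [{"name": "Sample", "position": "1"},
    #                          {"name": "Count", "position": "2"}]
    # payload["item"][0] == {"field": [{"attributeposition": "1", "value": "A"},
    #                                  {"attributeposition": "2", "value": "10"}],
    #                        "position": "1"}

    # dataset_to_dictionary inverts an API response of the same shape back into column lists.
    assert dataset_to_dictionary(payload) == table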
bfabric_web_apps/utils/redis_worker_init.py
CHANGED
@@ -1,28 +1,38 @@
-import redis
+import redis
 from rq import Worker, Queue, Connection
 import time
+import threading
 
-def test_job():
-
-    """
-    A test job that prints a message to the console.
-    """
+def test_job():
     print("Hello, this is a test job!")
-    time.sleep(10)
+    time.sleep(10)
     print("Test job finished!")
     return
 
-
-def run_worker(host, port, queue_names):
+def keepalive_ping(conn, interval=60):
     """
-
+    Periodically ping Redis to keep the TCP connection alive on platforms like Azure.
+    """
+    while True:
+        try:
+            conn.ping()
+        except Exception as e:
+            print("Redis keepalive ping failed:", e)
+        time.sleep(interval)
 
-
-
-
-        queue_names (list): A list of queue names to listen to
+def run_worker(host, port, queue_names):
+    """
+    Starts an RQ worker with a background Redis keepalive thread to prevent Azure from dropping idle connections.
     """
-    conn = redis.Redis(
-
-
-
+    conn = redis.Redis(
+        host=host,
+        port=port,
+        socket_keepalive=True
+    )
+
+    # Start Redis keepalive thread
+    threading.Thread(target=keepalive_ping, args=(conn,), daemon=True).start()
+
+    with Connection(conn):
+        worker = Worker(map(Queue, queue_names))
+        worker.work(logging_level="INFO")
bfabric_web_apps/utils/resource_utilities.py
CHANGED
@@ -22,8 +22,8 @@ def create_workunit(token_data, application_name, application_description, appli
     wrapper = bfabric_interface.get_wrapper()
 
     workunit_data = {
-        "name": f"{application_name} -
-        "description": f"{application_description} for
+        "name": f"Workunit - {application_name} - Container {container_id}",
+        "description": f"{application_description} for Container {container_id}",
         "applicationid": int(application_id),
         "containerid": container_id,
     }
@@ -54,7 +54,7 @@ def create_workunit(token_data, application_name, application_description, appli
 
     except Exception as e:
         L.log_operation(
-            "Error",
+            "Error | ORIGIN: run_main_job function",
             f"Failed to create workunit for Order {container_id}: {e}",
             params=None,
             flush_logs=True,
@@ -85,7 +85,7 @@ def create_workunits(token_data, application_name, application_description, appl
         for container_id in container_ids
     ]
 
-    return [
+    return [wu for wu in workunits if wu is not None] # Filter out None values
 
 
 from pathlib import Path
@@ -117,7 +117,7 @@ def create_resource(token_data, workunit_id, file_path, storage_id="20"): # GWC
             "workunitid": str(workunit_id),
             "name": file_path.name,
             "description": f"Resource attached to workunit {workunit_id}",
-            "relativepath": file_path
+            "relativepath": file_path,
             "storageid": str(storage_id),
         }
     )
@@ -125,19 +125,13 @@ def create_resource(token_data, workunit_id, file_path, storage_id="20"): # GWC
         if result:
             resource_id = result[0].get("id")
             print(f"Resource attached: {file_path.name} (ID: {resource_id})")
-            L.log_operation(
-                "Attach_resource",
-                f"Resource attached successfully: {file_path.name}",
-                params=None,
-                flush_logs=True,
-            )
             return result[0]
         else:
             raise ValueError(f"Failed to attach resource: {file_path.name}")
 
     except Exception as e:
         L.log_operation(
-            "error",
+            "error | ORIGIN: run_main_job function",
             f"Failed to attach resource: {e}",
             params=None,
             flush_logs=True,
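The practical effect of the new return statement is that callers of create_workunits no longer see None entries for containers whose workunit creation failed. A hedged sketch; the parameters after application_description are assumed from the truncated hunk header and the loop body, and all values are hypothetical:

    workunits = create_workunits(
        token_data,
        "My App",            # application_name
        "Demo application",  # application_description
        377,                 # application_id (assumed parameter)
        [1234, 5678],        # container_ids (assumed parameter)
    )
    # If creation failed for container 5678, the 0.2.0 result holds only the
    # container-1234 workunit instead of [workunit, None].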
bfabric_web_apps/utils/run_main_pipeline.py
CHANGED
@@ -4,6 +4,8 @@ import os
 import shutil
 import subprocess
 from pathlib import Path
+import time
+from collections import defaultdict
 
 from .get_logger import get_logger
 from .get_power_user_wrapper import get_power_user_wrapper
@@ -15,6 +17,10 @@ from .resource_utilities import (
     create_workunits,
     create_resources
 )
+from .dataset_utils import (
+    dataset_to_dictionary,
+    dictionary_to_dataset
+)
 
 from .charging import create_charge
 
@@ -26,6 +32,7 @@ SCRATCH_PATH = config.SCRATCH_PATH
 TRX_LOGIN = config.TRX_LOGIN
 TRX_SSH_KEY = config.TRX_SSH_KEY
 URL = config.URL
+DATASET_TEMPLATE_ID = config.DATASET_TEMPLATE_ID
 
 def run_main_job(
     files_as_byte_strings: dict,
@@ -34,16 +41,20 @@ def run_main_job(
     attachment_paths: list[dict],
     token: str,
     service_id: int = 0,
-    charge:
+    charge: list[int] = [],
+    dataset_dict: dict = {}
 ):
+
+
     """
     Main function to handle:
     1) Save Files on Server
     2) Execute local bash commands
     3) Create workunits in B-Fabric
-    4)
-    5)
-    6)
+    4) Create Dataset in B-Fabric
+    5) Register resources in B-Fabric
+    6) Attach additional gstore files (logs/reports/etc.) to entities in B-Fabric
+    7) Automatically charge the relevant container for the service
 
     :param files_as_byte_strings: {destination_path: file as byte strings}
     :param bash_commands: List of bash commands to execute
@@ -52,10 +63,12 @@ def run_main_job(
         for attachment to a B-Fabric entity (e.g., logs, final reports, etc.)
     :param token: Authentication token
     :param service_id: ID of the service to charge
-    :param charge:
+    :param charge: A list of container IDs to be charged.
+    :param dataset_dict: A dictionary to create a dataset in B-Fabric. keys are container IDs and values are dictionaries whose keys are field names and values are lists of values.
+
 
 
-Dev Notes:
+    Dev Notes:
     !!! All exceptions get logged (make sure to log the exception message i.e. "except Exception as e: log(e)") !!!
     !!! If an exception doesn't occur, log that some step ran successfully to the job object !!!
     """
@@ -82,72 +95,98 @@ Dev Notes:
     # Step 1: Save files to the server
     try:
         summary = save_files_from_bytes(files_as_byte_strings, L)
-        L.log_operation("Success", f"File copy summary: {summary}", params=None, flush_logs=True)
+        L.log_operation("Success | ORIGIN: run_main_job function", f"File copy summary: {summary}", params=None, flush_logs=True)
         print("Summary:", summary)
 
     except Exception as e:
         # If something unexpected blows up the entire process
-        L.log_operation("Error", f"Failed to copy files: {e}", params=None, flush_logs=True)
+        L.log_operation("Error | ORIGIN: run_main_job function", f"Failed to copy files: {e}", params=None, flush_logs=True)
         print("Error copying files:", e)
 
 
     # STEP 2: Execute bash commands
     try:
-        bash_log = execute_and_log_bash_commands(bash_commands
-        L.log_operation("Success", f"Bash commands executed
+        bash_log = execute_and_log_bash_commands(bash_commands)
+        L.log_operation("Success | ORIGIN: run_main_job function", f"Bash commands executed success | origin: run_main_job functionfully:\n{bash_log}",
                         params=None, flush_logs=True)
     except Exception as e:
-        L.log_operation("Error", f"Failed to execute bash commands: {e}",
+        L.log_operation("Error | ORIGIN: run_main_job function", f"Failed to execute bash commands: {e}",
                         params=None, flush_logs=True)
         print("Error executing bash commands:", e)
 
 
    # STEP 3: Create Workunits
     try:
-        workunit_map = create_workunits_step(token_data, app_data, resource_paths, L)
+        workunit_map, workunit_container_map = create_workunits_step(token_data, app_data, resource_paths, L)
    except Exception as e:
-        L.log_operation("Error", f"Failed to create workunits in B-Fabric: {e}",
+        L.log_operation("Error | ORIGIN: run_main_job function", f"Failed to create workunits in B-Fabric: {e}",
                         params=None, flush_logs=True)
         print("Error creating workunits:", e)
         workunit_map = []
 
-
+
+    # STEP 4: Create Dataset
+    if dataset_dict:
+        for container_id, dataset_data in dataset_dict.items():
+
+            dataset_name = f'Dataset - {str(app_data.get("name", "Unknown App"))} - Container {container_id}'
+            linked_workunit_id = workunit_container_map.get(str(container_id), None)
+
+            try:
+                dataset = dictionary_to_dataset(dataset_data, dataset_name, container_id, DATASET_TEMPLATE_ID, linked_workunit_id)
+                dataset = create_dataset(token_data, dataset)
+                L.log_operation("Success | ORIGIN: run_main_job function", f'Dataset {dataset.get("id", "Null")} created successfully for container {container_id}', params=None, flush_logs=True)
+                print(f"Dataset created successfully for container {container_id}")
+            except Exception as e:
+                L.log_operation("Error | ORIGIN: run_main_job function", f"Failed to create dataset for container {container_id}: {e}", params=None, flush_logs=True)
+                print(f"Error creating dataset for container {container_id}:", e)
+    else:
+        L.log_operation("Info | ORIGIN: run_main_job function", "No dataset creation requested.", params=None, flush_logs=True)
+        print("No dataset creation requested.")
+
+
+    # STEP 5: Register Resources (Refactored)
     try:
         attach_resources_to_workunits(token_data, L, workunit_map)
     except Exception as e:
-        L.log_operation("Error", f"Failed to register resources: {e}", params=None, flush_logs=True)
+        L.log_operation("Error | ORIGIN: run_main_job function", f"Failed to register resources: {e}", params=None, flush_logs=True)
         print("Error registering resources:", e)
 
-    # STEP
+    # STEP 6: Attach gstore files (logs, reports, etc.) to B-Fabric entity as a Link
     try:
         attach_gstore_files_to_entities_as_link(token_data, L, attachment_paths)
         print("Attachment Paths:", attachment_paths)
     except Exception as e:
-        L.log_operation("Error", f"Failed to attach extra files: {e}", params=None, flush_logs=True)
+        L.log_operation("Error | ORIGIN: run_main_job function", f"Failed to attach extra files: {e}", params=None, flush_logs=True)
         print("Error attaching extra files:", e)
 
 
-    # STEP
+    # STEP 7: Charge the container for the service
     if charge:
 
         if service_id == 0:
             print("Service ID not provided. Skipping charge creation.")
-            L.log_operation("Info", "Service ID not provided. Skipping charge creation.", params=None, flush_logs=True)
+            L.log_operation("Info | ORIGIN: run_main_job function", "Service ID not provided. Skipping charge creation.", params=None, flush_logs=True)
         else:
-            container_ids =
+            container_ids = charge
+            print("Container IDs to charge:", container_ids)
             if not container_ids:
-                L.log_operation("Error", "No container IDs found for charging.", params=None, flush_logs=True)
+                L.log_operation("Error | ORIGIN: run_main_job function", "No container IDs found for charging.", params=None, flush_logs=True)
                 print("Error: No container IDs found for charging.")
                 return
             for container_id in container_ids:
                 charges = create_charge(token_data, container_id, service_id)
                 charge_id = charges[0].get("id")
-                L.log_operation("Success", f"Charge created for container {container_id} with service ID {service_id} and charge id {charge_id}", params=None, flush_logs=False)
+                L.log_operation("Success | ORIGIN: run_main_job function", f"Charge created for container {container_id} with service ID {service_id} and charge id {charge_id}", params=None, flush_logs=False)
                 print(f"Charge created with id {charge_id} for container {container_id} with service ID {service_id}")
             L.flush_logs()
     else:
-        L.log_operation("Info", "Charge creation skipped.", params=None, flush_logs=True)
+        L.log_operation("Info | ORIGIN: run_main_job function", "Charge creation skipped.", params=None, flush_logs=True)
         print("Charge creation skipped.")
+
+    # Final log message
+    L.log_operation("Success | ORIGIN: run_main_job function", "All steps completed successfully.", params=None, flush_logs=True)
+    print("All steps completed successfully.")
 
 #---------------------------------------------------------------------------------------------------------------------
 #---------------------------------------------------------------------------------------------------------------------
@@ -157,8 +196,6 @@ Dev Notes:
 # Step 1: Save Files from bytes
 # -----------------------------------------------------------------------------
 
-import os
-
 def save_files_from_bytes(files_as_byte_strings: dict, logger):
     """
     Saves byte string files to their respective paths.
@@ -167,7 +204,6 @@ def save_files_from_bytes(files_as_byte_strings: dict, logger):
     :param logger: Logging instance
     :return: Summary indicating how many files succeeded vs. failed
     """
-    results = {} # Store results: (destination) -> True (if success) or error message (if failure)
 
     message = "All files saved successfully."
 
@@ -177,11 +213,11 @@ def save_files_from_bytes(files_as_byte_strings: dict, logger):
             # Write file from byte string
             with open(destination, "+wb") as f:
                 f.write(file_bytes)
-            logger.log_operation(
+            logger.log_operation("File saved | ORIGIN: run_main_job function", f"File {destination} saved successfully.", params=None, flush_logs=True)
 
         except Exception as e:
             error_msg = f"Error saving file: {destination}, Error: {str(e)}"
-            logger.log_operation("Error", error_msg, params=None, flush_logs=True)
+            logger.log_operation("Error | ORIGIN: run_main_job function", error_msg, params=None, flush_logs=True)
             print(error_msg)
             message = f"Error saving some files."
 
@@ -192,19 +228,18 @@ def save_files_from_bytes(files_as_byte_strings: dict, logger):
 # Step 2: Execute Bash Commands
 # -----------------------------------------------------------------------------
 
-def execute_and_log_bash_commands(bash_commands: list[str]
+def execute_and_log_bash_commands(bash_commands: list[str]):
     """
     Executes a list of bash commands locally, logs and returns the output.
 
     :param bash_commands: List of commands to execute
-    :param logger: Logging instance
     :return: A single string containing logs for all commands
     """
     logstring = ""
 
     for cmd in bash_commands:
         logstring += "---------------------------------------------------------\n"
-        logstring += f"Executing Command
+        logstring += f"Executing Command\n"
 
         try:
             # Execute the command and capture both stdout and stderr
@@ -216,19 +251,15 @@ def execute_and_log_bash_commands(bash_commands: list[str], logger):
             if result.returncode == 0:
                 status = "SUCCESS"
                 log_entry = f"Command: {cmd}\nStatus: {status}\nOutput:\n{output}\n"
-                logger.log_operation("Info", log_entry, params=None, flush_logs=True)
             else:
                 status = "FAILURE"
                 log_entry = f"Command: {cmd}\nStatus: {status}\nError Output:\n{error_output}\n"
-                logger.log_operation("Error", log_entry, params=None, flush_logs=True)
 
             logstring += log_entry
             print(log_entry)
 
         except Exception as e:
             logstring += f"Command: {cmd}\nStatus: ERROR\nException: {str(e)}\n"
-            logger.log_operation("Error", f"Command: {cmd} failed with Exception: {str(e)}",
-                                 params=None, flush_logs=True)
 
     return logstring
 
@@ -243,17 +274,33 @@ def create_workunits_step(token_data, app_data, resource_paths, logger):
 
     :param token_data: dict with token/auth info
     :param app_data: dict with fields like {"id": <app_id>} or other app info
-    :param resource_paths: Dictionary {file_path: container_id}
+    :param resource_paths: Dictionary {file_path or dir_path: container_id}
     :param logger: a logger instance
-    :return: A
+    :return: A tuple containing:
+        A dictionary mapping file_paths to workunit objects {file_path: workunit_id}
+        A dictionary mapping container_ids to workunit objects {container_id: workunit_id}
     """
     app_id = app_data["id"] # Extract the application ID
 
-    #
-
+    # Expand any directories into individual files
+    expanded_paths = {}
+
+    for path_str, container_id in resource_paths.items():
+        path = Path(path_str)
+        if path.is_file():
+            expanded_paths[str(path)] = int(container_id)
+        elif path.is_dir():
+            for file in path.rglob("*"): # is a method that returns all files and folders in the directory and its subdirectories
+                if file.is_file():
+                    expanded_paths[str(file)] = int(container_id)
+        else:
+            logger.log_operation("Warning | ORIGIN: run_main_job function", f"Path {path_str} does not exist.", flush_logs=True)
+            print(f"Warning: Path {path_str} does not exist or is not accessible.")
+
+    if not expanded_paths:
+        raise ValueError("No valid file paths found in resource_paths.")
 
-
-    raise ValueError("No order IDs found in resource_paths; cannot create workunits.")
+    container_ids = list(set(expanded_paths.values()))
 
     # Create all workunits in one API call
     created_workunits = create_workunits(
@@ -270,20 +317,45 @@ def create_workunits_step(token_data, app_data, resource_paths, logger):
     workunit_map = {
         file_path: wu["id"]
         for wu in created_workunits
-        for file_path, container_id in
+        for file_path, container_id in expanded_paths.items()
         if container_id == wu["container"]["id"]
     }
 
-
-
+    workunit_container_map = {
+        str(wu["container"]["id"]): wu["id"]
+        for wu in created_workunits
+    }
 
+    workunit_ids = [wu.get("id") for wu in created_workunits]
+    logger.log_operation("Success | ORIGIN: run_main_job function", f"Total created Workunits: {workunit_ids}", params=None, flush_logs=True)
+    print(f"Total created Workunits: {workunit_ids}")
     print(workunit_map)
-
+
+    return workunit_map, workunit_container_map # Returning {file_path: workunit}
 
 
 # -----------------------------------------------------------------------------
-# Step 4:
+# Step 4: Create Dataset in B-Fabric
+# -----------------------------------------------------------------------------
+def create_dataset(token_data, dataset_data):
+
+    """
+    Creates a dataset in B-Fabric using the provided dataset data.
+    :param dataset_data: Dictionary containing dataset information
+    :param token_data: B-Fabric token data
+    :return: The created dataset object
+    """
+
+    wrapper = get_power_user_wrapper(token_data)
+    dataset = wrapper.save("dataset", dataset_data) # Save the dataset
+
+    return dataset[0]
+
+
+
+# -----------------------------------------------------------------------------
+# Step 5: Attach Resources in B-Fabric
 # -----------------------------------------------------------------------------
 
 def attach_resources_to_workunits(token_data, logger, workunit_map):
@@ -297,33 +369,45 @@ def attach_resources_to_workunits(token_data, logger, workunit_map):
     :param workunit_map: Dictionary mapping file_path to workunit_id {file_path: workunit_id}
     """
     if not workunit_map:
-        logger.log_operation("Info", "No workunits found, skipping resource registration.",
+        logger.log_operation("Info | ORIGIN: run_main_job function", "No workunits found, skipping resource registration.",
                              params=None, flush_logs=True)
         print("No workunits found, skipping resource registration.")
         return
-
+
     print("Workunit Map:", workunit_map)
 
+    # Dictionary to count successfully created resources per workunit
+    # defaultdict(int) automatically starts each new key with a value of 0
+    workunit_resource_count = defaultdict(int)
+
     for file_path, workunit_id in workunit_map.items():
-        print(f"Processing file: {file_path}, Workunit ID: {workunit_id}")
+        print(f"Processing file: {file_path}, Workunit ID: {workunit_id}")
         # Upload the file as a resource
         resource = create_resource(token_data, workunit_id, file_path)
         resource_id = resource.get("id")
         print("Resource ID:", resource_id)
-
         if resource_id:
-
-                                 params=None, flush_logs=True)
+            workunit_resource_count[workunit_id] += 1
             print(f"Resource {resource_id} attached to Workunit {workunit_id}")
         else:
-            logger.log_operation("Error", f"Failed to attach resource {file_path} for Workunit {workunit_id}",
+            logger.log_operation("Error | ORIGIN: run_main_job function", f"Failed to attach resource {file_path} for Workunit {workunit_id}",
                                  params=None, flush_logs=True)
             print(f"Failed to attach resource {file_path} for Workunit {workunit_id}")
 
+    # Log a summary per workunit
+    for workunit_id, count in workunit_resource_count.items():
+        logger.log_operation(
+            "Success | ORIGIN: run_main_job function",
+            f"Created {count} resource(s) for Workunit ID {workunit_id}",
+            params=None,
+            flush_logs=True
+        )
+        print(f"Created {count} resource(s) for Workunit ID {workunit_id}")
+
 
 
 # -----------------------------------------------------------------------------
-# Step
+# Step 6: Attachments of gstore in B-Fabric as a Link
 # -----------------------------------------------------------------------------
 
 def attach_gstore_files_to_entities_as_link(token_data, logger, attachment_paths: dict):
@@ -351,7 +435,7 @@ def attach_gstore_files_to_entities_as_link(token_data, logger, attachment_paths
     # Process each attachment
     for source_path, file_name in attachment_paths.items():
         if not source_path or not file_name:
-            logger.log_operation("Error", f"Missing required attachment details: {source_path} -> {file_name}", params=None, flush_logs=True)
+            logger.log_operation("Error | ORIGIN: run_main_job function", f"Missing required attachment details: {source_path} -> {file_name}", params=None, flush_logs=True)
             print(f"Error: Missing required attachment details: {source_path} -> {file_name}")
             continue
 
@@ -371,21 +455,22 @@ def attach_gstore_files_to_entities_as_link(token_data, logger, attachment_paths
             else: # We don't have direct access → Send to migration folder first
                 remote_tmp_path = f"{SCRATCH_PATH}/{file_name}"
                 scp_copy(source_path, TRX_LOGIN, TRX_SSH_KEY, remote_tmp_path)
+                print("scp copy done:")
 
                 # Move to final location
                 ssh_move(TRX_LOGIN, TRX_SSH_KEY, remote_tmp_path, final_remote_path)
+                print("ssh move done:")
 
             # Log success
             success_msg = f"Successfully attached '{file_name}' to {entity_class} (ID={entity_id})"
-            logger.log_operation("Success", success_msg, params=None, flush_logs=True)
+            logger.log_operation("Success | ORIGIN: run_main_job function", success_msg, params=None, flush_logs=True)
             print(success_msg)
 
             # Step 3: Create API link
-
+            create_attachment_link(token_data, logger, entity_class, entity_id, file_name, entity_folder)
 
         except Exception as e:
             error_msg = f"Exception while processing '{file_name}': {e}"
-            logger.log_operation("Error", error_msg, params=None, flush_logs=True)
+            logger.log_operation("Error | ORIGIN: run_main_job function", error_msg, params=None, flush_logs=True)
             print(error_msg)
 
 def local_access(remote_path):
@@ -397,8 +482,11 @@ def local_access(remote_path):
 
 def scp_copy(source_path, ssh_user, ssh_key, remote_path):
     """Copies a file to a remote location using SCP with the correct FGCZ server address."""
+    print("SCP Copying...")
     cmd = ["scp", "-i", ssh_key, source_path, f"{ssh_user}:{remote_path}"]
+    print("SCP Command:")
     subprocess.run(cmd, check=True)
+    print("SCP Command Executed:", cmd)
     print(f"Copied {source_path} to {remote_path}")
 
 
@@ -408,6 +496,9 @@ def ssh_move(ssh_user, ssh_key, remote_tmp_path, final_remote_path):
 
     subprocess.run(cmd, check=True)
     print(f"Moved {remote_tmp_path} to {final_remote_path}")
+
+    # Wait 10 seconds before the next move
+    time.sleep(10)
 
 
 def g_req_copy(source_path, destination_path):
@@ -417,8 +508,8 @@ def g_req_copy(source_path, destination_path):
     print(f"Copied {source_path} using g-req")
 
 
-def
-    """Creates an
+def create_attachment_link(token_data, logger, entity_class, entity_id, file_name, folder_name):
+    """Creates an attachment link in B-Fabric for the attached file."""
     wrapper = get_power_user_wrapper(token_data)
     url = f"{URL}/{folder_name}/{file_name}"
     timestamped_filename = f"{dt.now().strftime('%Y-%m-%d_%H:%M:%S')}_{file_name}"
@@ -433,14 +524,14 @@ def create_api_link(token_data, logger, entity_class, entity_id, file_name, fold
     try:
         link_result = wrapper.save("link", data)
         if link_result:
-            success_msg = f"
-            logger.log_operation("Success", success_msg, params=None, flush_logs=True)
+            success_msg = f"Attachment link created for '{file_name}': {url}"
+            logger.log_operation("Success | ORIGIN: run_main_job function", success_msg, params=None, flush_logs=True)
             print(success_msg)
         else:
-            raise ValueError("
+            raise ValueError("Attachment link creation failed")
     except Exception as e:
-        error_msg = f"Failed to create
-        logger.log_operation("Error", error_msg, params=None, flush_logs=True)
+        error_msg = f"Failed to create attachment link for '{file_name}': {e}"
+        logger.log_operation("Error | ORIGIN: run_main_job function", error_msg, params=None, flush_logs=True)
         print(error_msg)
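Putting the new parameters together, a hedged sketch of a 0.2.0 run_main_job call; every path, ID, and command is illustrative, and attachment_paths is passed as a dict because the function body iterates it with .items() despite the list[dict] annotation:

    run_main_job(
        files_as_byte_strings={"/scratch/out/report.txt": b"hello"},
        bash_commands=["echo processing"],
        resource_paths={"/scratch/out": 1234},  # directories are now expanded recursively
        attachment_paths={"/scratch/out/report.txt": "report.txt"},
        token=token,                            # B-Fabric token string
        service_id=42,
        charge=[1234],                          # new: container IDs to charge
        dataset_dict={1234: {"Sample": ["A", "B"],
                             "Count": ["10", "20"]}},  # new: per-container dataset columns
    )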
bfabric_web_apps-0.2.0.dist-info/RECORD
ADDED
@@ -0,0 +1,22 @@
+bfabric_web_apps/__init__.py,sha256=eRYBvXrDM8Bhdm7cbI80L1hjtbRFqUasJiBW52Hx3TI,1715
+bfabric_web_apps/layouts/layouts.py,sha256=z8gL4n4wwLdpLGomO9CftBLnGpc3r6OpmUc2-wBg8uo,14661
+bfabric_web_apps/objects/BfabricInterface.py,sha256=cEUcAdr4iUH8aS2VGe3CFnAbNQiTj13hYuSVcDg725A,10646
+bfabric_web_apps/objects/Logger.py,sha256=62LC94xhm7YG5LUw3yH46NqvJQsAX7wnc9D4zbY16rA,5224
+bfabric_web_apps/utils/app_init.py,sha256=RCdpCXp19cF74bouYJLPe-KSETZ0Vwqtd02Ta2VXEF8,428
+bfabric_web_apps/utils/callbacks.py,sha256=tB1xtHl_ePY6KJWNz3erkrZw3HFhRneewGqZm9xIYtI,12687
+bfabric_web_apps/utils/charging.py,sha256=oNNazH59SFkbxJKPvCel0IxdsRHC8xpJ0AXCLvI88FI,1069
+bfabric_web_apps/utils/components.py,sha256=X3NRnv--LsHWMtWL83Pzr2whOZLSEJIwXTklQdAQpZE,984
+bfabric_web_apps/utils/config.py,sha256=F4EExu7EkY7upOnxk6BU6zTLt9eU6_iy2y8esIlxTSc,1209
+bfabric_web_apps/utils/create_app_in_bfabric.py,sha256=Z7puke8QB4SBuDJ9x3_OjgApzovKu0Nt1g8EqkOHJpc,2758
+bfabric_web_apps/utils/dataset_utils.py,sha256=p_UtoOl1kJpSm2BGdg31Ji0C7ctst40wp4LX1tUe4tI,3360
+bfabric_web_apps/utils/get_logger.py,sha256=0Y3SrXW93--eglS0_ZOc34NOriAt6buFPik5n0ltzRA,434
+bfabric_web_apps/utils/get_power_user_wrapper.py,sha256=T33z64XjmJ0KSlmfEmrEP8eYpbpINCVD6Xld_V7PR2g,1027
+bfabric_web_apps/utils/redis_connection.py,sha256=qXSPxW6m55Ogv44BhmPCl9ACuvzmpfZNU73UJhHRXL4,133
+bfabric_web_apps/utils/redis_queue.py,sha256=MCx7z_I2NusJ4P42mcLvV7STtXBFMIIvun83fM8zOGI,168
+bfabric_web_apps/utils/redis_worker_init.py,sha256=wtjQL48PLNXD1s-5s3Oq5EC8BmcfKcd7IhUbTH_EYz8,1014
+bfabric_web_apps/utils/resource_utilities.py,sha256=N4EiUkxXHZ18jnU2OuRqaGSroCZ73Ogb9lkeA21Kvq4,5716
+bfabric_web_apps/utils/run_main_pipeline.py,sha256=whmUbO9mMom9voOCv7iS51wh6St4DfawCD337BuPvtY,23112
+bfabric_web_apps-0.2.0.dist-info/LICENSE,sha256=k0O_i2k13i9e35aO-j7FerJafAqzzu8x0kkBs0OWF3c,1065
+bfabric_web_apps-0.2.0.dist-info/METADATA,sha256=vhqne76HPRsHA9uFhTPgP7wudTKYJq74V7GEQYR9S1I,687
+bfabric_web_apps-0.2.0.dist-info/WHEEL,sha256=d2fvjOD7sXsVzChCqf0Ty0JbHKBaLYwDbGQDwQTnJ50,88
+bfabric_web_apps-0.2.0.dist-info/RECORD,,
@@ -1,21 +0,0 @@
|
|
1
|
-
bfabric_web_apps/__init__.py,sha256=9OSketawJ_bkCbCN0Q3KaM6_u1_zyE74oC7jtrznFP8,1503
|
2
|
-
bfabric_web_apps/layouts/layouts.py,sha256=z8gL4n4wwLdpLGomO9CftBLnGpc3r6OpmUc2-wBg8uo,14661
|
3
|
-
bfabric_web_apps/objects/BfabricInterface.py,sha256=2BNskMzV5K1a-tXFHQubcBk7Rt-8g9du7mNAJrGohMY,10170
|
4
|
-
bfabric_web_apps/objects/Logger.py,sha256=62LC94xhm7YG5LUw3yH46NqvJQsAX7wnc9D4zbY16rA,5224
|
5
|
-
bfabric_web_apps/utils/app_init.py,sha256=RCdpCXp19cF74bouYJLPe-KSETZ0Vwqtd02Ta2VXEF8,428
|
6
|
-
bfabric_web_apps/utils/callbacks.py,sha256=tB1xtHl_ePY6KJWNz3erkrZw3HFhRneewGqZm9xIYtI,12687
|
7
|
-
bfabric_web_apps/utils/charging.py,sha256=oNNazH59SFkbxJKPvCel0IxdsRHC8xpJ0AXCLvI88FI,1069
|
8
|
-
bfabric_web_apps/utils/components.py,sha256=X3NRnv--LsHWMtWL83Pzr2whOZLSEJIwXTklQdAQpZE,984
|
9
|
-
bfabric_web_apps/utils/config.py,sha256=vJzhmc6ooFb46MM1Eg3m8gNrM4fJa-l5Tao2Py-SF_I,1115
|
10
|
-
bfabric_web_apps/utils/create_app_in_bfabric.py,sha256=eVk3cQDXxW-yo9b9n_zzGO6kLg_SLxYbIDECyvEPJXU,2752
|
11
|
-
bfabric_web_apps/utils/get_logger.py,sha256=0Y3SrXW93--eglS0_ZOc34NOriAt6buFPik5n0ltzRA,434
|
12
|
-
bfabric_web_apps/utils/get_power_user_wrapper.py,sha256=T33z64XjmJ0KSlmfEmrEP8eYpbpINCVD6Xld_V7PR2g,1027
|
13
|
-
bfabric_web_apps/utils/redis_connection.py,sha256=qXSPxW6m55Ogv44BhmPCl9ACuvzmpfZNU73UJhHRXL4,133
|
14
|
-
bfabric_web_apps/utils/redis_queue.py,sha256=MCx7z_I2NusJ4P42mcLvV7STtXBFMIIvun83fM8zOGI,168
|
15
|
-
bfabric_web_apps/utils/redis_worker_init.py,sha256=9SUc9bbgBeMbUdqJD9EkWPA4wcJjvyX6Tzanv5JfqEg,691
|
16
|
-
bfabric_web_apps/utils/resource_utilities.py,sha256=4LnV_eQjKkcpZJBsWFx--dmASyE7jfJfktk2hdHn5Fk,5856
|
17
|
-
bfabric_web_apps/utils/run_main_pipeline.py,sha256=RG-Jb3-O1Ok8L0i1gddWVeAEqJT6REITtcD9XDoC_Dc,18766
|
18
|
-
bfabric_web_apps-0.1.7.dist-info/LICENSE,sha256=k0O_i2k13i9e35aO-j7FerJafAqzzu8x0kkBs0OWF3c,1065
|
19
|
-
bfabric_web_apps-0.1.7.dist-info/METADATA,sha256=Jf3A0eVEOXvoMzFJCmxpWnoGvKNnHd2ckHg4o9w6nm0,687
|
20
|
-
bfabric_web_apps-0.1.7.dist-info/WHEEL,sha256=d2fvjOD7sXsVzChCqf0Ty0JbHKBaLYwDbGQDwQTnJ50,88
|
21
|
-
bfabric_web_apps-0.1.7.dist-info/RECORD,,
|
{bfabric_web_apps-0.1.7.dist-info → bfabric_web_apps-0.2.0.dist-info}/LICENSE
File without changes
{bfabric_web_apps-0.1.7.dist-info → bfabric_web_apps-0.2.0.dist-info}/WHEEL
File without changes