bfabric-web-apps 0.1.3__py3-none-any.whl → 0.1.5__py3-none-any.whl

This diff shows the changes between publicly released package versions as they appear in their respective public registries. It is provided for informational purposes only.
@@ -4,215 +4,166 @@ from bfabric_web_apps.utils.get_power_user_wrapper import get_power_user_wrapper
  from bfabric_scripts.bfabric_upload_resource import bfabric_upload_resource
  from pathlib import Path

- def create_workunit(token_data, application_name, application_description, application_id, container_ids):
+ def create_workunit(token_data, application_name, application_description, application_id, container_id):
      """
-     Create a new workunit in B-Fabric for each container ID.
+     Create a single workunit in B-Fabric.

      Args:
          token_data (dict): Authentication token data.
          application_name (str): Name of the application.
          application_description (str): Description of the application.
          application_id (int): Application ID.
-         container_ids (list): List of container IDs.
+         container_id (int): Container ID (Order ID).

      Returns:
-         list: List of created workunit IDs.
+         obj: Created workunit object or None if creation fails.
      """
      L = get_logger(token_data)
      wrapper = bfabric_interface.get_wrapper()
-     workunit_ids = []

-     # Ensure container_ids is a list
-     if not isinstance(container_ids, list):
-         container_ids = [container_ids]  # Convert to list if single value
-
-     for container_id in container_ids:
-         workunit_data = {
-             "name": f"{application_name} - Order {container_id}",
-             "description": f"{application_description} for Order {container_id}",
-             "applicationid": int(application_id),
-             "containerid": container_id,  # Assigning order ID dynamically
-         }
-
-         try:
-             workunit_response = L.logthis(
-                 api_call=wrapper.save,
-                 endpoint="workunit",
-                 obj=workunit_data,
-                 params=None,
-                 flush_logs=True
-             )
-             workunit_id = workunit_response[0].get("id")
-             print(f"Created Workunit ID: {workunit_id} for Order ID: {container_id}")
-             workunit_ids.append(workunit_id)
+     workunit_data = {
+         "name": f"{application_name} - Order {container_id}",
+         "description": f"{application_description} for Order {container_id}",
+         "applicationid": int(application_id),
+         "containerid": container_id,
+     }

-         except Exception as e:
-             L.log_operation(
-                 "Error",
-                 f"Failed to create workunit for Order {container_id}: {e}",
-                 params=None,
-                 flush_logs=True,
-             )
-             print(f"Failed to create workunit for Order {container_id}: {e}")
+     try:
+         workunit_response = L.logthis(
+             api_call=wrapper.save,
+             endpoint="workunit",
+             obj=workunit_data,
+             params=None,
+             flush_logs=True
+         )
+         workunit_id = workunit_response[0].get("id")
+         print(f"Created Workunit ID: {workunit_id} for Order ID: {container_id}")

-     return workunit_ids  # Returning a list of all created workunits
+         # First we get the existing workunit_ids for the current job object:
+         pre_existing_workunit_ids = [elt.get("id") for elt in wrapper.read("job", {"id": token_data.get("jobId")})[0].get("workunit", [])]
+
+         # Now we associate the job object with the workunits
+         job = L.logthis(
+             api_call=L.power_user_wrapper.save,
+             endpoint="job",
+             obj={"id": token_data.get("jobId"), "workunitid": [workunit_id] + pre_existing_workunit_ids},
+             params=None,
+             flush_logs=True
+         )
+         return workunit_response[0]

+     except Exception as e:
+         L.log_operation(
+             "Error",
+             f"Failed to create workunit for Order {container_id}: {e}",
+             params=None,
+             flush_logs=True,
+         )
+         print(f"Failed to create workunit for Order {container_id}: {e}")
+         return None
 
- def create_resource(token_data, workunit_id, gz_file_path):
+
+ def create_workunits(token_data, application_name, application_description, application_id, container_ids):
      """
-     Upload a .gz resource to an existing B-Fabric workunit.
+     Create multiple workunits in B-Fabric.
+
+     Args:
+         token_data (dict): Authentication token data.
+         application_name (str): Name of the application.
+         application_description (str): Description of the application.
+         application_id (int): Application ID.
+         container_ids (list): List of container IDs.

+     Returns:
+         list[obj]: List of created workunit objects.
+     """
+     if not isinstance(container_ids, list):
+         container_ids = [container_ids]  # Ensure it's a list
+
+     workunits = [
+         create_workunit(token_data, application_name, application_description, application_id, container_id)
+         for container_id in container_ids
+     ]
+
+     return [wu_id for wu_id in workunits if wu_id is not None]  # Filter out None values
+
+
+ from pathlib import Path
+
+ def create_resource(token_data, workunit_id, file_path, storage_id="20"):  # GWC Server is storage id 20.
+     """
+     Attach a single file as a resource to an existing B-Fabric workunit.
+
      Args:
          token_data (dict): Authentication token data.
          workunit_id (int): ID of the workunit to associate the resource with.
-         gz_file_path (str): Full path to the .gz file to upload.
+         file_path (str): Full path to the file to attach.

      Returns:
-         int: Resource ID if successful, None otherwise.
+         obj: Resource object if successful, None otherwise.
      """
      L = get_logger(token_data)
      wrapper = get_power_user_wrapper(token_data)

      try:
-         file_path = Path(gz_file_path)
-
-         # Use the proper upload function
-         print("test", wrapper, file_path, workunit_id)
-         result = bfabric_upload_resource(wrapper, file_path, workunit_id)
+         file_path = Path(file_path)
+
+         # Attaching the resource
+         print(f"Attaching: {file_path.name} to workunit: {workunit_id}")
+
+         result = wrapper.save(
+             endpoint="resource",
+             obj={
+                 "workunitid": str(workunit_id),
+                 "name": file_path.name,
+                 "description": f"Resource attached to workunit {workunit_id}",
+                 "relativepath": file_path.name,
+                 "storageid": str(storage_id),
+             }
+         )

          if result:
-             print(f"Resource uploaded: {file_path.name}")
+             resource_id = result[0].get("id")
+             print(f"Resource attached: {file_path.name} (ID: {resource_id})")
              L.log_operation(
-                 "upload_resource",
-                 f"Resource uploaded successfully: {file_path.name}",
+                 "Attach_resource",
+                 f"Resource attached successfully: {file_path.name}",
                  params=None,
                  flush_logs=True,
              )
-             return result
+             return result[0]
          else:
-             raise ValueError(f"Failed to upload resource: {file_path.name}")
+             raise ValueError(f"Failed to attach resource: {file_path.name}")

      except Exception as e:
          L.log_operation(
              "error",
-             f"Failed to upload resource: {e}",
+             f"Failed to attach resource: {e}",
              params=None,
              flush_logs=True,
          )
-         print(f"Failed to upload resource: {e}")
+         print(f"Failed to attach resource: {e}")
          return None


-
- '''
-
-
-
- # Upload a resource to the created workunit
- resource_name = "example_resource.txt"
- resource_content = b"This is an example resource content."
-
- try:
-     resource_response = bfabric.upload_resource(
-         resource_name=resource_name,
-         content=resource_content,
-         workunit_id=workunit_id
-     )
-     print(f"Resource '{resource_name}' uploaded successfully.")
- except Exception as e:
-     print(f"Failed to upload resource: {e}")
-     exit(1)
-
-
-
-
-
-
-
-
-
- import subprocess
- from zeep import Client
- import os
- from bfabric_web_apps.utils.get_logger import get_logger
-
- BFABRIC_WORKUNIT_WSDL = "https://fgcz-bfabric-test.uzh.ch:443/bfabric/workunit?wsdl"
- BFABRIC_RESOURCE_WSDL = "https://fgcz-bfabric-test.uzh.ch:443/bfabric/resource?wsdl"
-
- def run_pipeline_and_register_in_bfabric(run_name: str, output_dir: str):
+ def create_resources(token_data, workunit_id, file_paths):
      """
-     Starts the Nextflow pipeline and stores the results in B-Fabric.
+     Attach multiple files as resources to an existing B-Fabric workunit.
+
+     Args:
+         token_data (dict): Authentication token data.
+         workunit_id (int): ID of the workunit to associate the resources with.
+         file_paths (list): List of full paths to files to attach.

-     :param run_name: Name of the sequencing run
-     :param output_dir: Directory in which the FASTQ files are stored
+     Returns:
+         list[obj]: List of successfully attached resource objects.
      """
-     print(f"[INFO] Starting Nextflow pipeline for {run_name}...")
-
-     # Start the Nextflow pipeline
-     process = subprocess.run([
-         "nextflow", "run", "nf-core/bclconvert",
-         "-profile", "docker",
-         "--outdir", output_dir,
-         "-resume"
-     ], capture_output=True, text=True)
-
-     if process.returncode != 0:
-         print(f"[ERROR] Nextflow pipeline failed: {process.stderr}")
-         return False
-
-     print(f"[INFO] Pipeline finished. Registering results...")
-
-     # Create the workunit in B-Fabric
-     workunit_id = create_bfabric_workunit(run_name)
-
-     # If the workunit was created successfully, store the resources
-     if workunit_id:
-         register_fastq_files_in_bfabric(output_dir, workunit_id)
-     else:
-         print("[ERROR] Could not register the workunit in B-Fabric!")
-
-     return True
+     if not isinstance(file_paths, list):
+         file_paths = [file_paths]  # Ensure it's a list

- def create_bfabric_workunit(run_name: str):
-     """Creates a workunit in B-Fabric."""
-     try:
-         client = Client(BFABRIC_WORKUNIT_WSDL)
-         workunit_data = {
-             "name": run_name,
-             "description": "Illumina BCL to FASTQ conversion",
-             "status": "Completed"
-         }
-         L = get_logger({})
-         response = L.logthis(
-             api_call=client.service.createWorkunit,
-             obj=workunit_data
-         )[0]
-         print(f"[INFO] Workunit created with ID: {response}")
-         return response
-     except Exception as e:
-         print(f"[ERROR] Error while creating the workunit: {e}")
-         return None
+     resources = [
+         create_resource(token_data, workunit_id, file_path)
+         for file_path in file_paths
+     ]

- def register_fastq_files_in_bfabric(output_dir: str, workunit_id: int):
-     """Registers all FASTQ files from the output directory in B-Fabric."""
-     try:
-         client = Client(BFABRIC_RESOURCE_WSDL)
-         L = get_logger({})
-         for file_name in os.listdir(output_dir):
-             if file_name.endswith(".fastq.gz"):
-                 file_path = os.path.join(output_dir, file_name)
-                 resource_data = {
-                     "name": file_name,
-                     "description": "Generated by nf-core/bclconvert",
-                     "path": file_path,
-                     "type": "FASTQ",
-                     "workunitId": workunit_id
-                 }
-                 response = L.logthis(
-                     api_call=client.service.createResource,
-                     obj=resource_data
-                 )[0]
-                 print(f"[INFO] Resource saved with ID: {response}")
-     except Exception as e:
-         print(f"[ERROR] Error while registering the resources: {e}")
- '''
+     return [res_id for res_id in resources if res_id is not None]  # Filter out None values
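
For orientation, below is a minimal usage sketch of the helpers introduced in 0.1.5. It is an illustration only: the import location of these functions is not shown in this diff, and the token data, application ID, container IDs, and file paths are placeholder assumptions, not values taken from the package.

# Sketch only: assumes create_workunits / create_resources are in scope;
# token_data, IDs, and paths below are placeholders.
token_data = {"jobId": 1}          # normally supplied by the app's B-Fabric authentication flow
application_id = 123               # placeholder application ID
container_ids = [4567, 4568]       # placeholder container (order) IDs

workunits = create_workunits(token_data, "My App", "Demo description", application_id, container_ids)

for wu in workunits:
    # Each entry is the created workunit object; attach one or more result files to it.
    attached = create_resources(token_data, wu.get("id"), ["/tmp/results/output.txt"])
    print(attached)

The return values mirror the new signatures above: create_workunits returns the created workunit objects rather than bare IDs, and create_resources returns the successfully attached resource objects.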