cloudos-cli 2.66.2__tar.gz → 2.68.0__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (53) hide show
  1. {cloudos_cli-2.66.2 → cloudos_cli-2.68.0}/PKG-INFO +110 -1
  2. {cloudos_cli-2.66.2 → cloudos_cli-2.68.0}/README.md +109 -0
  3. {cloudos_cli-2.66.2 → cloudos_cli-2.68.0}/cloudos_cli/__main__.py +44 -0
  4. cloudos_cli-2.68.0/cloudos_cli/_version.py +1 -0
  5. {cloudos_cli-2.66.2 → cloudos_cli-2.68.0}/cloudos_cli/clos.py +40 -2
  6. {cloudos_cli-2.66.2 → cloudos_cli-2.68.0}/cloudos_cli/jobs/job.py +162 -0
  7. {cloudos_cli-2.66.2 → cloudos_cli-2.68.0}/cloudos_cli/link/link.py +118 -4
  8. cloudos_cli-2.68.0/cloudos_cli/related_analyses/__init__.py +8 -0
  9. cloudos_cli-2.68.0/cloudos_cli/related_analyses/related_analyses.py +251 -0
  10. {cloudos_cli-2.66.2 → cloudos_cli-2.68.0}/cloudos_cli.egg-info/PKG-INFO +110 -1
  11. {cloudos_cli-2.66.2 → cloudos_cli-2.68.0}/cloudos_cli.egg-info/SOURCES.txt +5 -1
  12. cloudos_cli-2.68.0/tests/test_related_analyses/__init__.py +1 -0
  13. cloudos_cli-2.68.0/tests/test_related_analyses/test_related_analyses.py +204 -0
  14. cloudos_cli-2.66.2/cloudos_cli/_version.py +0 -1
  15. {cloudos_cli-2.66.2 → cloudos_cli-2.68.0}/LICENSE +0 -0
  16. {cloudos_cli-2.66.2 → cloudos_cli-2.68.0}/cloudos_cli/__init__.py +0 -0
  17. {cloudos_cli-2.66.2 → cloudos_cli-2.68.0}/cloudos_cli/configure/__init__.py +0 -0
  18. {cloudos_cli-2.66.2 → cloudos_cli-2.68.0}/cloudos_cli/configure/configure.py +0 -0
  19. {cloudos_cli-2.66.2 → cloudos_cli-2.68.0}/cloudos_cli/cost/__init__.py +0 -0
  20. {cloudos_cli-2.66.2 → cloudos_cli-2.68.0}/cloudos_cli/cost/cost.py +0 -0
  21. {cloudos_cli-2.66.2 → cloudos_cli-2.68.0}/cloudos_cli/datasets/__init__.py +0 -0
  22. {cloudos_cli-2.66.2 → cloudos_cli-2.68.0}/cloudos_cli/datasets/datasets.py +0 -0
  23. {cloudos_cli-2.66.2 → cloudos_cli-2.68.0}/cloudos_cli/import_wf/__init__.py +0 -0
  24. {cloudos_cli-2.66.2 → cloudos_cli-2.68.0}/cloudos_cli/import_wf/import_wf.py +0 -0
  25. {cloudos_cli-2.66.2 → cloudos_cli-2.68.0}/cloudos_cli/jobs/__init__.py +0 -0
  26. {cloudos_cli-2.66.2 → cloudos_cli-2.68.0}/cloudos_cli/link/__init__.py +0 -0
  27. {cloudos_cli-2.66.2 → cloudos_cli-2.68.0}/cloudos_cli/logging/__init__.py +0 -0
  28. {cloudos_cli-2.66.2 → cloudos_cli-2.68.0}/cloudos_cli/logging/logger.py +0 -0
  29. {cloudos_cli-2.66.2 → cloudos_cli-2.68.0}/cloudos_cli/procurement/__init__.py +0 -0
  30. {cloudos_cli-2.66.2 → cloudos_cli-2.68.0}/cloudos_cli/procurement/images.py +0 -0
  31. {cloudos_cli-2.66.2 → cloudos_cli-2.68.0}/cloudos_cli/queue/__init__.py +0 -0
  32. {cloudos_cli-2.66.2 → cloudos_cli-2.68.0}/cloudos_cli/queue/queue.py +0 -0
  33. {cloudos_cli-2.66.2 → cloudos_cli-2.68.0}/cloudos_cli/utils/__init__.py +0 -0
  34. {cloudos_cli-2.66.2 → cloudos_cli-2.68.0}/cloudos_cli/utils/array_job.py +0 -0
  35. {cloudos_cli-2.66.2 → cloudos_cli-2.68.0}/cloudos_cli/utils/cloud.py +0 -0
  36. {cloudos_cli-2.66.2 → cloudos_cli-2.68.0}/cloudos_cli/utils/details.py +0 -0
  37. {cloudos_cli-2.66.2 → cloudos_cli-2.68.0}/cloudos_cli/utils/errors.py +0 -0
  38. {cloudos_cli-2.66.2 → cloudos_cli-2.68.0}/cloudos_cli/utils/last_wf.py +0 -0
  39. {cloudos_cli-2.66.2 → cloudos_cli-2.68.0}/cloudos_cli/utils/requests.py +0 -0
  40. {cloudos_cli-2.66.2 → cloudos_cli-2.68.0}/cloudos_cli/utils/resources.py +0 -0
  41. {cloudos_cli-2.66.2 → cloudos_cli-2.68.0}/cloudos_cli.egg-info/dependency_links.txt +0 -0
  42. {cloudos_cli-2.66.2 → cloudos_cli-2.68.0}/cloudos_cli.egg-info/entry_points.txt +0 -0
  43. {cloudos_cli-2.66.2 → cloudos_cli-2.68.0}/cloudos_cli.egg-info/requires.txt +0 -0
  44. {cloudos_cli-2.66.2 → cloudos_cli-2.68.0}/cloudos_cli.egg-info/top_level.txt +0 -0
  45. {cloudos_cli-2.66.2 → cloudos_cli-2.68.0}/setup.cfg +0 -0
  46. {cloudos_cli-2.66.2 → cloudos_cli-2.68.0}/setup.py +0 -0
  47. {cloudos_cli-2.66.2 → cloudos_cli-2.68.0}/tests/__init__.py +0 -0
  48. {cloudos_cli-2.66.2 → cloudos_cli-2.68.0}/tests/functions_for_pytest.py +0 -0
  49. {cloudos_cli-2.66.2 → cloudos_cli-2.68.0}/tests/test_cli_project_create.py +0 -0
  50. {cloudos_cli-2.66.2 → cloudos_cli-2.68.0}/tests/test_cost/__init__.py +0 -0
  51. {cloudos_cli-2.66.2 → cloudos_cli-2.68.0}/tests/test_cost/test_job_cost.py +0 -0
  52. {cloudos_cli-2.66.2 → cloudos_cli-2.68.0}/tests/test_logging/__init__.py +0 -0
  53. {cloudos_cli-2.66.2 → cloudos_cli-2.68.0}/tests/test_logging/test_logger.py +0 -0
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: cloudos_cli
3
- Version: 2.66.2
3
+ Version: 2.68.0
4
4
  Summary: Python package for interacting with CloudOS
5
5
  Home-page: https://github.com/lifebit-ai/cloudos-cli
6
6
  Author: David Piñeyro
@@ -80,6 +80,7 @@ Python package for interacting with CloudOS
80
80
  - [Get Job Workdir](#get-job-workdir)
81
81
  - [List Jobs](#list-jobs)
82
82
  - [Get Job Costs](#get-job-costs)
83
+ - [Get Job Related Analyses](#get-job-related-analyses)
83
84
  - [Bash Jobs](#bash-jobs)
84
85
  - [Send Array Job](#send-array-job)
85
86
  - [Submit a Bash Array Job](#submit-a-bash-array-job)
@@ -1198,6 +1199,114 @@ cat 62c83a1191fe06013b7ef355_costs.json
1198
1199
 
1199
1200
  ```
1200
1201
 
1202
+ #### Get Job Related Analyses
1203
+
1204
+ You can view related jobs that share the same working directory in a CloudOS workspace by using the `job related` command. This feature helps track job lineages, resume workflows, and understand job relationships.
1205
+
1206
+ The information is retrieved from CloudOS and can be displayed in multiple formats:
1207
+
1208
+ - **Console display**: Rich formatted tables with pagination
1209
+ - **JSON**: Complete job data for programmatic processing
1210
+
1211
+ To get related analyses for a specific job:
1212
+
1213
+ ```bash
1214
+ cloudos job related --profile my_profile --job-id 66b5e5ded52f33061e2468d5
1215
+ ```
1216
+
1217
+ The expected output is a formatted table showing:
1218
+
1219
+ ```console
1220
+ Total related analyses found: 15
1221
+
1222
+ Related Analyses
1223
+ ┏━━━━━━━━━━━┳━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┳━━━━━━━━━━━━━━━━┳━━━━━━━━━━━━━━━━━━━━━━━━━━┳━━━━━━━━━━━━━━━━━━━━━┳━━━━━━━━━━━┳━━━━━━━━━━━━┓
1224
+ ┃ Status ┃ Name ┃ Owner ┃ ID ┃ Submit time ┃ Run time ┃ Total Cost ┃
1225
+ ┡━━━━━━━━━━━╇━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━╇━━━━━━━━━━━━━━━━╇━━━━━━━━━━━━━━━━━━━━━━━━━━╇━━━━━━━━━━━━━━━━━━━━━╇━━━━━━━━━━━╇━━━━━━━━━━━━┩
1226
+ │ completed │ workflow_analysis_v1 │ John Smith │ 66b5e5ded52f33061e2468d5 │ 2024-08-09 14:23:10 │ 45m 12s │ $1.2340 │
1227
+ │ completed │ workflow_analysis_v1_resumed │ John Smith │ 66b6f2a1e52f33061e246abc │ 2024-08-10 09:15:22 │ 12m 5s │ $0.3210 │
1228
+ │ running │ workflow_analysis_v2 │ Jane Doe │ 66b7a3b2f52f33061e246def │ 2024-08-11 11:30:45 │ 5m 30s │ $0.1150 │
1229
+ │ failed │ workflow_analysis_test │ John Smith │ 66b8c4d3g52f33061e246ghi │ 2024-08-12 16:42:18 │ 2m 15s │ $0.0450 │
1230
+ │ completed │ workflow_downstream_processing │ Jane Doe │ 66b9d5e4h52f33061e246jkl │ 2024-08-13 08:20:33 │ 28m 40s │ $0.7890 │
1231
+ │ aborted │ workflow_analysis_v1_test2 │ John Smith │ 66bae6f5i52f33061e246mno │ 2024-08-14 13:55:07 │ 1m 8s │ $0.0120 │
1232
+ │ completed │ workflow_final_results │ Jane Doe │ 66bbf807j52f33061e246pqr │ 2024-08-15 10:12:44 │ 18m 22s │ $0.5670 │
1233
+ │ queued │ workflow_reanalysis │ John Smith │ 66bcd918k52f33061e246stu │ 2024-08-16 15:38:19 │ N/A │ N/A │
1234
+ │ completed │ workflow_quality_control │ Jane Doe │ 66bdea29l52f33061e246vwx │ 2024-08-17 07:45:52 │ 8m 15s │ $0.2340 │
1235
+ │ completed │ workflow_variant_calling │ John Smith │ 66befb3am52f33061e246yz1 │ 2024-08-18 12:03:28 │ 55m 48s │ $1.5620 │
1236
+ └───────────┴────────────────────────────────────┴────────────────┴──────────────────────────┴─────────────────────┴───────────┴────────────┘
1237
+ On page 1/2: n = next, p = prev, q = quit
1238
+ ```
1239
+
1240
+ The table displays key information for each related job:
1241
+ - **Status**: Current job state (queued, initializing, running, completed, aborting, aborted, failed)
1242
+ - **Name**: Job name assigned when submitted
1243
+ - **Owner**: User who submitted the job (first name and last name)
1244
+ - **ID**: Job identifier in CloudOS
1245
+ - **Submit time**: When the job was submitted (formatted as YYYY-MM-DD HH:MM:SS)
1246
+ - **Run time**: Actual execution time (formatted as hours, minutes, seconds)
1247
+ - **Total Cost**: Compute cost in USD
1248
+
1249
+ **Pagination controls:**
1250
+ - Press `n` to navigate to the next page
1251
+ - Press `p` to navigate to the previous page
1252
+ - Press `q` to quit and return to the terminal
1253
+
1254
+ The console automatically clears between pages for a clean viewing experience, displaying 10 jobs per page.
1255
+
1256
+ **Export options:**
1257
+
1258
+ Save related analyses to JSON for programmatic analysis:
1259
+
1260
+ ```bash
1261
+ cloudos job related --profile my_profile --job-id 66b5e5ded52f33061e2468d5 --output-format json
1262
+
1263
+ cat related_analyses.json
1264
+ {
1265
+ "66b5e5ded52f33061e2468d5": {
1266
+ "status": "completed",
1267
+ "name": "workflow_analysis_v1",
1268
+ "user_name": "John",
1269
+ "user_surname": "Smith",
1270
+ "_id": "66b5e5ded52f33061e2468d5",
1271
+ "createdAt": "2024-08-09T14:23:10.000Z",
1272
+ "runTime": 2712,
1273
+ "computeCostSpent": 123400
1274
+ },
1275
+ "66b6f2a1e52f33061e246abc": {
1276
+ "status": "completed",
1277
+ "name": "workflow_analysis_v1_resumed",
1278
+ "user_name": "John",
1279
+ "user_surname": "Smith",
1280
+ "_id": "66b6f2a1e52f33061e246abc",
1281
+ "createdAt": "2024-08-10T09:15:22.000Z",
1282
+ "runTime": 725,
1283
+ "computeCostSpent": 32100
1284
+ },
1285
+ ...
1286
+ }
1287
+ ```
1288
+
1289
+ The JSON format includes:
1290
+ - `status`: Job execution status
1291
+ - `name`: Job name
1292
+ - `user_name` and `user_surname`: Owner information
1293
+ - `_id`: Job identifier
1294
+ - `createdAt`: ISO 8601 timestamp of job submission
1295
+ - `runTime`: Execution time in seconds
1296
+ - `computeCostSpent`: Total cost in cents (divide by 100 for dollars)
1297
+
1298
+ **Use cases:**
1299
+
1300
+ Related analyses are particularly useful for:
1301
+ - **Resumed workflows**: Find previous jobs to continue from checkpoints
1302
+ - **Job lineage tracking**: Understand which jobs are part of the same analysis
1303
+ - **Cost analysis**: Compare costs across related jobs
1304
+ - **Debugging**: Identify failed jobs in a workflow series
1305
+ - **Collaboration**: See all jobs from team members working on shared data
1306
+
1307
+ > [!NOTE]
1308
+ > Related jobs are identified by their shared working directory folder ID. Only jobs within the same workspace that use the same working directory will be displayed.
1309
+
1201
1310
  ### Bash Jobs
1202
1311
  Execute bash scripts on CloudOS for custom processing workflows. Bash jobs allow you to run shell commands with custom parameters and are ideal for data preprocessing or simple computational tasks.
1203
1312
 
@@ -45,6 +45,7 @@ Python package for interacting with CloudOS
45
45
  - [Get Job Workdir](#get-job-workdir)
46
46
  - [List Jobs](#list-jobs)
47
47
  - [Get Job Costs](#get-job-costs)
48
+ - [Get Job Related Analyses](#get-job-related-analyses)
48
49
  - [Bash Jobs](#bash-jobs)
49
50
  - [Send Array Job](#send-array-job)
50
51
  - [Submit a Bash Array Job](#submit-a-bash-array-job)
@@ -1163,6 +1164,114 @@ cat 62c83a1191fe06013b7ef355_costs.json
1163
1164
 
1164
1165
  ```
1165
1166
 
1167
+ #### Get Job Related Analyses
1168
+
1169
+ You can view related jobs that share the same working directory in a CloudOS workspace by using the `job related` command. This feature helps track job lineages, resume workflows, and understand job relationships.
1170
+
1171
+ The information is retrieved from CloudOS and can be displayed in multiple formats:
1172
+
1173
+ - **Console display**: Rich formatted tables with pagination
1174
+ - **JSON**: Complete job data for programmatic processing
1175
+
1176
+ To get related analyses for a specific job:
1177
+
1178
+ ```bash
1179
+ cloudos job related --profile my_profile --job-id 66b5e5ded52f33061e2468d5
1180
+ ```
1181
+
1182
+ The expected output is a formatted table showing:
1183
+
1184
+ ```console
1185
+ Total related analyses found: 15
1186
+
1187
+ Related Analyses
1188
+ ┏━━━━━━━━━━━┳━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┳━━━━━━━━━━━━━━━━┳━━━━━━━━━━━━━━━━━━━━━━━━━━┳━━━━━━━━━━━━━━━━━━━━━┳━━━━━━━━━━━┳━━━━━━━━━━━━┓
1189
+ ┃ Status ┃ Name ┃ Owner ┃ ID ┃ Submit time ┃ Run time ┃ Total Cost ┃
1190
+ ┡━━━━━━━━━━━╇━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━╇━━━━━━━━━━━━━━━━╇━━━━━━━━━━━━━━━━━━━━━━━━━━╇━━━━━━━━━━━━━━━━━━━━━╇━━━━━━━━━━━╇━━━━━━━━━━━━┩
1191
+ │ completed │ workflow_analysis_v1 │ John Smith │ 66b5e5ded52f33061e2468d5 │ 2024-08-09 14:23:10 │ 45m 12s │ $1.2340 │
1192
+ │ completed │ workflow_analysis_v1_resumed │ John Smith │ 66b6f2a1e52f33061e246abc │ 2024-08-10 09:15:22 │ 12m 5s │ $0.3210 │
1193
+ │ running │ workflow_analysis_v2 │ Jane Doe │ 66b7a3b2f52f33061e246def │ 2024-08-11 11:30:45 │ 5m 30s │ $0.1150 │
1194
+ │ failed │ workflow_analysis_test │ John Smith │ 66b8c4d3g52f33061e246ghi │ 2024-08-12 16:42:18 │ 2m 15s │ $0.0450 │
1195
+ │ completed │ workflow_downstream_processing │ Jane Doe │ 66b9d5e4h52f33061e246jkl │ 2024-08-13 08:20:33 │ 28m 40s │ $0.7890 │
1196
+ │ aborted │ workflow_analysis_v1_test2 │ John Smith │ 66bae6f5i52f33061e246mno │ 2024-08-14 13:55:07 │ 1m 8s │ $0.0120 │
1197
+ │ completed │ workflow_final_results │ Jane Doe │ 66bbf807j52f33061e246pqr │ 2024-08-15 10:12:44 │ 18m 22s │ $0.5670 │
1198
+ │ queued │ workflow_reanalysis │ John Smith │ 66bcd918k52f33061e246stu │ 2024-08-16 15:38:19 │ N/A │ N/A │
1199
+ │ completed │ workflow_quality_control │ Jane Doe │ 66bdea29l52f33061e246vwx │ 2024-08-17 07:45:52 │ 8m 15s │ $0.2340 │
1200
+ │ completed │ workflow_variant_calling │ John Smith │ 66befb3am52f33061e246yz1 │ 2024-08-18 12:03:28 │ 55m 48s │ $1.5620 │
1201
+ └───────────┴────────────────────────────────────┴────────────────┴──────────────────────────┴─────────────────────┴───────────┴────────────┘
1202
+ On page 1/2: n = next, p = prev, q = quit
1203
+ ```
1204
+
1205
+ The table displays key information for each related job:
1206
+ - **Status**: Current job state (queued, initializing, running, completed, aborting, aborted, failed)
1207
+ - **Name**: Job name assigned when submitted
1208
+ - **Owner**: User who submitted the job (first name and last name)
1209
+ - **ID**: Job identifier in CloudOS
1210
+ - **Submit time**: When the job was submitted (formatted as YYYY-MM-DD HH:MM:SS)
1211
+ - **Run time**: Actual execution time (formatted as hours, minutes, seconds)
1212
+ - **Total Cost**: Compute cost in USD
1213
+
1214
+ **Pagination controls:**
1215
+ - Press `n` to navigate to the next page
1216
+ - Press `p` to navigate to the previous page
1217
+ - Press `q` to quit and return to the terminal
1218
+
1219
+ The console automatically clears between pages for a clean viewing experience, displaying 10 jobs per page.
1220
+
1221
+ **Export options:**
1222
+
1223
+ Save related analyses to JSON for programmatic analysis:
1224
+
1225
+ ```bash
1226
+ cloudos job related --profile my_profile --job-id 66b5e5ded52f33061e2468d5 --output-format json
1227
+
1228
+ cat related_analyses.json
1229
+ {
1230
+ "66b5e5ded52f33061e2468d5": {
1231
+ "status": "completed",
1232
+ "name": "workflow_analysis_v1",
1233
+ "user_name": "John",
1234
+ "user_surname": "Smith",
1235
+ "_id": "66b5e5ded52f33061e2468d5",
1236
+ "createdAt": "2024-08-09T14:23:10.000Z",
1237
+ "runTime": 2712,
1238
+ "computeCostSpent": 123400
1239
+ },
1240
+ "66b6f2a1e52f33061e246abc": {
1241
+ "status": "completed",
1242
+ "name": "workflow_analysis_v1_resumed",
1243
+ "user_name": "John",
1244
+ "user_surname": "Smith",
1245
+ "_id": "66b6f2a1e52f33061e246abc",
1246
+ "createdAt": "2024-08-10T09:15:22.000Z",
1247
+ "runTime": 725,
1248
+ "computeCostSpent": 32100
1249
+ },
1250
+ ...
1251
+ }
1252
+ ```
1253
+
1254
+ The JSON format includes:
1255
+ - `status`: Job execution status
1256
+ - `name`: Job name
1257
+ - `user_name` and `user_surname`: Owner information
1258
+ - `_id`: Job identifier
1259
+ - `createdAt`: ISO 8601 timestamp of job submission
1260
+ - `runTime`: Execution time in seconds
1261
+ - `computeCostSpent`: Total cost in cents (divide by 100 for dollars)
1262
+
1263
+ **Use cases:**
1264
+
1265
+ Related analyses are particularly useful for:
1266
+ - **Resumed workflows**: Find previous jobs to continue from checkpoints
1267
+ - **Job lineage tracking**: Understand which jobs are part of the same analysis
1268
+ - **Cost analysis**: Compare costs across related jobs
1269
+ - **Debugging**: Identify failed jobs in a workflow series
1270
+ - **Collaboration**: See all jobs from team members working on shared data
1271
+
1272
+ > [!NOTE]
1273
+ > Related jobs are identified by their shared working directory folder ID. Only jobs within the same workspace that use the same working directory will be displayed.
1274
+
1166
1275
  ### Bash Jobs
1167
1276
  Execute bash scripts on CloudOS for custom processing workflows. Bash jobs allow you to run shell commands with custom parameters and are ideal for data preprocessing or simple computational tasks.
1168
1277
 
@@ -30,6 +30,7 @@ from cloudos_cli.configure.configure import (
30
30
  get_shared_config,
31
31
  CLOUDOS_URL
32
32
  )
33
+ from cloudos_cli.related_analyses.related_analyses import related_analyses
33
34
 
34
35
 
35
36
  # GLOBAL VARS
@@ -1385,6 +1386,49 @@ def job_cost(ctx,
1385
1386
  cost_viewer.display_costs(job_id, workspace_id, output_format, verify_ssl)
1386
1387
 
1387
1388
 
1389
@job.command('related')
@click.option('-k',
              '--apikey',
              help='Your CloudOS API key',
              required=True)
@click.option('-c',
              '--cloudos-url',
              help=(f'The CloudOS url you are trying to access to. Default={CLOUDOS_URL}.'),
              default=CLOUDOS_URL,
              required=True)
@click.option('--workspace-id',
              help='The specific CloudOS workspace id.',
              required=True)
@click.option('--job-id',
              help='The job id in CloudOS to get related analyses for.',
              required=True)
@click.option('--output-format',
              help='The desired output format. Default=stdout.',
              type=click.Choice(['stdout', 'json'], case_sensitive=False),
              default='stdout')
@click.option('--disable-ssl-verification',
              help=('Disable SSL certificate verification. Please, remember that this option is ' +
                    'not generally recommended for security reasons.'),
              is_flag=True)
@click.option('--ssl-cert',
              help='Path to your SSL certificate file.')
@click.option('--profile', help='Profile to use from the config file', default=None)
@click.pass_context
@with_profile_config(required_params=['apikey', 'workspace_id'])
def related(ctx,
            apikey,
            cloudos_url,
            workspace_id,
            job_id,
            output_format,
            disable_ssl_verification,
            ssl_cert,
            profile):
    """Retrieve related job analyses in CloudOS.

    Related analyses are jobs that share the same working directory folder
    within the given workspace. Output is either a paginated table on stdout
    or a JSON file, depending on --output-format.
    """
    verify_ssl = ssl_selector(disable_ssl_verification, ssl_cert)
    related_analyses(cloudos_url, apikey, job_id, workspace_id, output_format, verify_ssl)
+
1431
+
1388
1432
  @click.command(help='Clone or resume a job with modified parameters')
1389
1433
  @click.option('-k',
1390
1434
  '--apikey',
@@ -0,0 +1 @@
1
+ __version__ = '2.68.0'
@@ -224,8 +224,44 @@ class Cloudos:
224
224
  if "resumeWorkDir" not in r_json:
225
225
  raise ValueError("Working directories are not available. This may be because the analysis was run without resumable mode enabled, or because intermediate results have since been removed.")
226
226
 
227
- # Check if logs field exists, if not fall back to original folder-based approach
228
- elif "logs" in r_json:
227
+ # If resumeWorkDir exists, use the folders API to get the shared working directory
228
+ resume_workdir_id = r_json.get("resumeWorkDir")
229
+ if resume_workdir_id:
230
+ try:
231
+ # Use folders API to get the actual shared working directory
232
+ workdir_bucket_r = retry_requests_get(f"{cloudos_url}/api/v1/folders",
233
+ params=dict(id=resume_workdir_id, teamId=workspace_id),
234
+ headers=headers, verify=verify)
235
+ if workdir_bucket_r.status_code == 401:
236
+ raise NotAuthorisedException
237
+ elif workdir_bucket_r.status_code >= 400:
238
+ raise BadRequestException(workdir_bucket_r)
239
+
240
+ workdir_bucket_o = workdir_bucket_r.json()
241
+ if len(workdir_bucket_o) > 1:
242
+ raise ValueError(f"Request returned more than one result for folder id {resume_workdir_id}")
243
+ workdir_bucket_info = workdir_bucket_o[0]
244
+
245
+ if workdir_bucket_info["folderType"] == "S3Folder":
246
+ bucket_name = workdir_bucket_info["s3BucketName"]
247
+ bucket_path = workdir_bucket_info["s3Prefix"]
248
+ workdir_path = f"s3://{bucket_name}/{bucket_path}"
249
+ elif workdir_bucket_info["folderType"] == "AzureBlobFolder":
250
+ storage_account = f"az://{workspace_id}.blob.core.windows.net"
251
+ container_name = workdir_bucket_info["blobContainerName"]
252
+ blob_prefix = workdir_bucket_info["blobPrefix"]
253
+ workdir_path = f"{storage_account}/{container_name}/{blob_prefix}"
254
+ else:
255
+ raise ValueError("Unsupported cloud provider")
256
+
257
+ return workdir_path
258
+ except Exception as e:
259
+ # If folders API fails, fall back to logs-based approach
260
+ print(f"Warning: Could not get shared workdir from folders API: {e}")
261
+ pass
262
+
263
+ # Check if logs field exists for fallback approach
264
+ if "logs" in r_json:
229
265
  # Get workdir information from logs object using the same pattern as get_job_logs
230
266
  logs_obj = r_json["logs"]
231
267
  cloud_name, cloud_meta, cloud_storage = find_cloud(self.cloudos_url, self.apikey, workspace_id, logs_obj)
@@ -247,6 +283,7 @@ class Cloudos:
247
283
  workdir_path = f"{storage_account_prefix}/{logs_bucket}/{workdir_path_suffix}"
248
284
  else:
249
285
  raise ValueError("Unsupported cloud provider")
286
+
250
287
  return workdir_path
251
288
  else:
252
289
  # Fallback to original folder-based approach for backward compatibility
@@ -283,6 +320,7 @@ class Cloudos:
283
320
  workdir_path = f"{storage_account}/{container_name}/{blob_prefix}"
284
321
  else:
285
322
  raise ValueError("Unsupported cloud provider")
323
+
286
324
  return workdir_path
287
325
 
288
326
  def _handle_job_access_denied(self, job_id, workspace_id, verify=True):
@@ -13,6 +13,7 @@ import base64
13
13
  from cloudos_cli.utils.array_job import classify_pattern, get_file_or_folder_id, extract_project
14
14
  import os
15
15
  import click
16
+ from datetime import datetime
16
17
 
17
18
 
18
19
  @dataclass
@@ -1265,3 +1266,164 @@ class Job(Cloudos):
1265
1266
  else:
1266
1267
  return obj
1267
1268
 
1269
def get_job_relatedness(self, workspace_id, workdir_folder_id, limit=100, verify=True):
    """Collect every job that shares the given working directory folder.

    All matching jobs are fetched from the jobs API, paginating internally
    until the last page is reached, and summarised per job ID.

    Parameters
    ----------
    workspace_id : str
        The CloudOS workspace ID.
    workdir_folder_id : str
        The working directory folder ID to filter jobs by.
    limit : int
        Batch size for API requests (default: 100). This parameter is kept
        for backwards compatibility but fetches all jobs regardless.
    verify : [bool | str], optional
        Whether to use SSL verification or not. Alternatively, if
        a string is passed, it will be interpreted as the path to
        the SSL certificate file. Default is True.

    Returns
    -------
    dict
        Maps each job ID to a dict with: _id, status, name, user_name,
        user_surname, createdAt, runTime (seconds, or None when the job
        has no start/end timestamps) and computeCostSpent.

    Raises
    ------
    BadRequestException
        If the request fails with a status code indicating an error.
    """
    request_headers = {
        "Content-type": "application/json",
        "apikey": self.apikey
    }
    endpoint = f"{self.cloudos_url}/api/v2/jobs"

    # Page through the API until an empty or short batch signals the end.
    fetched = []
    page = 1
    while True:
        query = {
            "limit": limit,
            "page": page,
            "sort": "-createdAt",
            "archived.status": "false",
            "workDirectory.folderId": workdir_folder_id,
            "teamId": workspace_id
        }
        resp = retry_requests_get(endpoint, params=query, headers=request_headers, verify=verify)
        if resp.status_code >= 400:
            raise BadRequestException(resp)
        batch = json.loads(resp.content).get("jobs", [])
        if not batch:
            break
        fetched.extend(batch)
        if len(batch) < limit:
            # Fewer results than the batch size means this was the last page.
            break
        page += 1

    # Condense each raw job record into the fields callers need.
    summary = {}
    for entry in fetched:
        entry_id = entry.get("_id")
        if not entry_id:
            continue
        # Runtime is derivable only when both timestamps are present and parse.
        duration = None
        started = entry.get("startTime")
        finished = entry.get("endTime")
        if started and finished:
            try:
                begin = datetime.fromisoformat(started.replace('Z', '+00:00'))
                stop = datetime.fromisoformat(finished.replace('Z', '+00:00'))
                duration = (stop - begin).total_seconds()
            except (ValueError, AttributeError):
                duration = None

        owner = entry.get("user", {})
        summary[entry_id] = {
            "_id": entry_id,
            "status": entry.get("status"),
            "name": entry.get("name"),
            "user_name": owner.get("name"),
            "user_surname": owner.get("surname"),
            "createdAt": entry.get("createdAt"),
            "runTime": duration,
            "computeCostSpent": entry.get("computeCostSpent")
        }

    return summary
+
1377
def get_parent_job(self, workspace_id, folder_id, verify=True):
    """Get the ID of the parent job of a given folder.

    Parameters
    ----------
    workspace_id : str
        The CloudOS workspace ID.
    folder_id : str
        The ID of the folder whose parent job is to be retrieved.
    verify : [bool | str], optional
        Whether to use SSL verification or not. Alternatively, if
        a string is passed, it will be interpreted as the path to
        the SSL certificate file. Default is True.

    Returns
    -------
    str or None
        The parent job ID, or None when the folder was not found or has
        no parent job.

    Raises
    ------
    BadRequestException
        If the request fails with a status code indicating an error.
    """
    headers = {
        "Content-type": "application/json",
        "apikey": self.apikey
    }

    params = {
        "id": folder_id,
        "status": "ready",
        "teamId": workspace_id
    }

    url = f"{self.cloudos_url}/api/v1/folders/"
    response = retry_requests_get(url, params=params, headers=headers, verify=verify)

    if response.status_code >= 400:
        raise BadRequestException(response)

    content = json.loads(response.content)

    # The API returns a list of folders; the parent job ID (if any) is on
    # the first item. An empty list or missing "parent" entry yields None.
    if isinstance(content, list) and content:
        parent_job_id = content[0].get("parent", {}).get("id")
    else:
        parent_job_id = None

    # Normalise any falsy value (e.g. empty string) to None for callers.
    return parent_job_id or None