cloudos-cli 2.44.0__tar.gz → 2.45.0__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (38) hide show
  1. {cloudos_cli-2.44.0 → cloudos_cli-2.45.0}/PKG-INFO +3 -1
  2. {cloudos_cli-2.44.0 → cloudos_cli-2.45.0}/README.md +2 -0
  3. {cloudos_cli-2.44.0 → cloudos_cli-2.45.0}/cloudos_cli/__main__.py +17 -5
  4. cloudos_cli-2.45.0/cloudos_cli/_version.py +1 -0
  5. {cloudos_cli-2.44.0 → cloudos_cli-2.45.0}/cloudos_cli/clos.py +77 -18
  6. {cloudos_cli-2.44.0 → cloudos_cli-2.45.0}/cloudos_cli/jobs/job.py +2 -1
  7. {cloudos_cli-2.44.0 → cloudos_cli-2.45.0}/cloudos_cli/utils/__init__.py +2 -1
  8. cloudos_cli-2.45.0/cloudos_cli/utils/last_wf.py +60 -0
  9. {cloudos_cli-2.44.0 → cloudos_cli-2.45.0}/cloudos_cli.egg-info/PKG-INFO +3 -1
  10. {cloudos_cli-2.44.0 → cloudos_cli-2.45.0}/cloudos_cli.egg-info/SOURCES.txt +1 -0
  11. cloudos_cli-2.44.0/cloudos_cli/_version.py +0 -1
  12. {cloudos_cli-2.44.0 → cloudos_cli-2.45.0}/LICENSE +0 -0
  13. {cloudos_cli-2.44.0 → cloudos_cli-2.45.0}/cloudos_cli/__init__.py +0 -0
  14. {cloudos_cli-2.44.0 → cloudos_cli-2.45.0}/cloudos_cli/configure/__init__.py +0 -0
  15. {cloudos_cli-2.44.0 → cloudos_cli-2.45.0}/cloudos_cli/configure/configure.py +0 -0
  16. {cloudos_cli-2.44.0 → cloudos_cli-2.45.0}/cloudos_cli/datasets/__init__.py +0 -0
  17. {cloudos_cli-2.44.0 → cloudos_cli-2.45.0}/cloudos_cli/datasets/datasets.py +0 -0
  18. {cloudos_cli-2.44.0 → cloudos_cli-2.45.0}/cloudos_cli/import_wf/__init__.py +0 -0
  19. {cloudos_cli-2.44.0 → cloudos_cli-2.45.0}/cloudos_cli/import_wf/import_wf.py +0 -0
  20. {cloudos_cli-2.44.0 → cloudos_cli-2.45.0}/cloudos_cli/jobs/__init__.py +0 -0
  21. {cloudos_cli-2.44.0 → cloudos_cli-2.45.0}/cloudos_cli/link/__init__.py +0 -0
  22. {cloudos_cli-2.44.0 → cloudos_cli-2.45.0}/cloudos_cli/link/link.py +0 -0
  23. {cloudos_cli-2.44.0 → cloudos_cli-2.45.0}/cloudos_cli/queue/__init__.py +0 -0
  24. {cloudos_cli-2.44.0 → cloudos_cli-2.45.0}/cloudos_cli/queue/queue.py +0 -0
  25. {cloudos_cli-2.44.0 → cloudos_cli-2.45.0}/cloudos_cli/utils/array_job.py +0 -0
  26. {cloudos_cli-2.44.0 → cloudos_cli-2.45.0}/cloudos_cli/utils/cloud.py +0 -0
  27. {cloudos_cli-2.44.0 → cloudos_cli-2.45.0}/cloudos_cli/utils/details.py +0 -0
  28. {cloudos_cli-2.44.0 → cloudos_cli-2.45.0}/cloudos_cli/utils/errors.py +0 -0
  29. {cloudos_cli-2.44.0 → cloudos_cli-2.45.0}/cloudos_cli/utils/requests.py +0 -0
  30. {cloudos_cli-2.44.0 → cloudos_cli-2.45.0}/cloudos_cli/utils/resources.py +0 -0
  31. {cloudos_cli-2.44.0 → cloudos_cli-2.45.0}/cloudos_cli.egg-info/dependency_links.txt +0 -0
  32. {cloudos_cli-2.44.0 → cloudos_cli-2.45.0}/cloudos_cli.egg-info/entry_points.txt +0 -0
  33. {cloudos_cli-2.44.0 → cloudos_cli-2.45.0}/cloudos_cli.egg-info/requires.txt +0 -0
  34. {cloudos_cli-2.44.0 → cloudos_cli-2.45.0}/cloudos_cli.egg-info/top_level.txt +0 -0
  35. {cloudos_cli-2.44.0 → cloudos_cli-2.45.0}/setup.cfg +0 -0
  36. {cloudos_cli-2.44.0 → cloudos_cli-2.45.0}/setup.py +0 -0
  37. {cloudos_cli-2.44.0 → cloudos_cli-2.45.0}/tests/__init__.py +0 -0
  38. {cloudos_cli-2.44.0 → cloudos_cli-2.45.0}/tests/functions_for_pytest.py +0 -0
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: cloudos_cli
3
- Version: 2.44.0
3
+ Version: 2.45.0
4
4
  Summary: Python package for interacting with CloudOS
5
5
  Home-page: https://github.com/lifebit-ai/cloudos-cli
6
6
  Author: David Piñeyro
@@ -370,6 +370,8 @@ Executing run...
370
370
  Your job took 420 seconds to complete successfully.
371
371
  ```
372
372
 
373
+ When there are duplicate `--workflow-name` entries in the platform, the `--last` flag can be added to use the most recently imported version of that pipeline in the workspace, based on the import date. For example, if the pipeline `lifebit-process` was imported on May 23 2025 and again on May 30 2025, the `--last` flag selects the May 30 2025 import.
374
+
373
375
  #### Send a bash job to CloudOS (sequential sample processing)
374
376
 
375
377
  A bash job can be sent to CloudOS using the command `bash` and the subcommand `job`. In this case, the `--workflow-name` must be a bash job already
@@ -335,6 +335,8 @@ Executing run...
335
335
  Your job took 420 seconds to complete successfully.
336
336
  ```
337
337
 
338
+ When there are duplicate `--workflow-name` entries in the platform, the `--last` flag can be added to use the most recently imported version of that pipeline in the workspace, based on the import date. For example, if the pipeline `lifebit-process` was imported on May 23 2025 and again on May 30 2025, the `--last` flag selects the May 30 2025 import.
339
+
338
340
  #### Send a bash job to CloudOS (sequential sample processing)
339
341
 
340
342
  A bash job can be sent to CloudOS using the command `bash` and the subcommand `job`. In this case, the `--workflow-name` must be a bash job already
@@ -238,6 +238,9 @@ def configure(ctx, profile, make_default):
238
238
  @click.option('--workflow-name',
239
239
  help='The name of a CloudOS workflow or pipeline.',
240
240
  required=True)
241
+ @click.option('--last',
242
+ help=('When the workflows are duplicated, use the latest imported workflow (by date).'),
243
+ is_flag=True)
241
244
  @click.option('--job-config',
242
245
  help=('A config file similar to a nextflow.config file, ' +
243
246
  'but only with the parameters to use with your job.'))
@@ -370,6 +373,7 @@ def run(ctx,
370
373
  workspace_id,
371
374
  project_name,
372
375
  workflow_name,
376
+ last,
373
377
  job_config,
374
378
  parameter,
375
379
  git_commit,
@@ -488,8 +492,8 @@ def run(ctx,
488
492
  if verbose:
489
493
  print('\t...Detecting workflow type')
490
494
  cl = Cloudos(cloudos_url, apikey, cromwell_token)
491
- workflow_type = cl.detect_workflow(workflow_name, workspace_id, verify_ssl)
492
- is_module = cl.is_module(workflow_name, workspace_id, verify_ssl)
495
+ workflow_type = cl.detect_workflow(workflow_name, workspace_id, verify_ssl, last)
496
+ is_module = cl.is_module(workflow_name, workspace_id, verify_ssl, last)
493
497
  if execution_platform == 'hpc' and workflow_type == 'wdl':
494
498
  raise ValueError(f'The workflow {workflow_name} is a WDL workflow. ' +
495
499
  'WDL is not supported on HPC execution platform.')
@@ -529,7 +533,7 @@ def run(ctx,
529
533
  print('\t...Preparing objects')
530
534
  j = jb.Job(cloudos_url, apikey, None, workspace_id, project_name, workflow_name,
531
535
  mainfile=wdl_mainfile, importsfile=wdl_importsfile,
532
- repository_platform=repository_platform, verify=verify_ssl)
536
+ repository_platform=repository_platform, verify=verify_ssl, last=last)
533
537
  if verbose:
534
538
  print('\tThe following Job object was created:')
535
539
  print('\t' + str(j))
@@ -2056,6 +2060,9 @@ def remove_profile(ctx, profile):
2056
2060
  @click.option('--workflow-name',
2057
2061
  help='The name of a CloudOS workflow or pipeline.',
2058
2062
  required=True)
2063
+ @click.option('--last',
2064
+ help=('When the workflows are duplicated, use the latest imported workflow (by date).'),
2065
+ is_flag=True)
2059
2066
  @click.option('-p',
2060
2067
  '--parameter',
2061
2068
  multiple=True,
@@ -2136,6 +2143,7 @@ def run_bash_job(ctx,
2136
2143
  workspace_id,
2137
2144
  project_name,
2138
2145
  workflow_name,
2146
+ last,
2139
2147
  parameter,
2140
2148
  job_name,
2141
2149
  do_not_save_logs,
@@ -2203,7 +2211,7 @@ def run_bash_job(ctx,
2203
2211
 
2204
2212
  j = jb.Job(cloudos_url, apikey, None, workspace_id, project_name, workflow_name,
2205
2213
  mainfile=None, importsfile=None,
2206
- repository_platform=repository_platform, verify=verify_ssl)
2214
+ repository_platform=repository_platform, verify=verify_ssl, last=last)
2207
2215
 
2208
2216
  if job_queue is not None:
2209
2217
  batch = True
@@ -2291,6 +2299,9 @@ def run_bash_job(ctx,
2291
2299
  @click.option('--workflow-name',
2292
2300
  help='The name of a CloudOS workflow or pipeline.',
2293
2301
  required=True)
2302
+ @click.option('--last',
2303
+ help=('When the workflows are duplicated, use the latest imported workflow (by date).'),
2304
+ is_flag=True)
2294
2305
  @click.option('-p',
2295
2306
  '--parameter',
2296
2307
  multiple=True,
@@ -2410,6 +2421,7 @@ def run_bash_array_job(ctx,
2410
2421
  workspace_id,
2411
2422
  project_name,
2412
2423
  workflow_name,
2424
+ last,
2413
2425
  parameter,
2414
2426
  job_name,
2415
2427
  do_not_save_logs,
@@ -2512,7 +2524,7 @@ def run_bash_array_job(ctx,
2512
2524
 
2513
2525
  j = jb.Job(cloudos_url, apikey, None, workspace_id, project_name, workflow_name,
2514
2526
  mainfile=None, importsfile=None,
2515
- repository_platform=repository_platform, verify=verify_ssl)
2527
+ repository_platform=repository_platform, verify=verify_ssl, last=last)
2516
2528
 
2517
2529
  # retrieve columns
2518
2530
  r = j.retrieve_cols_from_array_file(
@@ -0,0 +1 @@
1
+ __version__ = '2.45.0'
@@ -10,6 +10,7 @@ from cloudos_cli.utils.cloud import find_cloud
10
10
  from cloudos_cli.utils.errors import BadRequestException, JoBNotCompletedException, NotAuthorisedException
11
11
  from cloudos_cli.utils.requests import retry_requests_get, retry_requests_post, retry_requests_put
12
12
  import pandas as pd
13
+ from cloudos_cli.utils.last_wf import youngest_workflow_id_by_name
13
14
 
14
15
  # GLOBAL VARS
15
16
  JOB_COMPLETED = 'completed'
@@ -462,7 +463,7 @@ class Cloudos:
462
463
  return df
463
464
 
464
465
  def get_workflow_list(self, workspace_id, verify=True, get_all=True,
465
- page=1, page_size=10, max_page_size=1000,
466
+ page=1, page_size=10, max_page_size=100,
466
467
  archived_status=False):
467
468
  """Get all the workflows from a CloudOS workspace.
468
469
 
@@ -570,7 +571,7 @@ class Cloudos:
570
571
  df = df_full.loc[:, present_columns]
571
572
  return df
572
573
 
573
- def detect_workflow(self, workflow_name, workspace_id, verify=True):
574
+ def detect_workflow(self, workflow_name, workspace_id, verify=True, last=False):
574
575
  """Detects workflow type: nextflow or wdl.
575
576
 
576
577
  Parameters
@@ -590,14 +591,14 @@ class Cloudos:
590
591
  The workflow type detected
591
592
  """
592
593
  # get list with workflow types
593
- wt_all = self.workflow_content_query(workspace_id, workflow_name, verify=verify, query="workflowType")
594
+ wt_all = self.workflow_content_query(workspace_id, workflow_name, verify=verify, query="workflowType", last=last)
594
595
  # make unique
595
596
  wt = list(dict.fromkeys(wt_all))
596
597
  if len(wt) > 1:
597
598
  raise ValueError(f'More than one workflow type detected for {workflow_name}: {wt}')
598
599
  return str(wt[0])
599
600
 
600
- def is_module(self, workflow_name, workspace_id, verify=True):
601
+ def is_module(self, workflow_name, workspace_id, verify=True, last=False):
601
602
  """Detects whether the workflow is a system module or not.
602
603
 
603
604
  System modules use fixed queues, so this check is important to
@@ -620,7 +621,7 @@ class Cloudos:
620
621
  True, if the workflow is a system module, false otherwise.
621
622
  """
622
623
  # get a list of all groups
623
- group = self.workflow_content_query(workspace_id, workflow_name, verify=verify, query="group")
624
+ group = self.workflow_content_query(workspace_id, workflow_name, verify=verify, query="group", last=last)
624
625
 
625
626
  module_groups = ['system-tools',
626
627
  'data-factory-data-connection-etl',
@@ -636,7 +637,7 @@ class Cloudos:
636
637
  return False
637
638
 
638
639
  def get_project_list(self, workspace_id, verify=True, get_all=True,
639
- page=1, page_size=10, max_page_size=1000):
640
+ page=1, page_size=10, max_page_size=100):
640
641
  """Get all the project from a CloudOS workspace.
641
642
 
642
643
  Parameters
@@ -915,8 +916,8 @@ class Cloudos:
915
916
 
916
917
  return project_id
917
918
 
918
- def get_workflow_content(self, workspace_id, workflow_name, verify=True):
919
- """Retrieve the workflow content from API.
919
+ def get_workflow_max_pagination(self, workspace_id, workflow_name, verify=True):
920
+ """Retrieve the workflows max pages from API.
920
921
 
921
922
  Parameters
922
923
  ----------
@@ -931,8 +932,8 @@ class Cloudos:
931
932
 
932
933
  Returns
933
934
  -------
934
- dict
935
- The server response containing workflow details.
935
+ int
936
+ The server response with max pagination for workflows.
936
937
 
937
938
  Raises
938
939
  ------
@@ -944,24 +945,82 @@ class Cloudos:
944
945
  "Content-type": "application/json",
945
946
  "apikey": self.apikey
946
947
  }
948
+ # determine pagination, there might be a lot with the same name
947
949
  url = f"{self.cloudos_url}/api/v3/workflows?teamId={workspace_id}&search={workflow_name}"
948
950
  response = retry_requests_get(url, headers=headers, verify=verify)
949
951
  if response.status_code >= 400:
950
952
  raise BadRequestException(response)
951
- content = json.loads(response.content)
952
- return content
953
+ pag_content = json.loads(response.content)
954
+ max_pagination = pag_content["paginationMetadata"]["Pagination-Count"]
955
+ if max_pagination == 0:
956
+ raise ValueError(f'No workflow found with name: {workflow_name} in workspace: {workspace_id}')
953
957
 
954
- def workflow_content_query(self, workspace_id, workflow_name, verify=True, query="workflowType"):
958
+ return max_pagination
955
959
 
956
- content = self.get_workflow_content(workspace_id, workflow_name, verify=verify)
960
def get_workflow_content(self, workspace_id, workflow_name, verify=True, last=False, max_page_size=100):
    """Retrieve the workflow content from API.

    Parameters
    ----------
    workspace_id : str
        The CloudOS workspace ID to search for the workflow.
    workflow_name : str
        The name of the workflow to search for.
    verify : [bool | str], optional
        Whether to use SSL verification or not. Alternatively, if
        a string is passed, it will be interpreted as the path to
        the SSL certificate file. Default is True.
    last : bool, optional
        When several workflows share `workflow_name`, return only the most
        recently imported one instead of raising. Default is False.
    max_page_size : int, optional
        Maximum page size the API accepts for a single request; results
        beyond this are fetched page by page. Default is 100.

    Returns
    -------
    dict
        The server response containing workflow details.

    Raises
    ------
    BadRequestException
        If any request to retrieve workflows fails with a status code
        indicating an error.
    ValueError
        If no workflow matches `workflow_name`, or several match and
        `last` is False.
    """
    headers = {
        "Content-type": "application/json",
        "apikey": self.apikey
    }
    # determine pagination, there might be a lot with the same name
    max_pagination = self.get_workflow_max_pagination(workspace_id, workflow_name, verify=verify)

    base_url = f"{self.cloudos_url}/api/v3/workflows?teamId={workspace_id}&search={workflow_name}"
    # get all the matching content
    if max_pagination > max_page_size:
        content = {"workflows": []}
        for page_start in range(0, max_pagination, max_page_size):
            page_size = min(max_page_size, max_pagination - page_start)
            page = page_start // max_page_size + 1
            url = f"{base_url}&pageSize={page_size}&page={page}"
            response = retry_requests_get(url, headers=headers, verify=verify)
            # fail fast on the page that errored, before parsing its body;
            # previously only the last page's status was ever checked
            if response.status_code >= 400:
                raise BadRequestException(response)
            # keep structure as a dict
            content["workflows"].extend(json.loads(response.content).get("workflows", []))
    else:
        url = f"{base_url}&pageSize={max_pagination}"
        response = retry_requests_get(url, headers=headers, verify=verify)
        # check the status before attempting to parse the body
        if response.status_code >= 400:
            raise BadRequestException(response)
        # return all content
        content = json.loads(response.content)

    # check for duplicates
    wf = [wf.get("name") for wf in content.get("workflows", []) if wf.get("name") == workflow_name]

    if len(wf) == 0 or len(content["workflows"]) == 0:
        raise ValueError(f'No workflow found with name: {workflow_name} in workspace: {workspace_id}')
    if len(wf) > 1 and not last:
        raise ValueError(f'More than one workflow found with name: {workflow_name}. ' +
                         "To run the last imported workflow use '--last' flag.")
    # exactly one match, or duplicates with last=True: keep only the
    # youngest import, preserving the {"workflows": [...]} structure
    content = youngest_workflow_id_by_name(content, workflow_name)
    return content
1020
+
1021
def workflow_content_query(self, workspace_id, workflow_name, verify=True, query="workflowType", last=False):
    """Extract the `query` field of every workspace workflow named `workflow_name`.

    Parameters
    ----------
    workspace_id : str
        The CloudOS workspace ID to search for the workflow.
    workflow_name : str
        The name of the workflow to search for.
    verify : [bool | str], optional
        Whether to use SSL verification or not. Alternatively, if
        a string is passed, it will be interpreted as the path to
        the SSL certificate file. Default is True.
    query : str, optional
        The workflow field to extract, e.g. "workflowType" or "group".
        Default is "workflowType".
    last : bool, optional
        Passed through to `get_workflow_content`; when workflows are
        duplicated, use only the latest imported one. Default is False.

    Returns
    -------
    list
        The `query` field value of each matching workflow returned by
        `get_workflow_content` (None where the field is absent).
    """

    content = self.get_workflow_content(workspace_id, workflow_name, verify=verify, last=last)

    # use 'query' to look in the content
    return [wf.get(query) for wf in content.get("workflows", []) if wf.get("name") == workflow_name]
@@ -52,6 +52,7 @@ class Job(Cloudos):
52
52
  workspace_id: str
53
53
  project_name: str
54
54
  workflow_name: str
55
+ last: bool = False
55
56
  verify: Union[bool, str] = True
56
57
  mainfile: str = None
57
58
  importsfile: str = None
@@ -148,7 +149,7 @@ class Job(Cloudos):
148
149
  raise ValueError('Your specified resource is not supported. ' +
149
150
  f'Use one of the following: {allowed_resources}')
150
151
  if resource == 'workflows':
151
- content = self.get_workflow_content(workspace_id, name, verify=verify)
152
+ content = self.get_workflow_content(workspace_id, name, verify=verify, last=self.last)
152
153
  for element in content["workflows"]:
153
154
  if (element["name"] == name and element["workflowType"] == "docker" and
154
155
  not element["archived"]["status"]):
@@ -9,5 +9,6 @@ from .cloud import find_cloud
9
9
  from .cloud import find_cloud
10
10
  from .array_job import is_valid_regex, is_glob_pattern, is_probably_regex, classify_pattern, generate_datasets_for_project, get_file_or_folder_id
11
11
  from .details import get_path
12
+ from .last_wf import youngest_workflow_id_by_name
12
13
 
13
- __all__ = ['errors', 'requests', 'resources', 'cloud', 'details', 'array_job']
14
+ __all__ = ['errors', 'requests', 'resources', 'cloud', 'details', 'array_job', 'last_wf']
@@ -0,0 +1,60 @@
1
+ from datetime import datetime, timezone
2
+
3
+
4
+ def _parse_iso8601_z(dt_str):
5
+ """
6
+ Parse an ISO8601 timestamp string that may end with 'Z' (UTC).
7
+ Returns a timezone-aware datetime, or None if parsing fails.
8
+ """
9
+ if not dt_str:
10
+ return None
11
+ if dt_str.endswith('Z'):
12
+ dt_str = dt_str[:-1] + '+00:00'
13
+ try:
14
+ return datetime.fromisoformat(dt_str)
15
+ except ValueError:
16
+ return None
17
+
18
+
19
+ def youngest_workflow_id_by_name(content, target_name, ignore_case=False, return_workflow=True):
20
+ """
21
+ Find the most recently created workflow whose .name matches `target_name`.
22
+
23
+ Parameters
24
+ ----------
25
+ content : dict
26
+ API response dict containing top-level "workflows".
27
+ target_name : str
28
+ Name to match.
29
+ ignore_case : bool, default False
30
+ If True, case-insensitive comparison.
31
+ return_workflow : bool, default False
32
+ If True, return the whole workflow dict; else return its _id.
33
+
34
+ Returns
35
+ -------
36
+ str | dict | None
37
+ _id (default), full workflow dict (if return_workflow=True), or None if no match.
38
+ """
39
+ workflows = content.get('workflows') or []
40
+ if ignore_case:
41
+ target_cmp = target_name.lower()
42
+ matches = [wf for wf in workflows if wf.get('name', '').lower() == target_cmp]
43
+ else:
44
+ matches = [wf for wf in workflows if wf.get('name') == target_name]
45
+
46
+ if not matches:
47
+ return None
48
+
49
+ def sort_key(wf):
50
+ created = _parse_iso8601_z(wf.get('createdAt'))
51
+ updated = _parse_iso8601_z(wf.get('updatedAt'))
52
+ # Prefer createdAt; fall back to updatedAt; else epoch 0
53
+ return created or updated or datetime.fromtimestamp(0, tz=timezone.utc)
54
+
55
+ youngest = max(matches, key=sort_key)
56
+ # keep structure as dictionary, will return inner dictionary just for the selected workflow
57
+ youngest_d = {"workflows":[ youngest ]}
58
+ youngest_id = youngest.get('_id')
59
+
60
+ return youngest_d if return_workflow else youngest_id
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: cloudos_cli
3
- Version: 2.44.0
3
+ Version: 2.45.0
4
4
  Summary: Python package for interacting with CloudOS
5
5
  Home-page: https://github.com/lifebit-ai/cloudos-cli
6
6
  Author: David Piñeyro
@@ -370,6 +370,8 @@ Executing run...
370
370
  Your job took 420 seconds to complete successfully.
371
371
  ```
372
372
 
373
+ When there are duplicate `--workflow-name` entries in the platform, the `--last` flag can be added to use the most recently imported version of that pipeline in the workspace, based on the import date. For example, if the pipeline `lifebit-process` was imported on May 23 2025 and again on May 30 2025, the `--last` flag selects the May 30 2025 import.
374
+
373
375
  #### Send a bash job to CloudOS (sequential sample processing)
374
376
 
375
377
  A bash job can be sent to CloudOS using the command `bash` and the subcommand `job`. In this case, the `--workflow-name` must be a bash job already
@@ -28,6 +28,7 @@ cloudos_cli/utils/array_job.py
28
28
  cloudos_cli/utils/cloud.py
29
29
  cloudos_cli/utils/details.py
30
30
  cloudos_cli/utils/errors.py
31
+ cloudos_cli/utils/last_wf.py
31
32
  cloudos_cli/utils/requests.py
32
33
  cloudos_cli/utils/resources.py
33
34
  tests/__init__.py
@@ -1 +0,0 @@
1
- __version__ = '2.44.0'
File without changes
File without changes
File without changes