yellowdog-python-examples 8.2.1__py3-none-any.whl → 8.3.1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- yellowdog_cli/__init__.py +1 -1
- yellowdog_cli/abort.py +11 -11
- yellowdog_cli/admin.py +2 -2
- yellowdog_cli/application.py +104 -0
- yellowdog_cli/boost.py +3 -3
- yellowdog_cli/cancel.py +9 -9
- yellowdog_cli/cloudwizard.py +4 -4
- yellowdog_cli/compare.py +6 -6
- yellowdog_cli/create.py +56 -54
- yellowdog_cli/delete.py +10 -10
- yellowdog_cli/download.py +15 -15
- yellowdog_cli/finish.py +9 -9
- yellowdog_cli/instantiate.py +19 -17
- yellowdog_cli/list.py +45 -43
- yellowdog_cli/provision.py +28 -28
- yellowdog_cli/remove.py +28 -26
- yellowdog_cli/resize.py +9 -9
- yellowdog_cli/show.py +29 -27
- yellowdog_cli/shutdown.py +9 -9
- yellowdog_cli/submit.py +29 -29
- yellowdog_cli/terminate.py +8 -8
- yellowdog_cli/upload.py +11 -11
- yellowdog_cli/utils/args.py +2 -0
- yellowdog_cli/utils/cloudwizard_aws.py +32 -32
- yellowdog_cli/utils/cloudwizard_azure.py +27 -27
- yellowdog_cli/utils/cloudwizard_common.py +12 -10
- yellowdog_cli/utils/cloudwizard_gcp.py +8 -8
- yellowdog_cli/utils/csv_data.py +7 -7
- yellowdog_cli/utils/entity_utils.py +85 -20
- yellowdog_cli/utils/follow_utils.py +5 -5
- yellowdog_cli/utils/interactive.py +8 -8
- yellowdog_cli/utils/load_config.py +11 -11
- yellowdog_cli/utils/load_resources.py +4 -4
- yellowdog_cli/utils/misc_utils.py +6 -3
- yellowdog_cli/utils/printing.py +10 -9
- yellowdog_cli/utils/provision_utils.py +2 -2
- yellowdog_cli/utils/settings.py +1 -0
- yellowdog_cli/utils/start_hold_common.py +7 -7
- yellowdog_cli/utils/submit_utils.py +5 -5
- yellowdog_cli/utils/upload_utils.py +3 -3
- yellowdog_cli/utils/variables.py +5 -5
- yellowdog_cli/utils/wrapper.py +32 -53
- {yellowdog_python_examples-8.2.1.dist-info → yellowdog_python_examples-8.3.1.dist-info}/METADATA +3 -2
- yellowdog_python_examples-8.3.1.dist-info/RECORD +65 -0
- {yellowdog_python_examples-8.2.1.dist-info → yellowdog_python_examples-8.3.1.dist-info}/entry_points.txt +1 -0
- yellowdog_python_examples-8.2.1.dist-info/RECORD +0 -64
- {yellowdog_python_examples-8.2.1.dist-info → yellowdog_python_examples-8.3.1.dist-info}/WHEEL +0 -0
- {yellowdog_python_examples-8.2.1.dist-info → yellowdog_python_examples-8.3.1.dist-info}/licenses/LICENSE +0 -0
- {yellowdog_python_examples-8.2.1.dist-info → yellowdog_python_examples-8.3.1.dist-info}/top_level.txt +0 -0
yellowdog_cli/create.py
CHANGED
@@ -55,7 +55,7 @@ from yellowdog_cli.utils.entity_utils import (
     find_compute_requirement_template_id_by_name,
     find_compute_source_template_id_by_name,
     find_image_name_or_id,
-
+    get_application_group_summaries,
     get_application_id_by_name,
     get_group_id_by_name,
     get_group_name_by_id,
@@ -69,8 +69,8 @@ from yellowdog_cli.utils.interactive import confirmed
 from yellowdog_cli.utils.load_resources import load_resource_specifications
 from yellowdog_cli.utils.printing import (
     print_error,
+    print_info,
     print_json,
-    print_log,
     print_warning,
 )
 from yellowdog_cli.utils.settings import (
@@ -155,7 +155,7 @@ def create_resources(
     resources = deepcopy(resources)  # Avoid overwriting the input argument

     if ARGS_PARSER.dry_run:
-
+        print_info(
             "Dry-run: displaying processed JSON resource specifications. Note:"
             " 'resource' property is removed."
         )
@@ -280,7 +280,7 @@ def create_compute_source_template(resource: Dict):
         compute_source = CLIENT.compute_client.add_compute_source_template(
             compute_source_template
         )
-
+        print_info(f"Created Compute Source Template '{name}' ({compute_source.id})")
     else:
         if not confirmed(f"Update existing Compute Source Template '{name}'?"):
             return
@@ -288,7 +288,7 @@ def create_compute_source_template(resource: Dict):
         compute_source = CLIENT.compute_client.update_compute_source_template(
             compute_source_template
         )
-
+        print_info(
             f"Updated existing Compute Source Template '{name}'"
             f" ({compute_source.id})"
         )
@@ -364,7 +364,7 @@ def create_compute_requirement_template(resource: Dict):
             _get_images_id(source_image_id, source, PROP_IMAGE_ID)

     if source_template_substitutions > 0:
-
+        print_info(
             f"Replaced {source_template_substitutions} Compute Source Template name(s) with ID(s)"
         )

@@ -395,7 +395,7 @@ def create_compute_requirement_template(resource: Dict):
         )
         global CLEAR_CRT_CACHE
         CLEAR_CRT_CACHE = True
-
+        print_info(f"Created Compute Requirement Template '{name}' ({template.id})")
         if ARGS_PARSER.quiet:
             print(template.id)
         return
@@ -409,7 +409,9 @@ def create_compute_requirement_template(resource: Dict):
         template = CLIENT.compute_client.update_compute_requirement_template(
             compute_template
         )
-
+        print_info(
+            f"Updated existing Compute Requirement Template '{name}' ({template.id})"
+        )
         if ARGS_PARSER.quiet:
             print(template.id)

@@ -430,7 +432,7 @@ def create_keyring(resource: Dict, show_secrets: bool = False):
         if not confirmed(f"Keyring '{name}' already exists: delete and recreate?"):
             return
         CLIENT.keyring_client.delete_keyring_by_name(name)
-
+        print_info(f"Deleted Keyring '{name}'")

     try:
         keyring, keyring_password = create_keyring_via_api(name, description)
@@ -439,7 +441,7 @@ def create_keyring(resource: Dict, show_secrets: bool = False):
             if ARGS_PARSER.show_keyring_passwords or show_secrets
             else "<REDACTED>"
         )
-
+        print_info(
             f"Created Keyring '{name}' ({keyring.id}): Password = {keyring_password}"
         )
         if ARGS_PARSER.quiet:
@@ -483,7 +485,7 @@ def create_credential(resource: Dict):
     credential = _get_model_object(credential_type, credential_data)
     try:
         CLIENT.keyring_client.put_credential_by_name(keyring_name, credential)
-
+        print_info(f"Added Credential '{name}' to Keyring '{keyring_name}'")
     except HTTPError as e:
         print_error(f"Failed to add Credential '{name}' to Keyring '{keyring_name}'")
         if e.response.status_code == 400:
@@ -531,7 +533,7 @@ def create_image_family(resource):
         # This will update the Image Family but not its constituent
         # Image Group/Image resources
         CLIENT.images_client.update_image_family(image_family)
-
+        print_info(
             f"Updated existing Machine Image Family '{fq_name}' ('{image_family.id}')"
         )
         if ARGS_PARSER.quiet:
@@ -541,7 +543,7 @@ def create_image_family(resource):
         # This will create the Image Family and all of its constituent
         # Image Group/Image resources
         image_family = _create_image_family(image_family, fq_name)
-
+        print_info(f"Created Machine Image Family '{fq_name}' ({image_family.id})")
         if ARGS_PARSER.quiet:
             print(image_family.id)
     else:
@@ -558,7 +560,7 @@ def create_image_family(resource):
         if existing_image_group.name not in updated_image_group_names:
             if confirmed(f"Remove existing Image Group '{existing_image_group.name}'?"):
                 CLIENT.images_client.delete_image_group(existing_image_group)
-
+                print_info(f"Deleted Image Group '{existing_image_group.name}'")

     # Update Image Groups
     for image_group in image_groups:
@@ -590,7 +592,7 @@ def _create_image_group(
             return
         image_group.id = existing_image_group.id
         CLIENT.images_client.update_image_group(image_group)
-
+        print_info(f"Updated existing Machine Image Group '{image_group.name}'")
         if ARGS_PARSER.quiet:
             print(image_group.id)
     except HTTPError as e:
@@ -598,7 +600,7 @@ def _create_image_group(
         image_group = CLIENT.images_client.add_image_group(
             image_family, image_group
         )
-
+        print_info(f"Created Machine Image Group '{image_group.name}'")
         if ARGS_PARSER.quiet:
             print(image_group.id)
     else:
@@ -617,7 +619,7 @@ def _create_image_group(
         if existing_image.name not in updated_image_names:
             if confirmed(f"Remove existing Image '{existing_image.name}'?"):
                 CLIENT.images_client.delete_image(existing_image)
-
+                print_info(f"Deleted Image '{existing_image.name}'")

     # Update Images
     for image in images:
@@ -641,10 +643,10 @@ def _create_image(image: MachineImage, image_group: MachineImageGroup):
         if image.id is not None:  # Existing Image
             if confirmed(f"Update existing Machine Image '{image.name}'?"):
                 image = CLIENT.images_client.update_image(image)
-
+                print_info(f"Updated existing Machine Image '{image.name}'")
         else:  # New Image
             image = CLIENT.images_client.add_image(image_group, image)
-
+            print_info(f"Created Machine Image '{image.name}'")
     except InvalidRequestException as e:
         print_error(f"Unable to create/update Image '{image.name}': {e}")

@@ -667,7 +669,7 @@ def create_namespace_configuration(resource: Dict):
     )
     for config in namespace_configurations:
         if config.namespace == namespace:
-
+            print_info(
                 f"Updating existing Namespace Storage Configuration '{namespace}'"
             )

@@ -676,7 +678,7 @@ def create_namespace_configuration(resource: Dict):
         CLIENT.object_store_client.put_namespace_storage_configuration(
             namespace_configuration
         )
-
+        print_info(f"Created/updated Namespace Storage Configuration '{namespace}'")
     except Exception as e:
         print_error(
             "Unable to create/update Namespace Storage Configuration"
@@ -702,13 +704,13 @@ def create_configured_worker_pool(resource: Dict):
     cwp_response: AddConfiguredWorkerPoolResponse = (
         CLIENT.worker_pool_client.add_configured_worker_pool(cwp_request)
     )
-
+    print_info(
         f"Created Configured Worker Pool '{name}' ({cwp_response.workerPool.id})"
     )
-
+    print_info(
         f" Worker Pool Token = '{cwp_response.token.secret}'"
     )
-
+    print_info(
         " Worker Pool Expiry Time = "
         f"{str(cwp_response.token.expiryTime).split('.')[0]}"
     )
@@ -745,7 +747,7 @@ def create_allowance(resource: Dict):
                 f"Compute Source Template name '{template_name_or_id}' not found"
             )
             return
-
+        print_info(
             f"Replaced Source Template name '{template_name_or_id}'"
             f" with ID {template_id}"
         )
@@ -770,7 +772,7 @@ def create_allowance(resource: Dict):
                 f"Compute Requirement Template name '{template_name_or_id}' not found"
             )
             return
-
+        print_info(
             f"Replaced Requirement Template name '{template_name_or_id}'"
             f" with ID {template_id}"
         )
@@ -790,7 +792,7 @@ def create_allowance(resource: Dict):
            raise Exception(
                f"Unable to parse '{PROP_EFFECTIVE_FROM}' date '{effective_from}'"
            )
-
+        print_info(
            f"Property '{PROP_EFFECTIVE_FROM}' = '{effective_from}' set to "
            f"'{_display_datetime(resource[PROP_EFFECTIVE_FROM])}'"
        )
@@ -802,7 +804,7 @@ def create_allowance(resource: Dict):
            raise Exception(
                f"Unable to parse '{PROP_EFFECTIVE_UNTIL}' date '{effective_until}'"
            )
-
+        print_info(
            f"Property '{PROP_EFFECTIVE_UNTIL}' = '{effective_until}' set to "
            f"'{_display_datetime(resource[PROP_EFFECTIVE_UNTIL])}'"
        )
@@ -823,7 +825,7 @@ def create_allowance(resource: Dict):
     if ARGS_PARSER.match_allowances_by_description:
         # Look for existing Allowances that match the description string
         if description is not None:
-
+            print_info(
                 "Checking for and removing existing Allowance(s) matching "
                 f"description '{description}'"
             )
@@ -834,9 +836,9 @@ def create_allowance(resource: Dict):
             _get_model_object(type, resource)
         )
         if description is None:
-
+            print_info(f"Created new Allowance {allowance.id}")
         else:
-
+            print_info(f"Created new Allowance '{description}' ({allowance.id})")
     except Exception as e:
         print_error(f"Unable to create Allowance: {e}")
         return
@@ -886,11 +888,11 @@ def create_attribute_definition(resource: Dict, resource_type: str):
     }

     # Attempt attribute creation
-
+    print_info(f"Attempting to create or update Attribute Definition '{name}'")
     response = post(url=url, headers=headers, json=payload)

     if response.status_code == 200:
-
+        print_info(f"Created new Attribute Definition '{name}'")
         return

     if "Attribute already exists" in response.text:
@@ -899,7 +901,7 @@ def create_attribute_definition(resource: Dict, resource_type: str):

         response = put(url=url, headers=headers, json=payload)
         if response.status_code == 200:
-
+            print_info(f"Updated existing Attribute Definition '{name}'")
             return

     raise Exception(f"HTTP {response.status_code} ({response.text})")
@@ -938,7 +940,7 @@ def create_namespace_policy(resource: Dict):
         )
         return

-
+    print_info(
         f"Created or updated Namespace Policy '{namespace_policy.namespace}' with "
         f"'autoscalingMaxNodes={namespace_policy.autoscalingMaxNodes}'"
     )
@@ -1048,10 +1050,10 @@ def create_group(resource: Dict):
            RoleScope(role_spec.global_, role_spec.namespaces),
        )
        if role_spec.global_:
-
+            print_info(f"Added/updated role '{role_spec.name}' with global scope")
        else:
            ns_list_quoted = [f"'{ns}'" for ns in role_spec.namespaces]
-
+            print_info(
                f"Added/updated role '{role_spec.name}' scoped to "
                f"namespace(s): {', '.join(ns_list_quoted)}"
            )
@@ -1065,7 +1067,7 @@ def create_group(resource: Dict):
            group_id_,
            role_spec.id,
        )
-
+        print_info(f"Removed role '{role_spec.name}'")

    def get_roles_to_remove(
        existing_roles: List[GroupRole], new_roles: List[RoleSpecification]
@@ -1101,7 +1103,7 @@ def create_group(resource: Dict):
        group_: Group = CLIENT.account_client.add_group(
            AddGroupRequest(name=name, description=description)
        )
-
+        print_info(f"Created Group '{group_.name}' ({group_.id})")
        clear_group_caches()
        return group_

@@ -1115,7 +1117,7 @@ def create_group(resource: Dict):
        group_: Group = CLIENT.account_client.update_group(
            group_id_, UpdateGroupRequest(name=name, description=description)
        )
-
+        print_info(f"Updated Group '{group_.name}' ({group_.id})")
        return group_

    # Main logic
@@ -1156,17 +1158,17 @@ def create_application(resource: Dict):
        Helper function to add/remove groups from an application.
        """
        current_group_ids = {
-            group.id for group in
+            group.id for group in get_application_group_summaries(CLIENT, app.id)
        }

        if current_group_ids == new_group_ids:
-
+            print_info("No Group additions or deletions required")
            return

        group_ids_to_remove = current_group_ids - new_group_ids
        for group_id in group_ids_to_remove:
            CLIENT.account_client.remove_application_from_group(group_id, app.id)
-
+            print_info(
                f"Removed Group '{get_group_name_by_id(CLIENT, group_id)}' "
                f"from Application ({group_id})"
            )
@@ -1174,7 +1176,7 @@ def create_application(resource: Dict):
        group_ids_to_add = new_group_ids - current_group_ids
        for group_id in group_ids_to_add:
            CLIENT.account_client.add_application_to_group(group_id, app.id)
-
+            print_info(
                f"Added Group '{get_group_name_by_id(CLIENT, group_id)}' "
                f"to Application ({group_id})"
            )
@@ -1183,8 +1185,8 @@ def create_application(resource: Dict):
        """
        Helper function to display the app key and secret.
        """
-
-
+        print_info(f"Application Key ID = '{api_key.id}'", override_quiet=True)
+        print_info(f"Application Key Secret = '{api_key.secret}'", override_quiet=True)

    def add_application():
        """
@@ -1194,7 +1196,7 @@ def create_application(resource: Dict):
            _get_model_object(RN_ADD_APPLICATION_REQUEST, resource)
        )
        app = app_response.application
-
+        print_info(f"Created Application '{app.name}' ({app.id})")
        show_key_and_secret(app_response.apiKey)
        clear_application_caches()
        update_groups(app)
@@ -1210,11 +1212,11 @@ def create_application(resource: Dict):
        app: Application = CLIENT.account_client.update_application(
            app_id, _get_model_object(RN_UPDATE_APPLICATION_REQUEST, resource)
        )
-
+        print_info(f"Updated Application '{app.name}' ({app.id})")
        update_groups(app)

        if ARGS_PARSER.regenerate_app_keys:
-
+            print_info("Regenerating Application key and secret")
            api_key = CLIENT.account_client.regenerate_application_api_key(app_id)
            if api_key is None:
                print_error("New API key/secret not returned")
@@ -1268,7 +1270,7 @@ def update_user(resource: Dict, internal_user: bool):
        current_group_ids = {group.id for group in get_user_groups(CLIENT, user.id)}

        if current_group_ids == new_group_ids:
-
+            print_info("No Group additions or deletions required")
            return

        if not confirmed(f"Update Groups for User '{username}' ({user.id})?"):
@@ -1277,7 +1279,7 @@ def update_user(resource: Dict, internal_user: bool):
        group_ids_to_remove = current_group_ids - new_group_ids
        for group_id in group_ids_to_remove:
            CLIENT.account_client.remove_user_from_group(group_id, user.id)
-
+            print_info(
                f"Removed Group '{get_group_name_by_id(CLIENT, group_id)}' "
                f"({group_id})"
            )
@@ -1285,7 +1287,7 @@ def update_user(resource: Dict, internal_user: bool):
        group_ids_to_add = new_group_ids - current_group_ids
        for group_id in group_ids_to_add:
            CLIENT.account_client.add_user_to_group(group_id, user.id)
-
+            print_info(
                f"Added Group '{get_group_name_by_id(CLIENT, group_id)}' "
                f"({group_id})"
            )
@@ -1311,7 +1313,7 @@ def update_user(resource: Dict, internal_user: bool):

    username = user.username if isinstance(user, InternalUser) else user.name
    update_groups()
-
+    print_info(f"Actions complete for User '{username}' ({user.id})")


 def create_namespace(resource: Dict):
@@ -1334,7 +1336,7 @@ def create_namespace(resource: Dict):
        else:
            raise Exception(f"Failed to create namespace '{name}' ({e})")

-
+    print_info(f"Created namespace '{name}' ({namespace_id})")

    if ARGS_PARSER.quiet:
        print(namespace_id)
yellowdog_cli/delete.py
CHANGED
@@ -10,7 +10,7 @@ from yellowdog_cli.utils.entity_utils import (
 )
 from yellowdog_cli.utils.interactive import confirmed, select
 from yellowdog_cli.utils.misc_utils import unpack_namespace_in_prefix
-from yellowdog_cli.utils.printing import
+from yellowdog_cli.utils.printing import print_info
 from yellowdog_cli.utils.wrapper import ARGS_PARSER, CLIENT, CONFIG_COMMON, main_wrapper


@@ -19,14 +19,14 @@ def main():

     # Non-exact matching of namespace property
     if ARGS_PARSER.non_exact_namespace_match:
-
+        print_info("Using non-exact namespace matching")
         matching_namespaces = get_non_exact_namespace_matches(
             CLIENT, CONFIG_COMMON.namespace
         )
         if len(matching_namespaces) == 0:
-
+            print_info("No matching namespaces")
             return
-
+        print_info(f"{len(matching_namespaces)} namespace(s) to consider")
         for namespace in matching_namespaces:
             _, tag = unpack_namespace_in_prefix(namespace, CONFIG_COMMON.name_tag)
             delete_object_paths(namespace, tag, ARGS_PARSER.all)
@@ -56,7 +56,7 @@ def delete_object_paths(namespace: str, prefix: str, flat: bool):
     Delete Object Paths matching the namespace and prefix. Set 'flat' to
     enumerate Object Paths at all levels.
     """
-
+    print_info(
         f"Deleting Object Paths in namespace '{namespace}' and "
         f"prefix starting with '{prefix}'"
     )
@@ -64,7 +64,7 @@ def delete_object_paths(namespace: str, prefix: str, flat: bool):
     object_paths_to_delete = list_matching_object_paths(CLIENT, namespace, prefix, flat)

     if len(object_paths_to_delete) == 0:
-
+        print_info("No matching Object Paths")
         return

     object_paths_to_delete = select(CLIENT, object_paths_to_delete)
@@ -72,15 +72,15 @@ def delete_object_paths(namespace: str, prefix: str, flat: bool):
     if len(object_paths_to_delete) > 0 and confirmed(
         f"Delete {len(object_paths_to_delete)} Object Path(s)?"
     ):
-
+        print_info(f"{len(object_paths_to_delete)} Object Path(s) to Delete")
         CLIENT.object_store_client.delete_objects(
             namespace=namespace, object_paths=object_paths_to_delete
         )
         for object_path in object_paths_to_delete:
-
-
+            print_info(f"Deleted Object Path: '{object_path.displayName}'")
+        print_info(f"Deleted {len(object_paths_to_delete)} Object Path(s)")
     else:
-
+        print_info("Nothing to delete")


 # Entry point
yellowdog_cli/download.py
CHANGED
@@ -21,7 +21,7 @@ from yellowdog_cli.utils.entity_utils import (
 )
 from yellowdog_cli.utils.interactive import confirmed, select
 from yellowdog_cli.utils.misc_utils import unpack_namespace_in_prefix
-from yellowdog_cli.utils.printing import print_batch_download_files,
+from yellowdog_cli.utils.printing import print_batch_download_files, print_info
 from yellowdog_cli.utils.wrapper import ARGS_PARSER, CLIENT, CONFIG_COMMON, main_wrapper


@@ -30,14 +30,14 @@ def main():

     # Non-exact matching of namespace property
     if ARGS_PARSER.non_exact_namespace_match:
-
+        print_info("Using non-exact namespace matching")
         matching_namespaces = get_non_exact_namespace_matches(
             CLIENT, CONFIG_COMMON.namespace
         )
         if len(matching_namespaces) == 0:
-
+            print_info("No matching namespaces")
             return
-
+        print_info(f"{len(matching_namespaces)} namespace(s) to consider")
         for namespace in matching_namespaces:
             _, tag = unpack_namespace_in_prefix(namespace, CONFIG_COMMON.name_tag)
             download_object_paths(
@@ -73,7 +73,7 @@ def download_object_paths(
     """
     Download Object Paths matching namespace, prefix and pattern.
     """
-
+    print_info(
         f"Downloading Objects in namespace '{namespace}' and "
         f"prefix starting with '{prefix}'"
         + ("" if pattern is None else f", matching name pattern '{pattern}'")
@@ -84,22 +84,22 @@ def download_object_paths(
     )

     if len(object_paths_to_download) == 0:
-
+        print_info("No matching Object Paths")
         return

     object_paths_to_download = select(CLIENT, object_paths_to_download)

     if len(object_paths_to_download) == 0:
-
+        print_info("No Objects Paths to include")
         return

-
+    print_info("Note: existing local objects will be overwritten without warning")
     if not confirmed(
         f"Download matching objects in {len(object_paths_to_download)} Object Path(s)?"
     ):
         return

-
+    print_info(f"{len(object_paths_to_download)} Object Path(s) to include")

     download_dir: str = _create_download_directory(
         "." if ARGS_PARSER.directory == "" else ARGS_PARSER.directory
@@ -118,7 +118,7 @@ def download_object_paths(
             if pattern is None
             else f"{object_path.name}{pattern.lstrip('/')}"
         )
-
+        print_info(f"Finding object paths matching '{object_name_pattern}'")
         download_batch_builder.find_source_objects(
             namespace=namespace,
             object_name_pattern=object_name_pattern,
@@ -129,14 +129,14 @@ def download_object_paths(
     )

     if download_batch is None:
-
+        print_info(f"No matching Objects found in included Object Paths")
         return

     object_count = print_batch_download_files(
         download_batch_builder, ARGS_PARSER.flatten_download_paths
     )

-
+    print_info("Starting batch download")
     download_batch.start()
     future: futures.Future = download_batch.when_status_matches(
         lambda status: status == FileTransferStatus.Completed
@@ -144,7 +144,7 @@ def download_object_paths(
     CLIENT.object_store_client.start_transfers()
     futures.wait((future,))

-
+    print_info(f"Downloaded {object_count} Object(s)")


 def _create_download_directory(directory_name: str) -> str:
@@ -154,9 +154,9 @@ def _create_download_directory(directory_name: str) -> str:
     """
     path = Path(directory_name).resolve()
     if path.exists():
-
+        print_info(f"Downloading to existing directory: '{path}'")
     else:
-
+        print_info(f"Creating download directory: '{path}'")
         path.mkdir(parents=True, exist_ok=True)
     return str(path)

yellowdog_cli/finish.py
CHANGED
@@ -19,7 +19,7 @@ from yellowdog_cli.utils.entity_utils import (
 from yellowdog_cli.utils.follow_utils import follow_ids
 from yellowdog_cli.utils.interactive import confirmed, select
 from yellowdog_cli.utils.misc_utils import link_entity
-from yellowdog_cli.utils.printing import print_error,
+from yellowdog_cli.utils.printing import print_error, print_info, print_warning
 from yellowdog_cli.utils.wrapper import ARGS_PARSER, CLIENT, CONFIG_COMMON, main_wrapper


@@ -29,7 +29,7 @@ def main():
        _finish_work_requirements_by_name_or_id(ARGS_PARSER.work_requirement_names)
        return

-
+    print_info(
        "Finishing Work Requirements in namespace "
        f"'{CONFIG_COMMON.namespace}' with tags "
        f"including '{CONFIG_COMMON.name_tag}'"
@@ -69,7 +69,7 @@ def main():
                    CLIENT.work_client.get_work_requirement_by_id(work_summary.id)
                )
                finished_count += 1
-
+                print_info(
                    f"Finished {link_entity(CONFIG_COMMON.url, work_requirement)} "
                    f"('{work_summary.name}')"
                )
@@ -80,22 +80,22 @@ def main():
                )

            elif work_summary.status == WorkRequirementStatus.FINISHING:
-
+                print_info(
                    f"Work Requirement '{work_summary.name}' is already finishing"
                )
                finishing_count += 1
                work_requirement_ids.append(work_summary.id)

        if finished_count > 1:
-
+            print_info(f"Finished {finished_count} Work Requirement(s)")
        elif finished_count == 0 and finishing_count == 0:
-
+            print_info("No Work Requirements to finish")

        if ARGS_PARSER.follow:
            follow_ids(work_requirement_ids)

    else:
-
+        print_info("No Work Requirements to finish")


 def _finish_work_requirements_by_name_or_id(names_or_ids: List[str]):
@@ -129,7 +129,7 @@ def _finish_work_requirements_by_name_or_id(names_or_ids: List[str]):

            work_requirement_summaries.append(work_requirement_summary)
            if work_requirement_summary.status == WorkRequirementStatus.FINISHING:
-
+                print_info(f"Work Requirement '{name_or_id}' is already finishing")
            else:
                if not confirmed(f"Finish Work Requirement '{name_or_id}'?"):
                    continue
@@ -137,7 +137,7 @@ def _finish_work_requirements_by_name_or_id(names_or_ids: List[str]):
                CLIENT.work_client.finish_work_requirement_by_id(
                    work_requirement_summary.id
                )
-
+                print_info(f"Finished Work Requirement '{name_or_id}'")
        except Exception as e:
            print_error(f"Failed to finish Work Requirement '{name_or_id}': {e}")
