cloudpub 1.6.0__tar.gz → 1.7.0__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (32)
  1. {cloudpub-1.6.0/cloudpub.egg-info → cloudpub-1.7.0}/PKG-INFO +17 -2
  2. {cloudpub-1.6.0 → cloudpub-1.7.0}/cloudpub/error.py +4 -0
  3. {cloudpub-1.6.0 → cloudpub-1.7.0}/cloudpub/models/ms_azure.py +11 -1
  4. {cloudpub-1.6.0 → cloudpub-1.7.0}/cloudpub/ms_azure/service.py +105 -38
  5. {cloudpub-1.6.0 → cloudpub-1.7.0}/cloudpub/ms_azure/utils.py +31 -102
  6. {cloudpub-1.6.0 → cloudpub-1.7.0/cloudpub.egg-info}/PKG-INFO +17 -2
  7. {cloudpub-1.6.0 → cloudpub-1.7.0}/cloudpub.egg-info/SOURCES.txt +4 -1
  8. cloudpub-1.7.0/requirements.txt +187 -0
  9. {cloudpub-1.6.0 → cloudpub-1.7.0}/setup.py +1 -1
  10. cloudpub-1.7.0/tests/test_common.py +38 -0
  11. cloudpub-1.7.0/tests/test_models.py +34 -0
  12. cloudpub-1.7.0/tests/test_utils.py +19 -0
  13. cloudpub-1.6.0/requirements.txt +0 -153
  14. {cloudpub-1.6.0 → cloudpub-1.7.0}/LICENSE +0 -0
  15. {cloudpub-1.6.0 → cloudpub-1.7.0}/MANIFEST.in +0 -0
  16. {cloudpub-1.6.0 → cloudpub-1.7.0}/README.md +0 -0
  17. {cloudpub-1.6.0 → cloudpub-1.7.0}/cloudpub/__init__.py +0 -0
  18. {cloudpub-1.6.0 → cloudpub-1.7.0}/cloudpub/aws/__init__.py +0 -0
  19. {cloudpub-1.6.0 → cloudpub-1.7.0}/cloudpub/aws/service.py +0 -0
  20. {cloudpub-1.6.0 → cloudpub-1.7.0}/cloudpub/aws/utils.py +0 -0
  21. {cloudpub-1.6.0 → cloudpub-1.7.0}/cloudpub/common.py +0 -0
  22. {cloudpub-1.6.0 → cloudpub-1.7.0}/cloudpub/models/__init__.py +0 -0
  23. {cloudpub-1.6.0 → cloudpub-1.7.0}/cloudpub/models/aws.py +0 -0
  24. {cloudpub-1.6.0 → cloudpub-1.7.0}/cloudpub/models/common.py +0 -0
  25. {cloudpub-1.6.0 → cloudpub-1.7.0}/cloudpub/ms_azure/__init__.py +0 -0
  26. {cloudpub-1.6.0 → cloudpub-1.7.0}/cloudpub/ms_azure/session.py +0 -0
  27. {cloudpub-1.6.0 → cloudpub-1.7.0}/cloudpub/utils.py +0 -0
  28. {cloudpub-1.6.0 → cloudpub-1.7.0}/cloudpub.egg-info/dependency_links.txt +0 -0
  29. {cloudpub-1.6.0 → cloudpub-1.7.0}/cloudpub.egg-info/not-zip-safe +0 -0
  30. {cloudpub-1.6.0 → cloudpub-1.7.0}/cloudpub.egg-info/requires.txt +0 -0
  31. {cloudpub-1.6.0 → cloudpub-1.7.0}/cloudpub.egg-info/top_level.txt +0 -0
  32. {cloudpub-1.6.0 → cloudpub-1.7.0}/setup.cfg +0 -0
@@ -1,6 +1,6 @@
1
- Metadata-Version: 2.1
1
+ Metadata-Version: 2.4
2
2
  Name: cloudpub
3
- Version: 1.6.0
3
+ Version: 1.7.0
4
4
  Summary: Services for publishing products in cloud environments
5
5
  Home-page: https://github.com/release-engineering/cloudpub
6
6
  Author: Jonathan Gangi
@@ -13,3 +13,18 @@ Classifier: Programming Language :: Python :: 3.11
13
13
  Classifier: Programming Language :: Python :: 3.12
14
14
  Classifier: Programming Language :: Python :: 3.13
15
15
  License-File: LICENSE
16
+ Requires-Dist: attrs
17
+ Requires-Dist: deepdiff
18
+ Requires-Dist: requests
19
+ Requires-Dist: tenacity
20
+ Requires-Dist: packaging
21
+ Requires-Dist: boto3>=1.34.117
22
+ Dynamic: author
23
+ Dynamic: author-email
24
+ Dynamic: classifier
25
+ Dynamic: home-page
26
+ Dynamic: keywords
27
+ Dynamic: license
28
+ Dynamic: license-file
29
+ Dynamic: requires-dist
30
+ Dynamic: summary
@@ -21,5 +21,9 @@ class NotFoundError(ValueError):
21
21
  """Represent a missing resource."""
22
22
 
23
23
 
24
+ class ConflictError(RuntimeError):
25
+ """Report a submission conflict error."""
26
+
27
+
24
28
  class Timeout(Exception):
25
29
  """Represent a missing resource."""
@@ -711,11 +711,21 @@ class ProductProperty(AzureProductLinkedResource):
711
711
  `Schema definition for ProductProperty <https://schema.mp.microsoft.com/schema/property/2022-03-01-preview2>`_
712
712
  """ # noqa E501
713
713
 
714
+ schema: str = field(
715
+ validator=instance_of(str),
716
+ metadata={
717
+ "alias": MS_SCHEMA,
718
+ "const": "https://schema.mp.microsoft.com/schema/property/2022-03-01-preview2",
719
+ },
720
+ )
721
+ """
722
+ The `resource schema`_ for Graph API."""
723
+
714
724
  kind: str
715
725
  """Expected to be ``azureVM``"""
716
726
 
717
727
  terms_of_use: Optional[str] = field(
718
- validator=optional(instance_of(str)), metadata={"alias": "termsOfUse"}
728
+ validator=optional(instance_of(str)), metadata={"alias": "termsOfUseUrl"}
719
729
  )
720
730
  """The product terms of use."""
721
731
 
@@ -7,13 +7,13 @@ from typing import Any, Dict, Iterator, List, Optional, Tuple, Union, cast
7
7
 
8
8
  from deepdiff import DeepDiff
9
9
  from requests import HTTPError
10
- from tenacity import retry
11
- from tenacity.retry import retry_if_result
10
+ from tenacity import RetryError, Retrying, retry
11
+ from tenacity.retry import retry_if_exception_type, retry_if_result
12
12
  from tenacity.stop import stop_after_attempt, stop_after_delay
13
- from tenacity.wait import wait_chain, wait_fixed
13
+ from tenacity.wait import wait_fixed
14
14
 
15
15
  from cloudpub.common import BaseService
16
- from cloudpub.error import InvalidStateError, NotFoundError
16
+ from cloudpub.error import ConflictError, InvalidStateError, NotFoundError, Timeout
17
17
  from cloudpub.models.ms_azure import (
18
18
  RESOURCE_MAPING,
19
19
  AzureResource,
@@ -92,18 +92,31 @@ class AzureService(BaseService[AzurePublishingMetadata]):
92
92
  CONFIGURE_SCHEMA = "https://schema.mp.microsoft.com/schema/configure/{AZURE_API_VERSION}"
93
93
  DIFF_EXCLUDES = [r"root\['resources'\]\[[0-9]+\]\['url'\]"]
94
94
 
95
- def __init__(self, credentials: Dict[str, str]):
95
+ def __init__(
96
+ self,
97
+ credentials: Dict[str, str],
98
+ retry_interval: Union[int, float] = 300,
99
+ retry_timeout: Union[int, float] = 3600 * 24 * 7,
100
+ ):
96
101
  """
97
102
  Create a new AuzureService object.
98
103
 
99
104
  Args:
100
105
  credentials (dict)
101
106
  Dictionary with Azure credentials to authenticate on Product Ingestion API.
107
+ retry_interval (int, float)
108
+ The wait time interval in seconds for retrying jobs.
109
+ Defaults to 300
110
+ retry_timeout (int, float)
111
+ The max time in seconds to attempt retries.
112
+ Defaults to 7 days.
102
113
  """
103
114
  self.session = PartnerPortalSession.make_graph_api_session(
104
115
  auth_keys=credentials, schema_version=self.AZURE_SCHEMA_VERSION
105
116
  )
106
117
  self._products: List[ProductSummary] = []
118
+ self.retry_interval = retry_interval
119
+ self.retry_timeout = retry_timeout
107
120
 
108
121
  def _configure(self, data: Dict[str, Any]) -> ConfigureStatus:
109
122
  """
@@ -162,15 +175,27 @@ class AzureService(BaseService[AzurePublishingMetadata]):
162
175
  log.debug("Query Job details response: %s", parsed_resp)
163
176
  return parsed_resp
164
177
 
165
- @retry(
166
- retry=retry_if_result(predicate=is_azure_job_not_complete),
167
- wait=wait_chain(
168
- *[wait_fixed(wait=60)] # First wait for 1 minute # noqa: W503
169
- + [wait_fixed(wait=10 * 60)] # Then wait for 10 minutes # noqa: W503
170
- + [wait_fixed(wait=30 * 60)] # Finally wait each 30 minutes # noqa: W503
171
- ),
172
- stop=stop_after_delay(max_delay=60 * 60 * 24 * 7), # Give up after retrying for 7 days
173
- )
178
+ def query_job_status(self, job_id: str) -> ConfigureStatus:
179
+ """Query the job status for a given Job ID.
180
+
181
+ It will raise error if any invalid state is detected.
182
+
183
+ Args:
184
+ job_id (str): The job ID to query details from.
185
+
186
+ Returns:
187
+ ConfigureStatus: The ConfigureStatus from JobID
188
+ Raises:
189
+ InvalidStateError: If the job has failed.
190
+ """
191
+ job_details = self._query_job_details(job_id=job_id)
192
+ if job_details.job_result == "failed":
193
+ error_message = f"Job {job_id} failed: \n{job_details.errors}"
194
+ self._raise_error(InvalidStateError, error_message)
195
+ elif job_details.job_result == "succeeded":
196
+ log.debug("Job %s succeeded", job_id)
197
+ return job_details
198
+
174
199
  def _wait_for_job_completion(self, job_id: str) -> ConfigureStatus:
175
200
  """
176
201
  Wait until the specified job ID is complete.
@@ -189,13 +214,15 @@ class AzureService(BaseService[AzurePublishingMetadata]):
189
214
  Raises:
190
215
  InvalidStateError if the job failed
191
216
  """
192
- job_details = self._query_job_details(job_id=job_id)
193
- if job_details.job_result == "failed":
194
- error_message = f"Job {job_id} failed: \n{job_details.errors}"
195
- self._raise_error(InvalidStateError, error_message)
196
- elif job_details.job_result == "succeeded":
197
- log.debug("Job %s succeeded", job_id)
198
- return job_details
217
+ r = Retrying(
218
+ retry=retry_if_result(predicate=is_azure_job_not_complete),
219
+ wait=wait_fixed(self.retry_interval),
220
+ stop=stop_after_delay(max_delay=self.retry_timeout),
221
+ )
222
+ try:
223
+ return r(self.query_job_status, job_id)
224
+ except RetryError:
225
+ self._raise_error(Timeout, f"Time out waiting for job {job_id}")
199
226
 
200
227
  def configure(self, resources: List[AzureResource]) -> ConfigureStatus:
201
228
  """
@@ -437,6 +464,21 @@ class AzureService(BaseService[AzurePublishingMetadata]):
437
464
  remote = self.get_product(product.id, target=target)
438
465
  return DeepDiff(remote.to_json(), product.to_json(), exclude_regex_paths=self.DIFF_EXCLUDES)
439
466
 
467
+ def diff_two_offers(self, last_offer: Product, prev_offer: Product) -> DeepDiff:
468
+ """Compute the difference between two provided products.
469
+
470
+ Args:
471
+ last_offer (Product)
472
+ The lastest offer state to diff
473
+ prev_offer (Product)
474
+ The previous offer state to diff
475
+ Returns:
476
+ DeepDiff: The diff data.
477
+ """
478
+ return DeepDiff(
479
+ prev_offer.to_json(), last_offer.to_json(), exclude_regex_paths=self.DIFF_EXCLUDES
480
+ )
481
+
440
482
  def submit_to_status(
441
483
  self, product_id: str, status: str, resources: Optional[List[AzureResource]] = None
442
484
  ) -> ConfigureStatus:
@@ -477,31 +519,48 @@ class AzureService(BaseService[AzurePublishingMetadata]):
477
519
  log.debug("Set the status \"%s\" to submission.", status)
478
520
  return self.configure(resources=cfg_res)
479
521
 
480
- @retry(
481
- wait=wait_fixed(300),
482
- stop=stop_after_delay(max_delay=60 * 60 * 24 * 7), # Give up after retrying for 7 days,
483
- reraise=True,
484
- )
485
522
  def ensure_can_publish(self, product_id: str) -> None:
486
523
  """
487
524
  Ensure the offer is not already being published.
488
525
 
489
- It will wait for up to 7 days retrying to make sure it's possible to publish before
490
- giving up and raising.
526
+ It will raise ConflictError if a publish is already in progress in any submission target.
491
527
 
492
528
  Args:
493
529
  product_id (str)
494
530
  The product ID to check the offer's publishing status
495
531
  Raises:
496
- RuntimeError: whenever a publishing is already in progress.
532
+ ConflictError: whenever a publishing is already in progress for any submission target.
497
533
  """
498
534
  log.info("Ensuring no other publishing jobs are in progress for \"%s\"", product_id)
499
- submission_targets = ["preview", "live"]
500
535
 
501
- for target in submission_targets:
502
- sub = self.get_submission_state(product_id, state=target)
503
- if sub and sub.status and sub.status == "running":
504
- raise RuntimeError(f"The offer {product_id} is already being published to {target}")
536
+ for sub in self.get_submissions(product_id):
537
+ if sub and sub.status and sub.status != "completed":
538
+ msg = (
539
+ f"The offer {product_id} is already being published to "
540
+ f"{sub.target.targetType}: {sub.status}/{sub.result}"
541
+ )
542
+ log.error(msg)
543
+ raise ConflictError(msg)
544
+
545
+ def wait_active_publishing(self, product_id: str) -> None:
546
+ """
547
+ Wait when there's an existing submission in progress.
548
+
549
+ Args:
550
+ product_id (str)
551
+ The product ID of to verify the submissions state.
552
+ """
553
+ r = Retrying(
554
+ retry=retry_if_exception_type(ConflictError),
555
+ wait=wait_fixed(self.retry_interval),
556
+ stop=stop_after_delay(max_delay=self.retry_timeout),
557
+ )
558
+ log.info("Checking for active changes on %s.", product_id)
559
+
560
+ try:
561
+ r(self.ensure_can_publish, product_id)
562
+ except RetryError:
563
+ self._raise_error(Timeout, f"Timed out waiting for {product_id} to be unlocked")
505
564
 
506
565
  def get_plan_tech_config(self, product: Product, plan: PlanSummary) -> VMIPlanTechConfig:
507
566
  """
@@ -548,14 +607,13 @@ class AzureService(BaseService[AzurePublishingMetadata]):
548
607
  # The following resources shouldn't be required:
549
608
  # -> customer-leads
550
609
  # -> test-drive
551
- # -> property
552
610
  # -> *listing*
553
611
  # -> reseller
554
612
  # -> price-and-availability-*
555
613
  # NOTE: The "submission" resource will be already added by the "submit_to_status" method
556
614
  #
557
- # With that it needs only the related "product" and "plan" resources alongisde the
558
- # updated tech_config
615
+ # With that it needs only the related "product", "property" and "plan" resources alongisde
616
+ # the updated tech_config
559
617
  product_id = tech_config.product_id
560
618
  plan_id = tech_config.plan_id
561
619
  prod_res = cast(
@@ -566,6 +624,14 @@ class AzureService(BaseService[AzurePublishingMetadata]):
566
624
  if prd.id == product_id
567
625
  ],
568
626
  )[0]
627
+ property = cast(
628
+ List[ProductProperty],
629
+ [
630
+ prop
631
+ for prop in self.filter_product_resources(product=product, resource="property")
632
+ if prop.product_id == product_id # type: ignore [union-attr]
633
+ ],
634
+ )[0]
569
635
  plan_res = cast(
570
636
  List[PlanSummary],
571
637
  [
@@ -574,7 +640,7 @@ class AzureService(BaseService[AzurePublishingMetadata]):
574
640
  if pln.id == plan_id
575
641
  ],
576
642
  )[0]
577
- return [prod_res, plan_res, tech_config]
643
+ return [prod_res, property, plan_res, tech_config]
578
644
 
579
645
  def compute_targets(self, product_id: str) -> List[str]:
580
646
  """List all the possible publishing targets order to seek data from Azure.
@@ -815,6 +881,7 @@ class AzureService(BaseService[AzurePublishingMetadata]):
815
881
  plan_name = metadata.destination.split("/")[-1]
816
882
  product_id = self.get_productid(product_name)
817
883
  sas_in_target = SasFoundStatus.missing
884
+ self.wait_active_publishing(product_id=product_id)
818
885
  log.info(
819
886
  "Preparing to associate the image \"%s\" with the plan \"%s\" from product \"%s\"",
820
887
  metadata.image_path,
@@ -1,7 +1,7 @@
1
1
  # SPDX-License-Identifier: GPL-3.0-or-later
2
2
  import logging
3
3
  from operator import attrgetter
4
- from typing import Any, Dict, List, Optional, Tuple, TypedDict
4
+ from typing import Any, Dict, List, Optional, TypedDict
5
5
 
6
6
  from deepdiff import DeepDiff
7
7
 
@@ -241,56 +241,6 @@ def is_legacy_gen_supported(metadata: AzurePublishingMetadata) -> bool:
241
241
  return metadata.architecture == "x64" and metadata.support_legacy
242
242
 
243
243
 
244
- def prepare_vm_images(
245
- metadata: AzurePublishingMetadata,
246
- gen1: Optional[VMImageDefinition],
247
- gen2: Optional[VMImageDefinition],
248
- source: VMImageSource,
249
- ) -> List[VMImageDefinition]:
250
- """
251
- Update the vm_images list with the proper SAS based in existing generation(s).
252
-
253
- Args:
254
- metadata (AzurePublishingMetadata)
255
- The VHD publishing metadata.
256
- gen1 (VMImageDefinition, optional)
257
- The VMImageDefinition for Gen1 VHD.
258
- If not set the argument `gen2` must be set.
259
- gen2 (VMImageDefinition, optional)
260
- The VMImageDefinition for Gen2 VHD.
261
- If not set the argument `gen1` must be set.
262
- source (VMImageSource):
263
- The VMImageSource with the updated SAS URI.
264
- Returns:
265
- list: A new list containing the expected VMImageDefinition(s)
266
- """
267
- if not gen1 and not gen2:
268
- msg = "At least one argument of \"gen1\" or \"gen2\" must be set."
269
- log.error(msg)
270
- raise ValueError(msg)
271
-
272
- raw_source = source.to_json()
273
- json_gen1 = {
274
- "imageType": get_image_type_mapping(metadata.architecture, "V1"),
275
- "source": raw_source,
276
- }
277
- json_gen2 = {
278
- "imageType": get_image_type_mapping(metadata.architecture, "V2"),
279
- "source": raw_source,
280
- }
281
-
282
- if metadata.generation == "V2":
283
- # In this case we need to set a V2 SAS URI
284
- gen2_new = VMImageDefinition.from_json(json_gen2)
285
- if is_legacy_gen_supported(metadata): # and in this case a V1 as well
286
- gen1_new = VMImageDefinition.from_json(json_gen1)
287
- return [gen2_new, gen1_new]
288
- return [gen2_new]
289
- else:
290
- # It's expected to be a Gen1 only, let's get rid of Gen2
291
- return [VMImageDefinition.from_json(json_gen1)]
292
-
293
-
294
244
  def _all_skus_present(old_skus: List[VMISku], disk_versions: List[DiskVersion]) -> bool:
295
245
  image_types = set()
296
246
  for sku in old_skus:
@@ -485,47 +435,6 @@ def seek_disk_version(
485
435
  return None
486
436
 
487
437
 
488
- def vm_images_by_generation(
489
- disk_version: DiskVersion, architecture: str
490
- ) -> Tuple[Optional[VMImageDefinition], ...]:
491
- """
492
- Return a tuple containing the Gen1 and Gen2 VHD images in this order.
493
-
494
- If one of the images doesn't exist it will return None in the expected tuple position.
495
-
496
- Args:
497
- disk_version
498
- The disk version to retrieve the VMImageDefinitions from
499
- architecture
500
- The expected architecture for the VMImageDefinition.
501
- Returns:
502
- Gen1 and Gen2 VMImageDefinitions when they exist.
503
- """
504
- log.debug("Sorting the VMImageDefinition by generation.")
505
- # Here we have 3 possibilities:
506
- # 1. vm_images => "Gen1" only
507
- # 2. vm_images => "Gen2" only
508
- # 3. vm_images => "Gen1" and "Gen2"
509
-
510
- # So let's get the first image whatever it is
511
- img = disk_version.vm_images.pop(0)
512
-
513
- # If first `img` is Gen2 we set the other one as `img_legacy`
514
- if img.image_type == get_image_type_mapping(architecture, "V2"):
515
- img_legacy = disk_version.vm_images.pop(0) if len(disk_version.vm_images) > 0 else None
516
-
517
- else: # Otherwise we set it as `img_legacy` and get the gen2
518
- img_legacy = img
519
- img = (
520
- disk_version.vm_images.pop(0) # type: ignore
521
- if len(disk_version.vm_images) > 0
522
- else None
523
- )
524
- log.debug("Image for current generation: %s", img)
525
- log.debug("Image for legacy generation: %s", img_legacy)
526
- return img, img_legacy
527
-
528
-
529
438
  def create_vm_image_definitions(
530
439
  metadata: AzurePublishingMetadata, source: VMImageSource
531
440
  ) -> List[VMImageDefinition]:
@@ -580,21 +489,41 @@ def set_new_sas_disk_version(
580
489
  # If we already have a VMImageDefinition let's use it
581
490
  if disk_version.vm_images:
582
491
  log.debug("The DiskVersion \"%s\" contains inner images.", disk_version.version_number)
583
- img, img_legacy = vm_images_by_generation(disk_version, metadata.architecture)
584
-
585
- # Now we replace the SAS URI for the vm_images
586
492
  log.info(
587
493
  "Adjusting the VMImages from existing DiskVersion \"%s\""
588
- "to fit the new image with SAS \"%s\".",
494
+ " to fit the new image with SAS \"%s\".",
589
495
  disk_version.version_number,
590
496
  metadata.image_path,
591
497
  )
592
- disk_version.vm_images = prepare_vm_images(
593
- metadata=metadata,
594
- gen1=img_legacy,
595
- gen2=img,
596
- source=source,
597
- )
498
+ # Verify whether the arch is present for the new image
499
+ is_arch_present = False
500
+ # If the arch is present, update the SAS URI
501
+ for img in disk_version.vm_images:
502
+ if (
503
+ img.image_type == get_image_type_mapping(metadata.architecture, metadata.generation)
504
+ ) or (
505
+ metadata.support_legacy
506
+ and img.image_type == get_image_type_mapping(metadata.architecture, "V1") # noqa
507
+ ):
508
+ is_arch_present = True
509
+ img.source.os_disk.uri = source.os_disk.uri
510
+
511
+ # If the arch is not present, add it to the DiskVersion
512
+ if not is_arch_present:
513
+ if metadata.support_legacy:
514
+ disk_version.vm_images.append(
515
+ VMImageDefinition(
516
+ image_type=get_image_type_mapping(metadata.architecture, "V1"),
517
+ source=source.to_json(),
518
+ )
519
+ )
520
+ disk_version.vm_images.append(
521
+ VMImageDefinition(
522
+ image_type=get_image_type_mapping(metadata.architecture, metadata.generation),
523
+ source=source.to_json(),
524
+ )
525
+ )
526
+ return disk_version
598
527
 
599
528
  # If no VMImages, we need to create them from scratch
600
529
  else:
@@ -1,6 +1,6 @@
1
- Metadata-Version: 2.1
1
+ Metadata-Version: 2.4
2
2
  Name: cloudpub
3
- Version: 1.6.0
3
+ Version: 1.7.0
4
4
  Summary: Services for publishing products in cloud environments
5
5
  Home-page: https://github.com/release-engineering/cloudpub
6
6
  Author: Jonathan Gangi
@@ -13,3 +13,18 @@ Classifier: Programming Language :: Python :: 3.11
13
13
  Classifier: Programming Language :: Python :: 3.12
14
14
  Classifier: Programming Language :: Python :: 3.13
15
15
  License-File: LICENSE
16
+ Requires-Dist: attrs
17
+ Requires-Dist: deepdiff
18
+ Requires-Dist: requests
19
+ Requires-Dist: tenacity
20
+ Requires-Dist: packaging
21
+ Requires-Dist: boto3>=1.34.117
22
+ Dynamic: author
23
+ Dynamic: author-email
24
+ Dynamic: classifier
25
+ Dynamic: home-page
26
+ Dynamic: keywords
27
+ Dynamic: license
28
+ Dynamic: license-file
29
+ Dynamic: requires-dist
30
+ Dynamic: summary
@@ -23,4 +23,7 @@ cloudpub/models/ms_azure.py
23
23
  cloudpub/ms_azure/__init__.py
24
24
  cloudpub/ms_azure/service.py
25
25
  cloudpub/ms_azure/session.py
26
- cloudpub/ms_azure/utils.py
26
+ cloudpub/ms_azure/utils.py
27
+ tests/test_common.py
28
+ tests/test_models.py
29
+ tests/test_utils.py
@@ -0,0 +1,187 @@
1
+ #
2
+ # This file is autogenerated by pip-compile with python 3.10
3
+ # To update, run:
4
+ #
5
+ # pip-compile --cert=None --client-cert=None --generate-hashes --index-url=None --output-file=requirements.txt --pip-args=None
6
+ #
7
+ attrs==25.4.0 \
8
+ --hash=sha256:16d5969b87f0859ef33a48b35d55ac1be6e42ae49d5e853b597db70c35c57e11 \
9
+ --hash=sha256:adcf7e2a1fb3b36ac48d97835bb6d8ade15b8dcce26aba8bf1d14847b57a3373
10
+ # via cloudpub (setup.py)
11
+ boto3==1.42.39 \
12
+ --hash=sha256:d03f82363314759eff7f84a27b9e6428125f89d8119e4588e8c2c1d79892c956 \
13
+ --hash=sha256:d9d6ce11df309707b490d2f5f785b761cfddfd6d1f665385b78c9d8ed097184b
14
+ # via cloudpub (setup.py)
15
+ botocore==1.42.39 \
16
+ --hash=sha256:0f00355050821e91a5fe6d932f7bf220f337249b752899e3e4cf6ed54326249e \
17
+ --hash=sha256:9e0d0fed9226449cc26fcf2bbffc0392ac698dd8378e8395ce54f3ec13f81d58
18
+ # via
19
+ # boto3
20
+ # s3transfer
21
+ certifi==2026.1.4 \
22
+ --hash=sha256:9943707519e4add1115f44c2bc244f782c0249876bf51b6599fee1ffbedd685c \
23
+ --hash=sha256:ac726dd470482006e014ad384921ed6438c457018f4b3d204aea4281258b2120
24
+ # via requests
25
+ charset-normalizer==3.4.4 \
26
+ --hash=sha256:027f6de494925c0ab2a55eab46ae5129951638a49a34d87f4c3eda90f696b4ad \
27
+ --hash=sha256:077fbb858e903c73f6c9db43374fd213b0b6a778106bc7032446a8e8b5b38b93 \
28
+ --hash=sha256:0a98e6759f854bd25a58a73fa88833fba3b7c491169f86ce1180c948ab3fd394 \
29
+ --hash=sha256:0d3d8f15c07f86e9ff82319b3d9ef6f4bf907608f53fe9d92b28ea9ae3d1fd89 \
30
+ --hash=sha256:0f04b14ffe5fdc8c4933862d8306109a2c51e0704acfa35d51598eb45a1e89fc \
31
+ --hash=sha256:11d694519d7f29d6cd09f6ac70028dba10f92f6cdd059096db198c283794ac86 \
32
+ --hash=sha256:194f08cbb32dc406d6e1aea671a68be0823673db2832b38405deba2fb0d88f63 \
33
+ --hash=sha256:1bee1e43c28aa63cb16e5c14e582580546b08e535299b8b6158a7c9c768a1f3d \
34
+ --hash=sha256:21d142cc6c0ec30d2efee5068ca36c128a30b0f2c53c1c07bd78cb6bc1d3be5f \
35
+ --hash=sha256:2437418e20515acec67d86e12bf70056a33abdacb5cb1655042f6538d6b085a8 \
36
+ --hash=sha256:244bfb999c71b35de57821b8ea746b24e863398194a4014e4c76adc2bbdfeff0 \
37
+ --hash=sha256:2677acec1a2f8ef614c6888b5b4ae4060cc184174a938ed4e8ef690e15d3e505 \
38
+ --hash=sha256:277e970e750505ed74c832b4bf75dac7476262ee2a013f5574dd49075879e161 \
39
+ --hash=sha256:2aaba3b0819274cc41757a1da876f810a3e4d7b6eb25699253a4effef9e8e4af \
40
+ --hash=sha256:2b7d8f6c26245217bd2ad053761201e9f9680f8ce52f0fcd8d0755aeae5b2152 \
41
+ --hash=sha256:2c9d3c380143a1fedbff95a312aa798578371eb29da42106a29019368a475318 \
42
+ --hash=sha256:3162d5d8ce1bb98dd51af660f2121c55d0fa541b46dff7bb9b9f86ea1d87de72 \
43
+ --hash=sha256:31fd66405eaf47bb62e8cd575dc621c56c668f27d46a61d975a249930dd5e2a4 \
44
+ --hash=sha256:362d61fd13843997c1c446760ef36f240cf81d3ebf74ac62652aebaf7838561e \
45
+ --hash=sha256:376bec83a63b8021bb5c8ea75e21c4ccb86e7e45ca4eb81146091b56599b80c3 \
46
+ --hash=sha256:44c2a8734b333e0578090c4cd6b16f275e07aa6614ca8715e6c038e865e70576 \
47
+ --hash=sha256:47cc91b2f4dd2833fddaedd2893006b0106129d4b94fdb6af1f4ce5a9965577c \
48
+ --hash=sha256:4902828217069c3c5c71094537a8e623f5d097858ac6ca8252f7b4d10b7560f1 \
49
+ --hash=sha256:4bd5d4137d500351a30687c2d3971758aac9a19208fc110ccb9d7188fbe709e8 \
50
+ --hash=sha256:4fe7859a4e3e8457458e2ff592f15ccb02f3da787fcd31e0183879c3ad4692a1 \
51
+ --hash=sha256:542d2cee80be6f80247095cc36c418f7bddd14f4a6de45af91dfad36d817bba2 \
52
+ --hash=sha256:554af85e960429cf30784dd47447d5125aaa3b99a6f0683589dbd27e2f45da44 \
53
+ --hash=sha256:5833d2c39d8896e4e19b689ffc198f08ea58116bee26dea51e362ecc7cd3ed26 \
54
+ --hash=sha256:5947809c8a2417be3267efc979c47d76a079758166f7d43ef5ae8e9f92751f88 \
55
+ --hash=sha256:5ae497466c7901d54b639cf42d5b8c1b6a4fead55215500d2f486d34db48d016 \
56
+ --hash=sha256:5bd2293095d766545ec1a8f612559f6b40abc0eb18bb2f5d1171872d34036ede \
57
+ --hash=sha256:5bfbb1b9acf3334612667b61bd3002196fe2a1eb4dd74d247e0f2a4d50ec9bbf \
58
+ --hash=sha256:5cb4d72eea50c8868f5288b7f7f33ed276118325c1dfd3957089f6b519e1382a \
59
+ --hash=sha256:5dbe56a36425d26d6cfb40ce79c314a2e4dd6211d51d6d2191c00bed34f354cc \
60
+ --hash=sha256:5f819d5fe9234f9f82d75bdfa9aef3a3d72c4d24a6e57aeaebba32a704553aa0 \
61
+ --hash=sha256:64b55f9dce520635f018f907ff1b0df1fdc31f2795a922fb49dd14fbcdf48c84 \
62
+ --hash=sha256:6515f3182dbe4ea06ced2d9e8666d97b46ef4c75e326b79bb624110f122551db \
63
+ --hash=sha256:65e2befcd84bc6f37095f5961e68a6f077bf44946771354a28ad434c2cce0ae1 \
64
+ --hash=sha256:6aee717dcfead04c6eb1ce3bd29ac1e22663cdea57f943c87d1eab9a025438d7 \
65
+ --hash=sha256:6b39f987ae8ccdf0d2642338faf2abb1862340facc796048b604ef14919e55ed \
66
+ --hash=sha256:6e1fcf0720908f200cd21aa4e6750a48ff6ce4afe7ff5a79a90d5ed8a08296f8 \
67
+ --hash=sha256:74018750915ee7ad843a774364e13a3db91682f26142baddf775342c3f5b1133 \
68
+ --hash=sha256:74664978bb272435107de04e36db5a9735e78232b85b77d45cfb38f758efd33e \
69
+ --hash=sha256:74bb723680f9f7a6234dcf67aea57e708ec1fbdf5699fb91dfd6f511b0a320ef \
70
+ --hash=sha256:752944c7ffbfdd10c074dc58ec2d5a8a4cd9493b314d367c14d24c17684ddd14 \
71
+ --hash=sha256:778d2e08eda00f4256d7f672ca9fef386071c9202f5e4607920b86d7803387f2 \
72
+ --hash=sha256:780236ac706e66881f3b7f2f32dfe90507a09e67d1d454c762cf642e6e1586e0 \
73
+ --hash=sha256:798d75d81754988d2565bff1b97ba5a44411867c0cf32b77a7e8f8d84796b10d \
74
+ --hash=sha256:799a7a5e4fb2d5898c60b640fd4981d6a25f1c11790935a44ce38c54e985f828 \
75
+ --hash=sha256:7a32c560861a02ff789ad905a2fe94e3f840803362c84fecf1851cb4cf3dc37f \
76
+ --hash=sha256:7c308f7e26e4363d79df40ca5b2be1c6ba9f02bdbccfed5abddb7859a6ce72cf \
77
+ --hash=sha256:7fa17817dc5625de8a027cb8b26d9fefa3ea28c8253929b8d6649e705d2835b6 \
78
+ --hash=sha256:81d5eb2a312700f4ecaa977a8235b634ce853200e828fbadf3a9c50bab278328 \
79
+ --hash=sha256:82004af6c302b5d3ab2cfc4cc5f29db16123b1a8417f2e25f9066f91d4411090 \
80
+ --hash=sha256:837c2ce8c5a65a2035be9b3569c684358dfbf109fd3b6969630a87535495ceaa \
81
+ --hash=sha256:840c25fb618a231545cbab0564a799f101b63b9901f2569faecd6b222ac72381 \
82
+ --hash=sha256:8a6562c3700cce886c5be75ade4a5db4214fda19fede41d9792d100288d8f94c \
83
+ --hash=sha256:8af65f14dc14a79b924524b1e7fffe304517b2bff5a58bf64f30b98bbc5079eb \
84
+ --hash=sha256:8ef3c867360f88ac904fd3f5e1f902f13307af9052646963ee08ff4f131adafc \
85
+ --hash=sha256:94537985111c35f28720e43603b8e7b43a6ecfb2ce1d3058bbe955b73404e21a \
86
+ --hash=sha256:99ae2cffebb06e6c22bdc25801d7b30f503cc87dbd283479e7b606f70aff57ec \
87
+ --hash=sha256:9a26f18905b8dd5d685d6d07b0cdf98a79f3c7a918906af7cc143ea2e164c8bc \
88
+ --hash=sha256:9b35f4c90079ff2e2edc5b26c0c77925e5d2d255c42c74fdb70fb49b172726ac \
89
+ --hash=sha256:9cd98cdc06614a2f768d2b7286d66805f94c48cde050acdbbb7db2600ab3197e \
90
+ --hash=sha256:9d1bb833febdff5c8927f922386db610b49db6e0d4f4ee29601d71e7c2694313 \
91
+ --hash=sha256:9f7fcd74d410a36883701fafa2482a6af2ff5ba96b9a620e9e0721e28ead5569 \
92
+ --hash=sha256:a59cb51917aa591b1c4e6a43c132f0cdc3c76dbad6155df4e28ee626cc77a0a3 \
93
+ --hash=sha256:a61900df84c667873b292c3de315a786dd8dac506704dea57bc957bd31e22c7d \
94
+ --hash=sha256:a79cfe37875f822425b89a82333404539ae63dbdddf97f84dcbc3d339aae9525 \
95
+ --hash=sha256:a8a8b89589086a25749f471e6a900d3f662d1d3b6e2e59dcecf787b1cc3a1894 \
96
+ --hash=sha256:a8bf8d0f749c5757af2142fe7903a9df1d2e8aa3841559b2bad34b08d0e2bcf3 \
97
+ --hash=sha256:a9768c477b9d7bd54bc0c86dbaebdec6f03306675526c9927c0e8a04e8f94af9 \
98
+ --hash=sha256:ac1c4a689edcc530fc9d9aa11f5774b9e2f33f9a0c6a57864e90908f5208d30a \
99
+ --hash=sha256:af2d8c67d8e573d6de5bc30cdb27e9b95e49115cd9baad5ddbd1a6207aaa82a9 \
100
+ --hash=sha256:b435cba5f4f750aa6c0a0d92c541fb79f69a387c91e61f1795227e4ed9cece14 \
101
+ --hash=sha256:b5b290ccc2a263e8d185130284f8501e3e36c5e02750fc6b6bdeb2e9e96f1e25 \
102
+ --hash=sha256:b5d84d37db046c5ca74ee7bb47dd6cbc13f80665fdde3e8040bdd3fb015ecb50 \
103
+ --hash=sha256:b7cf1017d601aa35e6bb650b6ad28652c9cd78ee6caff19f3c28d03e1c80acbf \
104
+ --hash=sha256:bc7637e2f80d8530ee4a78e878bce464f70087ce73cf7c1caf142416923b98f1 \
105
+ --hash=sha256:c0463276121fdee9c49b98908b3a89c39be45d86d1dbaa22957e38f6321d4ce3 \
106
+ --hash=sha256:c4ef880e27901b6cc782f1b95f82da9313c0eb95c3af699103088fa0ac3ce9ac \
107
+ --hash=sha256:c8ae8a0f02f57a6e61203a31428fa1d677cbe50c93622b4149d5c0f319c1d19e \
108
+ --hash=sha256:ca5862d5b3928c4940729dacc329aa9102900382fea192fc5e52eb69d6093815 \
109
+ --hash=sha256:cb01158d8b88ee68f15949894ccc6712278243d95f344770fa7593fa2d94410c \
110
+ --hash=sha256:cb6254dc36b47a990e59e1068afacdcd02958bdcce30bb50cc1700a8b9d624a6 \
111
+ --hash=sha256:cc00f04ed596e9dc0da42ed17ac5e596c6ccba999ba6bd92b0e0aef2f170f2d6 \
112
+ --hash=sha256:cd09d08005f958f370f539f186d10aec3377d55b9eeb0d796025d4886119d76e \
113
+ --hash=sha256:cd4b7ca9984e5e7985c12bc60a6f173f3c958eae74f3ef6624bb6b26e2abbae4 \
114
+ --hash=sha256:ce8a0633f41a967713a59c4139d29110c07e826d131a316b50ce11b1d79b4f84 \
115
+ --hash=sha256:cead0978fc57397645f12578bfd2d5ea9138ea0fac82b2f63f7f7c6877986a69 \
116
+ --hash=sha256:d055ec1e26e441f6187acf818b73564e6e6282709e9bcb5b63f5b23068356a15 \
117
+ --hash=sha256:d1f13550535ad8cff21b8d757a3257963e951d96e20ec82ab44bc64aeb62a191 \
118
+ --hash=sha256:d9c7f57c3d666a53421049053eaacdd14bbd0a528e2186fcb2e672effd053bb0 \
119
+ --hash=sha256:d9e45d7faa48ee908174d8fe84854479ef838fc6a705c9315372eacbc2f02897 \
120
+ --hash=sha256:da3326d9e65ef63a817ecbcc0df6e94463713b754fe293eaa03da99befb9a5bd \
121
+ --hash=sha256:de00632ca48df9daf77a2c65a484531649261ec9f25489917f09e455cb09ddb2 \
122
+ --hash=sha256:e1f185f86a6f3403aa2420e815904c67b2f9ebc443f045edd0de921108345794 \
123
+ --hash=sha256:e824f1492727fa856dd6eda4f7cee25f8518a12f3c4a56a74e8095695089cf6d \
124
+ --hash=sha256:e912091979546adf63357d7e2ccff9b44f026c075aeaf25a52d0e95ad2281074 \
125
+ --hash=sha256:eaabd426fe94daf8fd157c32e571c85cb12e66692f15516a83a03264b08d06c3 \
126
+ --hash=sha256:ebf3e58c7ec8a8bed6d66a75d7fb37b55e5015b03ceae72a8e7c74495551e224 \
127
+ --hash=sha256:ecaae4149d99b1c9e7b88bb03e3221956f68fd6d50be2ef061b2381b61d20838 \
128
+ --hash=sha256:eecbc200c7fd5ddb9a7f16c7decb07b566c29fa2161a16cf67b8d068bd21690a \
129
+ --hash=sha256:f155a433c2ec037d4e8df17d18922c3a0d9b3232a396690f17175d2946f0218d \
130
+ --hash=sha256:f1e34719c6ed0b92f418c7c780480b26b5d9c50349e9a9af7d76bf757530350d \
131
+ --hash=sha256:f34be2938726fc13801220747472850852fe6b1ea75869a048d6f896838c896f \
132
+ --hash=sha256:f820802628d2694cb7e56db99213f930856014862f3fd943d290ea8438d07ca8 \
133
+ --hash=sha256:f8bf04158c6b607d747e93949aa60618b61312fe647a6369f88ce2ff16043490 \
134
+ --hash=sha256:f8e160feb2aed042cd657a72acc0b481212ed28b1b9a95c0cee1621b524e1966 \
135
+ --hash=sha256:f9d332f8c2a2fcbffe1378594431458ddbef721c1769d78e2cbc06280d8155f9 \
136
+ --hash=sha256:fa09f53c465e532f4d3db095e0c55b615f010ad81803d383195b6b5ca6cbf5f3 \
137
+ --hash=sha256:faa3a41b2b66b6e50f84ae4a68c64fcd0c44355741c6374813a800cd6695db9e \
138
+ --hash=sha256:fd44c878ea55ba351104cb93cc85e74916eb8fa440ca7903e57575e97394f608
139
+ # via requests
140
+ deepdiff==8.6.1 \
141
+ --hash=sha256:ec56d7a769ca80891b5200ec7bd41eec300ced91ebcc7797b41eb2b3f3ff643a \
142
+ --hash=sha256:ee8708a7f7d37fb273a541fa24ad010ed484192cd0c4ffc0fa0ed5e2d4b9e78b
143
+ # via cloudpub (setup.py)
144
+ idna==3.11 \
145
+ --hash=sha256:771a87f49d9defaf64091e6e6fe9c18d4833f140bd19464795bc32d966ca37ea \
146
+ --hash=sha256:795dafcc9c04ed0c1fb032c2aa73654d8e8c5023a7df64a53f39190ada629902
147
+ # via requests
148
+ jmespath==1.1.0 \
149
+ --hash=sha256:472c87d80f36026ae83c6ddd0f1d05d4e510134ed462851fd5f754c8c3cbb88d \
150
+ --hash=sha256:a5663118de4908c91729bea0acadca56526eb2698e83de10cd116ae0f4e97c64
151
+ # via
152
+ # boto3
153
+ # botocore
154
+ orderly-set==5.5.0 \
155
+ --hash=sha256:46f0b801948e98f427b412fcabb831677194c05c3b699b80de260374baa0b1e7 \
156
+ --hash=sha256:e87185c8e4d8afa64e7f8160ee2c542a475b738bc891dc3f58102e654125e6ce
157
+ # via deepdiff
158
+ packaging==26.0 \
159
+ --hash=sha256:00243ae351a257117b6a241061796684b084ed1c516a08c48a3f7e147a9d80b4 \
160
+ --hash=sha256:b36f1fef9334a5588b4166f8bcd26a14e521f2b55e6b9de3aaa80d3ff7a37529
161
+ # via cloudpub (setup.py)
162
+ python-dateutil==2.9.0.post0 \
163
+ --hash=sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3 \
164
+ --hash=sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427
165
+ # via botocore
166
+ requests==2.32.5 \
167
+ --hash=sha256:2462f94637a34fd532264295e186976db0f5d453d1cdd31473c85a6a161affb6 \
168
+ --hash=sha256:dbba0bac56e100853db0ea71b82b4dfd5fe2bf6d3754a8893c3af500cec7d7cf
169
+ # via cloudpub (setup.py)
170
+ s3transfer==0.16.0 \
171
+ --hash=sha256:18e25d66fed509e3868dc1572b3f427ff947dd2c56f844a5bf09481ad3f3b2fe \
172
+ --hash=sha256:8e990f13268025792229cd52fa10cb7163744bf56e719e0b9cb925ab79abf920
173
+ # via boto3
174
+ six==1.17.0 \
175
+ --hash=sha256:4721f391ed90541fddacab5acf947aa0d3dc7d27b2e1e8eda2be8970586c3274 \
176
+ --hash=sha256:ff70335d468e7eb6ec65b95b99d3a2836546063f63acc5171de367e834932a81
177
+ # via python-dateutil
178
+ tenacity==9.1.2 \
179
+ --hash=sha256:1169d376c297e7de388d18b4481760d478b0e99a777cad3a9c86e556f4b697cb \
180
+ --hash=sha256:f77bf36710d8b73a50b2dd155c97b870017ad21afe6ab300326b0371b3b05138
181
+ # via cloudpub (setup.py)
182
+ urllib3==2.6.3 \
183
+ --hash=sha256:1b62b6884944a57dbe321509ab94fd4d3b307075e0c2eae991ac71ee15ad38ed \
184
+ --hash=sha256:bf272323e553dfb2e87d9bfd225ca7b0f467b919d7bbd355436d3fd37cb0acd4
185
+ # via
186
+ # botocore
187
+ # requests
@@ -3,7 +3,7 @@ from setuptools import setup, find_packages
3
3
  setup(
4
4
  name='cloudpub',
5
5
  description='Services for publishing products in cloud environments',
6
- version='1.6.0',
6
+ version='1.7.0',
7
7
  keywords='stratosphere cloudpub cloudpublish',
8
8
  author='Jonathan Gangi',
9
9
  author_email='jgangi@redhat.com',
@@ -0,0 +1,38 @@
1
+ from abc import ABC
2
+ from typing import Any, Dict
3
+
4
+ import pytest
5
+
6
+ from cloudpub.common import BaseService, PublishingMetadata
7
+
8
+
9
+ class TestPublishingMetadata:
10
+ def test_publishing_metadata_with_defaults(self, common_metadata: Dict[str, Any]) -> None:
11
+ m = PublishingMetadata(**common_metadata)
12
+ err_template = "The attribute \"{attribute}\" must default to \"{default}\"."
13
+ assert m.overwrite is False, err_template.format(attribute="overwrite", default="False")
14
+ assert m.keepdraft is False, err_template.format(attribute="keepdraft", default="False")
15
+
16
+ @pytest.mark.parametrize(
17
+ "invalid_dict,expected_err",
18
+ [
19
+ ({"architecture": None}, "The parameter \"architecture\" must not be None."),
20
+ ({"destination": None}, "The parameter \"destination\" must not be None."),
21
+ ({"image_path": None}, "The parameter \"image_path\" must not be None."),
22
+ ],
23
+ )
24
+ def test_metadata_invalid(
25
+ self, invalid_dict: Dict[str, str], expected_err: str, common_metadata: Dict[str, Any]
26
+ ) -> None:
27
+ common_metadata.update(invalid_dict)
28
+
29
+ with pytest.raises(ValueError, match=expected_err):
30
+ PublishingMetadata(**common_metadata)
31
+
32
+
33
+ class TestBaseService:
34
+ def test_has_publish(self) -> None:
35
+ assert hasattr(BaseService, "publish"), "The abstract method \"publish\" is not defined."
36
+
37
+ def test_base_service_abstract(self) -> None:
38
+ assert issubclass(BaseService, ABC), "The BaseService must be a subclass of \"ABC\"."
@@ -0,0 +1,34 @@
1
+ import logging
2
+
3
+ import pytest
4
+ from _pytest.logging import LogCaptureFixture
5
+ from attrs import define, field
6
+
7
+ from cloudpub.models.common import AttrsJSONDecodeMixin
8
+
9
+
10
+ @define
11
+ class FooClass(AttrsJSONDecodeMixin):
12
+ foo: str = field(metadata={"const": "FIXED_VALUE"})
13
+ bar: str
14
+
15
+
16
+ def test_decode_invalid_json(caplog: LogCaptureFixture) -> None:
17
+ expected_err = "Got an unsupported JSON type: \"<class 'str'>\". Expected: \"<class 'dict'>\'"
18
+
19
+ with caplog.at_level(logging.ERROR):
20
+ with pytest.raises(ValueError, match=expected_err):
21
+ AttrsJSONDecodeMixin.from_json("invalid")
22
+ assert expected_err in caplog.text
23
+
24
+
25
+ def test_constant():
26
+ test_data = {
27
+ "foo": "bar",
28
+ "bar": "foo",
29
+ }
30
+
31
+ a = FooClass.from_json(test_data)
32
+
33
+ assert a.foo == "FIXED_VALUE"
34
+ assert a.bar == "foo"
@@ -0,0 +1,19 @@
1
+ from typing import Dict
2
+
3
+ import pytest
4
+
5
+ from cloudpub.utils import get_url_params
6
+
7
+
8
+ @pytest.mark.parametrize(
9
+ "url,params",
10
+ [
11
+ ("https://foo.com/bar", {}),
12
+ ("https://foo.com/bar?foo=bar", {"foo": "bar"}),
13
+ ("https://foo.com/bar?foo=bar", {"foo": "bar"}),
14
+ ("https://foo.com/bar?foo=bar&test=pass", {"foo": "bar", "test": "pass"}),
15
+ ],
16
+ )
17
+ def test_get_url_params(url: str, params: Dict[str, str]) -> None:
18
+ p = get_url_params(url)
19
+ assert p == params
@@ -1,153 +0,0 @@
1
- #
2
- # This file is autogenerated by pip-compile with python 3.10
3
- # To update, run:
4
- #
5
- # pip-compile --cert=None --client-cert=None --generate-hashes --index-url=None --output-file=requirements.txt --pip-args=None
6
- #
7
- attrs==25.4.0 \
8
- --hash=sha256:16d5969b87f0859ef33a48b35d55ac1be6e42ae49d5e853b597db70c35c57e11 \
9
- --hash=sha256:adcf7e2a1fb3b36ac48d97835bb6d8ade15b8dcce26aba8bf1d14847b57a3373
10
- # via cloudpub (setup.py)
11
- boto3==1.40.50 \
12
- --hash=sha256:62901bc616c64236700001f530fc66b659ecd1acb4f541ddac6fcae3a1d37ea6 \
13
- --hash=sha256:ae34363e8f34a49ab130d10c507a611926c1101d5d14d70be5598ca308e13266
14
- # via cloudpub (setup.py)
15
- botocore==1.40.50 \
16
- --hash=sha256:1d3d5b5759c9cb30202cd5ad231ec8afb1abe5be0c088a1707195c2cbae0e742 \
17
- --hash=sha256:53126c153fae0670dc54f03d01c89b1af144acedb1020199b133dedb309e434d
18
- # via
19
- # boto3
20
- # s3transfer
21
- certifi==2025.10.5 \
22
- --hash=sha256:0f212c2744a9bb6de0c56639a6f68afe01ecd92d91f14ae897c4fe7bbeeef0de \
23
- --hash=sha256:47c09d31ccf2acf0be3f701ea53595ee7e0b8fa08801c6624be771df09ae7b43
24
- # via requests
25
- charset-normalizer==3.4.3 \
26
- --hash=sha256:00237675befef519d9af72169d8604a067d92755e84fe76492fef5441db05b91 \
27
- --hash=sha256:02425242e96bcf29a49711b0ca9f37e451da7c70562bc10e8ed992a5a7a25cc0 \
28
- --hash=sha256:027b776c26d38b7f15b26a5da1044f376455fb3766df8fc38563b4efbc515154 \
29
- --hash=sha256:07a0eae9e2787b586e129fdcbe1af6997f8d0e5abaa0bc98c0e20e124d67e601 \
30
- --hash=sha256:0cacf8f7297b0c4fcb74227692ca46b4a5852f8f4f24b3c766dd94a1075c4884 \
31
- --hash=sha256:0e78314bdc32fa80696f72fa16dc61168fda4d6a0c014e0380f9d02f0e5d8a07 \
32
- --hash=sha256:0f2be7e0cf7754b9a30eb01f4295cc3d4358a479843b31f328afd210e2c7598c \
33
- --hash=sha256:13faeacfe61784e2559e690fc53fa4c5ae97c6fcedb8eb6fb8d0a15b475d2c64 \
34
- --hash=sha256:14c2a87c65b351109f6abfc424cab3927b3bdece6f706e4d12faaf3d52ee5efe \
35
- --hash=sha256:1606f4a55c0fd363d754049cdf400175ee96c992b1f8018b993941f221221c5f \
36
- --hash=sha256:16a8770207946ac75703458e2c743631c79c59c5890c80011d536248f8eaa432 \
37
- --hash=sha256:18343b2d246dc6761a249ba1fb13f9ee9a2bcd95decc767319506056ea4ad4dc \
38
- --hash=sha256:18b97b8404387b96cdbd30ad660f6407799126d26a39ca65729162fd810a99aa \
39
- --hash=sha256:1bb60174149316da1c35fa5233681f7c0f9f514509b8e399ab70fea5f17e45c9 \
40
- --hash=sha256:1e8ac75d72fa3775e0b7cb7e4629cec13b7514d928d15ef8ea06bca03ef01cae \
41
- --hash=sha256:1ef99f0456d3d46a50945c98de1774da86f8e992ab5c77865ea8b8195341fc19 \
42
- --hash=sha256:2001a39612b241dae17b4687898843f254f8748b796a2e16f1051a17078d991d \
43
- --hash=sha256:23b6b24d74478dc833444cbd927c338349d6ae852ba53a0d02a2de1fce45b96e \
44
- --hash=sha256:252098c8c7a873e17dd696ed98bbe91dbacd571da4b87df3736768efa7a792e4 \
45
- --hash=sha256:257f26fed7d7ff59921b78244f3cd93ed2af1800ff048c33f624c87475819dd7 \
46
- --hash=sha256:2c322db9c8c89009a990ef07c3bcc9f011a3269bc06782f916cd3d9eed7c9312 \
47
- --hash=sha256:30a96e1e1f865f78b030d65241c1ee850cdf422d869e9028e2fc1d5e4db73b92 \
48
- --hash=sha256:30d006f98569de3459c2fc1f2acde170b7b2bd265dc1943e87e1a4efe1b67c31 \
49
- --hash=sha256:31a9a6f775f9bcd865d88ee350f0ffb0e25936a7f930ca98995c05abf1faf21c \
50
- --hash=sha256:320e8e66157cc4e247d9ddca8e21f427efc7a04bbd0ac8a9faf56583fa543f9f \
51
- --hash=sha256:34a7f768e3f985abdb42841e20e17b330ad3aaf4bb7e7aeeb73db2e70f077b99 \
52
- --hash=sha256:3653fad4fe3ed447a596ae8638b437f827234f01a8cd801842e43f3d0a6b281b \
53
- --hash=sha256:3cd35b7e8aedeb9e34c41385fda4f73ba609e561faedfae0a9e75e44ac558a15 \
54
- --hash=sha256:3cfb2aad70f2c6debfbcb717f23b7eb55febc0bb23dcffc0f076009da10c6392 \
55
- --hash=sha256:416175faf02e4b0810f1f38bcb54682878a4af94059a1cd63b8747244420801f \
56
- --hash=sha256:41d1fc408ff5fdfb910200ec0e74abc40387bccb3252f3f27c0676731df2b2c8 \
57
- --hash=sha256:42e5088973e56e31e4fa58eb6bd709e42fc03799c11c42929592889a2e54c491 \
58
- --hash=sha256:4ca4c094de7771a98d7fbd67d9e5dbf1eb73efa4f744a730437d8a3a5cf994f0 \
59
- --hash=sha256:511729f456829ef86ac41ca78c63a5cb55240ed23b4b737faca0eb1abb1c41bc \
60
- --hash=sha256:53cd68b185d98dde4ad8990e56a58dea83a4162161b1ea9272e5c9182ce415e0 \
61
- --hash=sha256:585f3b2a80fbd26b048a0be90c5aae8f06605d3c92615911c3a2b03a8a3b796f \
62
- --hash=sha256:5b413b0b1bfd94dbf4023ad6945889f374cd24e3f62de58d6bb102c4d9ae534a \
63
- --hash=sha256:5d8d01eac18c423815ed4f4a2ec3b439d654e55ee4ad610e153cf02faf67ea40 \
64
- --hash=sha256:6aab0f181c486f973bc7262a97f5aca3ee7e1437011ef0c2ec04b5a11d16c927 \
65
- --hash=sha256:6cf8fd4c04756b6b60146d98cd8a77d0cdae0e1ca20329da2ac85eed779b6849 \
66
- --hash=sha256:6fb70de56f1859a3f71261cbe41005f56a7842cc348d3aeb26237560bfa5e0ce \
67
- --hash=sha256:6fce4b8500244f6fcb71465d4a4930d132ba9ab8e71a7859e6a5d59851068d14 \
68
- --hash=sha256:70bfc5f2c318afece2f5838ea5e4c3febada0be750fcf4775641052bbba14d05 \
69
- --hash=sha256:73dc19b562516fc9bcf6e5d6e596df0b4eb98d87e4f79f3ae71840e6ed21361c \
70
- --hash=sha256:74d77e25adda8581ffc1c720f1c81ca082921329452eba58b16233ab1842141c \
71
- --hash=sha256:78deba4d8f9590fe4dae384aeff04082510a709957e968753ff3c48399f6f92a \
72
- --hash=sha256:86df271bf921c2ee3818f0522e9a5b8092ca2ad8b065ece5d7d9d0e9f4849bcc \
73
- --hash=sha256:88ab34806dea0671532d3f82d82b85e8fc23d7b2dd12fa837978dad9bb392a34 \
74
- --hash=sha256:8999f965f922ae054125286faf9f11bc6932184b93011d138925a1773830bbe9 \
75
- --hash=sha256:8dcfc373f888e4fb39a7bc57e93e3b845e7f462dacc008d9749568b1c4ece096 \
76
- --hash=sha256:939578d9d8fd4299220161fdd76e86c6a251987476f5243e8864a7844476ba14 \
77
- --hash=sha256:96b2b3d1a83ad55310de8c7b4a2d04d9277d5591f40761274856635acc5fcb30 \
78
- --hash=sha256:a2d08ac246bb48479170408d6c19f6385fa743e7157d716e144cad849b2dd94b \
79
- --hash=sha256:b256ee2e749283ef3ddcff51a675ff43798d92d746d1a6e4631bf8c707d22d0b \
80
- --hash=sha256:b5e3b2d152e74e100a9e9573837aba24aab611d39428ded46f4e4022ea7d1942 \
81
- --hash=sha256:b89bc04de1d83006373429975f8ef9e7932534b8cc9ca582e4db7d20d91816db \
82
- --hash=sha256:bd28b817ea8c70215401f657edef3a8aa83c29d447fb0b622c35403780ba11d5 \
83
- --hash=sha256:c60e092517a73c632ec38e290eba714e9627abe9d301c8c8a12ec32c314a2a4b \
84
- --hash=sha256:c6dbd0ccdda3a2ba7c2ecd9d77b37f3b5831687d8dc1b6ca5f56a4880cc7b7ce \
85
- --hash=sha256:c6e490913a46fa054e03699c70019ab869e990270597018cef1d8562132c2669 \
86
- --hash=sha256:c6f162aabe9a91a309510d74eeb6507fab5fff92337a15acbe77753d88d9dcf0 \
87
- --hash=sha256:c6fd51128a41297f5409deab284fecbe5305ebd7e5a1f959bee1c054622b7018 \
88
- --hash=sha256:cc34f233c9e71701040d772aa7490318673aa7164a0efe3172b2981218c26d93 \
89
- --hash=sha256:cc9370a2da1ac13f0153780040f465839e6cccb4a1e44810124b4e22483c93fe \
90
- --hash=sha256:ccf600859c183d70eb47e05a44cd80a4ce77394d1ac0f79dbd2dd90a69a3a049 \
91
- --hash=sha256:ce571ab16d890d23b5c278547ba694193a45011ff86a9162a71307ed9f86759a \
92
- --hash=sha256:cf1ebb7d78e1ad8ec2a8c4732c7be2e736f6e5123a4146c5b89c9d1f585f8cef \
93
- --hash=sha256:d0e909868420b7049dafd3a31d45125b31143eec59235311fc4c57ea26a4acd2 \
94
- --hash=sha256:d22dbedd33326a4a5190dd4fe9e9e693ef12160c77382d9e87919bce54f3d4ca \
95
- --hash=sha256:d716a916938e03231e86e43782ca7878fb602a125a91e7acb8b5112e2e96ac16 \
96
- --hash=sha256:d79c198e27580c8e958906f803e63cddb77653731be08851c7df0b1a14a8fc0f \
97
- --hash=sha256:d95bfb53c211b57198bb91c46dd5a2d8018b3af446583aab40074bf7988401cb \
98
- --hash=sha256:e28e334d3ff134e88989d90ba04b47d84382a828c061d0d1027b1b12a62b39b1 \
99
- --hash=sha256:ec557499516fc90fd374bf2e32349a2887a876fbf162c160e3c01b6849eaf557 \
100
- --hash=sha256:fb6fecfd65564f208cbf0fba07f107fb661bcd1a7c389edbced3f7a493f70e37 \
101
- --hash=sha256:fb731e5deb0c7ef82d698b0f4c5bb724633ee2a489401594c5c88b02e6cb15f7 \
102
- --hash=sha256:fb7f67a1bfa6e40b438170ebdc8158b78dc465a5a67b6dde178a46987b244a72 \
103
- --hash=sha256:fd10de089bcdcd1be95a2f73dbe6254798ec1bda9f450d5828c96f93e2536b9c \
104
- --hash=sha256:fdabf8315679312cfa71302f9bd509ded4f2f263fb5b765cf1433b39106c3cc9
105
- # via requests
106
- deepdiff==8.6.1 \
107
- --hash=sha256:ec56d7a769ca80891b5200ec7bd41eec300ced91ebcc7797b41eb2b3f3ff643a \
108
- --hash=sha256:ee8708a7f7d37fb273a541fa24ad010ed484192cd0c4ffc0fa0ed5e2d4b9e78b
109
- # via cloudpub (setup.py)
110
- idna==3.11 \
111
- --hash=sha256:771a87f49d9defaf64091e6e6fe9c18d4833f140bd19464795bc32d966ca37ea \
112
- --hash=sha256:795dafcc9c04ed0c1fb032c2aa73654d8e8c5023a7df64a53f39190ada629902
113
- # via requests
114
- jmespath==1.0.1 \
115
- --hash=sha256:02e2e4cc71b5bcab88332eebf907519190dd9e6e82107fa7f83b1003a6252980 \
116
- --hash=sha256:90261b206d6defd58fdd5e85f478bf633a2901798906be2ad389150c5c60edbe
117
- # via
118
- # boto3
119
- # botocore
120
- orderly-set==5.5.0 \
121
- --hash=sha256:46f0b801948e98f427b412fcabb831677194c05c3b699b80de260374baa0b1e7 \
122
- --hash=sha256:e87185c8e4d8afa64e7f8160ee2c542a475b738bc891dc3f58102e654125e6ce
123
- # via deepdiff
124
- packaging==25.0 \
125
- --hash=sha256:29572ef2b1f17581046b3a2227d5c611fb25ec70ca1ba8554b24b0e69331a484 \
126
- --hash=sha256:d443872c98d677bf60f6a1f2f8c1cb748e8fe762d2bf9d3148b5599295b0fc4f
127
- # via cloudpub (setup.py)
128
- python-dateutil==2.9.0.post0 \
129
- --hash=sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3 \
130
- --hash=sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427
131
- # via botocore
132
- requests==2.32.5 \
133
- --hash=sha256:2462f94637a34fd532264295e186976db0f5d453d1cdd31473c85a6a161affb6 \
134
- --hash=sha256:dbba0bac56e100853db0ea71b82b4dfd5fe2bf6d3754a8893c3af500cec7d7cf
135
- # via cloudpub (setup.py)
136
- s3transfer==0.14.0 \
137
- --hash=sha256:ea3b790c7077558ed1f02a3072fb3cb992bbbd253392f4b6e9e8976941c7d456 \
138
- --hash=sha256:eff12264e7c8b4985074ccce27a3b38a485bb7f7422cc8046fee9be4983e4125
139
- # via boto3
140
- six==1.17.0 \
141
- --hash=sha256:4721f391ed90541fddacab5acf947aa0d3dc7d27b2e1e8eda2be8970586c3274 \
142
- --hash=sha256:ff70335d468e7eb6ec65b95b99d3a2836546063f63acc5171de367e834932a81
143
- # via python-dateutil
144
- tenacity==9.1.2 \
145
- --hash=sha256:1169d376c297e7de388d18b4481760d478b0e99a777cad3a9c86e556f4b697cb \
146
- --hash=sha256:f77bf36710d8b73a50b2dd155c97b870017ad21afe6ab300326b0371b3b05138
147
- # via cloudpub (setup.py)
148
- urllib3==2.5.0 \
149
- --hash=sha256:3fc47733c7e419d4bc3f6b3dc2b4f890bb743906a30d56ba4a5bfa4bbff92760 \
150
- --hash=sha256:e6b01673c0fa6a13e374b50871808eb3bf7046c4b125b216f6bf1cc604cff0dc
151
- # via
152
- # botocore
153
- # requests
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes