spl-core 7.14.0__py3-none-any.whl → 7.14.0rc1.dev1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
spl_core/__init__.py CHANGED
@@ -1 +1 @@
- __version__ = "7.14.0"
+ __version__ = "7.14.0-rc1.dev.1"
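The new __version__ string "7.14.0-rc1.dev.1" is not in canonical PEP 440 form; packaging tools normalize it to "7.14.0rc1.dev1", which is why the wheel filename, METADATA and RECORD entries below use that spelling. A minimal sketch of the normalization, assuming the third-party packaging library is available (not part of spl-core):

    # pep440_normalization.py - illustrative sketch only
    from packaging.version import Version

    raw = "7.14.0-rc1.dev.1"           # value written to spl_core/__init__.py
    print(str(Version(raw)))           # "7.14.0rc1.dev1" - canonical PEP 440 form
    print(Version(raw).is_prerelease)  # True - rc and dev segments mark a pre-release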
spl_core/test_utils/artifacts_archiver.py CHANGED
@@ -1,47 +1,10 @@
  import json
  import os
  from dataclasses import dataclass
- from datetime import datetime, timezone
  from pathlib import Path
- from typing import Any, Dict, List, Optional
+ from typing import Dict, List, Optional
 
  import py7zr
- from py_app_dev.core.logging import logger
- from py_app_dev.core.subprocess import SubprocessExecutor
-
-
- @dataclass
- class BuildMetadata:
- """
- Contains build metadata extracted from environment variables.
-
- Attributes:
- branch_name: The branch name, PR identifier (e.g., "PR-123"), or tag name
- build_number: The build number or "local_build"
- is_tag: Whether this is a tag build
- pr_number: The PR number (without "PR-" prefix) for pull request builds, None otherwise
- """
-
- branch_name: str
- build_number: str
- is_tag: bool
- pr_number: Optional[str]
-
-
- @dataclass
- class GitMetadata:
- """
- Contains git metadata extracted from environment variables or git commands.
-
- Attributes:
- commit_id: The git commit SHA (full hash)
- commit_message: The git commit message subject line (first line only)
- repository_url: The git repository URL (from remote.origin.url)
- """
-
- commit_id: Optional[str]
- commit_message: Optional[str]
- repository_url: Optional[str]
 
 
  class ArtifactsArchive:
@@ -120,7 +83,7 @@ class ArtifactsArchive:
  archive_path.unlink()
 
  if not self.archive_artifacts:
- logger.warning("No artifacts registered for archiving")
+ print("Warning: No artifacts registered for archiving")
  # Create empty 7z file
  with py7zr.SevenZipFile(archive_path, "w") as archive:
  pass
@@ -130,7 +93,7 @@ class ArtifactsArchive:
  with py7zr.SevenZipFile(archive_path, "w") as archive:
  for artifact in self.archive_artifacts:
  if not artifact.absolute_path.exists():
- logger.warning(f"Artifact {artifact.absolute_path} does not exist, skipping")
+ print(f"Warning: Artifact {artifact.absolute_path} does not exist, skipping")
  continue
 
  try:
@@ -140,13 +103,13 @@ class ArtifactsArchive:
  # py7zr can handle directories directly
  archive.writeall(artifact.absolute_path, arcname=str(artifact.archive_path))
  except Exception as file_error:
- logger.warning(f"Failed to add {artifact.absolute_path} to archive: {file_error}")
+ print(f"Warning: Failed to add {artifact.absolute_path} to archive: {file_error}")
  continue
 
- logger.info(f"7z file created at: {archive_path}")
+ print(f"7z file created at: {archive_path}")
  return archive_path
  except Exception as e:
- logger.error(f"Error creating artifacts 7z file: {e}")
+ print(f"Error creating artifacts 7z file: {e}")
  raise e
 
 
@@ -156,10 +119,9 @@ class ArtifactsArchiver:
  It provides a unified interface for registering artifacts to different archives.
  """
 
- def __init__(self, artifactory_base_url: Optional[str] = None) -> None:
+ def __init__(self) -> None:
  self.archives: Dict[str, ArtifactsArchive] = {}
  self._target_repos: Dict[str, str] = {}
- self.artifactory_base_url = artifactory_base_url
 
  def add_archive(self, out_dir: Path, archive_filename: str, target_repo: Optional[str] = None, archive_name: str = "default") -> ArtifactsArchive:
  """
@@ -215,37 +177,6 @@ class ArtifactsArchiver:
 
  return self.archives[archive_name]
 
- def get_archive_url(self, archive_name: str = "default") -> Optional[str]:
- """
- Get the Artifactory URL for a specific archive.
-
- Args:
- archive_name: Name of the archive (defaults to "default")
-
- Returns:
- The full Artifactory URL for the archive, or None if no target repo configured
-
- Example:
- "https://artifactory.example.com/artifactory/my-repo/results/develop/123/result.7z"
- """
- if archive_name not in self.archives:
- return None
-
- if archive_name not in self._target_repos:
- return None
-
- if self.artifactory_base_url is None:
- return None
-
- archive = self.archives[archive_name]
- target_repo = self._target_repos[archive_name]
- metadata = self._get_build_metadata()
-
- # Construct the URL following the same pattern as create_rt_upload_json
- archive_url = f"{self.artifactory_base_url}/{target_repo}/{metadata.branch_name}/{metadata.build_number}/{archive.archive_name}"
-
- return archive_url
-
  def create_all_archives(self) -> Dict[str, Path]:
  """
  Create all registered archives.
@@ -283,26 +214,28 @@ class ArtifactsArchiver:
  else:
  return 28 # 4 weeks for PRs, feature branches, and other branches
 
- @staticmethod
- def _get_build_metadata() -> BuildMetadata:
+ def create_rt_upload_json(self, out_dir: Path) -> Path:
  """
- Get build metadata from environment variables or defaults.
+ Create a single rt-upload.json file containing all archives.
 
- Detects Jenkins environment variables when available, otherwise falls back
- to local development defaults.
+ This function replicates the logic from the Jenkinsfile for determining the RT_TARGET
+ and creating the upload specification file. It uses Jenkins environment variables
+ when available, otherwise falls back to default values.
+
+ Args:
+ output_dir: Directory where the rt-upload.json file will be created
 
  Returns:
- BuildMetadata instance containing:
- - branch_name: The branch name, PR identifier (e.g., "PR-123"), or tag name
- - build_number: The build number or "local_build"
- - is_tag: Whether this is a tag build
- - pr_number: The PR number (without "PR-" prefix) for pull requests, None otherwise
+ Path to the created rt-upload.json file
  """
+ # Set local defaults first
+ change_id = None
  branch_name = "local_branch"
  build_number = "local_build"
  is_tag = False
- pr_number = None
 
+ # Adapt values when Jenkins environment is detected
+ # TODO: check if an existing library can be used for CI context detection
  if os.environ.get("JENKINS_URL"):
  change_id = os.environ.get("CHANGE_ID")
  jenkins_branch_name = os.environ.get("BRANCH_NAME")
@@ -312,7 +245,6 @@ class ArtifactsArchiver:
  if change_id:
  # Pull request case
  branch_name = f"PR-{change_id}"
- pr_number = change_id
  elif tag_name:
  # Tag build case
  branch_name = tag_name
@@ -324,99 +256,8 @@ class ArtifactsArchiver:
  if jenkins_build_number:
  build_number = jenkins_build_number
 
- return BuildMetadata(
- branch_name=branch_name,
- build_number=build_number,
- is_tag=is_tag,
- pr_number=pr_number,
- )
-
- @staticmethod
- def _get_git_metadata() -> GitMetadata:
- """
- Get git metadata from environment variables or git commands.
-
- Attempts to retrieve git information in the following order:
- 1. Environment variables (GIT_COMMIT, GIT_URL) - typically set by Jenkins Git plugin
- 2. Git commands as fallback - executed locally using git CLI
-
- The commit message captured is only the subject line (first line), not the full message.
-
- Returns:
- GitMetadata instance containing:
- - commit_id: The git commit SHA, or None if unavailable
- - commit_message: The commit subject line (first line only), or None if unavailable
- - repository_url: The git repository URL, or None if unavailable
- """
- commit_id = None
- commit_message = None
- repository_url = None
-
- # Try environment variables first (Jenkins Git plugin)
- env_commit = os.environ.get("GIT_COMMIT")
- env_url = os.environ.get("GIT_URL")
-
- if env_commit:
- commit_id = env_commit if env_commit.strip() else None
- if env_url:
- repository_url = env_url if env_url.strip() else None
-
- # Fallback to git commands if environment variables not available
- # Get commit ID
- if not commit_id:
- try:
- result = SubprocessExecutor(["git", "rev-parse", "HEAD"]).execute(handle_errors=False)
- if result and result.returncode == 0:
- value = result.stdout.strip()
- commit_id = value if value else None
- except Exception as e:
- logger.warning(f"Failed to get commit ID from git: {e}")
-
- # Get commit message (subject line only)
- if not commit_message:
- try:
- result = SubprocessExecutor(["git", "log", "-1", "--format=%s"]).execute(handle_errors=False)
- if result and result.returncode == 0:
- value = result.stdout.strip()
- commit_message = value if value else None
- except Exception as e:
- logger.warning(f"Failed to get commit message from git: {e}")
-
- # Get repository URL
- if not repository_url:
- try:
- result = SubprocessExecutor(["git", "config", "--get", "remote.origin.url"]).execute(handle_errors=False)
- if result and result.returncode == 0:
- value = result.stdout.strip()
- repository_url = value if value else None
- except Exception as e:
- logger.warning(f"Failed to get repository URL from git: {e}")
-
- return GitMetadata(
- commit_id=commit_id,
- commit_message=commit_message,
- repository_url=repository_url,
- )
-
- def create_rt_upload_json(self, out_dir: Path) -> Path:
- """
- Create a single rt-upload.json file containing all archives.
-
- This function replicates the logic from the Jenkinsfile for determining the RT_TARGET
- and creating the upload specification file. It uses Jenkins environment variables
- when available, otherwise falls back to default values.
-
- Args:
- output_dir: Directory where the rt-upload.json file will be created
-
- Returns:
- Path to the created rt-upload.json file
- """
- # Get build metadata from environment or defaults
- metadata = self._get_build_metadata()
-
  # Calculate retention period based on branch/tag
- retention_period = self.calculate_retention_period(metadata.branch_name, metadata.is_tag)
+ retention_period = self.calculate_retention_period(branch_name, is_tag)
 
  # Create the files array for Artifactory upload format
  files_array = []
@@ -426,7 +267,7 @@ class ArtifactsArchiver:
  target_repo = self._target_repos[archive_name]
 
  # Construct the RT target path
- rt_target = f"{target_repo}/{metadata.branch_name}/{metadata.build_number}/"
+ rt_target = f"{target_repo}/{branch_name}/{build_number}/"
 
  # Add this archive to the files array with retention_period property
  files_array.append(
@@ -450,137 +291,6 @@ class ArtifactsArchiver:
 
  return json_path
 
- def create_artifacts_json(self, variant: str, out_dir: Path) -> Path:
- """
- Create an initial artifacts.json file with build metadata structure.
-
- This function creates a fresh artifacts.json file with build metadata
- but no artifacts. Use update_artifacts_json() to add artifact categories.
- It uses Jenkins environment variables when available, otherwise falls back to default values.
-
- The JSON file includes conditional keys based on the build type:
- - For pull requests: includes "pull_request" key with the PR number (e.g., "117")
- - For tag builds: includes "tag" key with the tag name (e.g., "v1.2.3")
- - For regular branch builds: includes "branch" key with the branch name (e.g., "develop")
-
- Optional fields (included only if available):
- - build_url: Jenkins build URL from BUILD_URL environment variable
- - commit_id: Git commit SHA from GIT_COMMIT env var or git rev-parse HEAD
- - commit_message: Git commit subject line from git log (first line only)
- - repository_url: Git repository URL from GIT_URL env var or git config
-
- Args:
- variant: The variant name (e.g., "Disco")
- out_dir: Directory where the artifacts.json file will be created
-
- Returns:
- Path to the created artifacts.json file
-
- Raises:
- ValueError: If variant is empty or None
- """
- # Input validation
- if not variant or not variant.strip():
- raise ValueError("Variant name cannot be empty or None")
-
- # Get metadata from environment or defaults
- build_metadata = self._get_build_metadata()
- git_metadata = self._get_git_metadata()
-
- # Create the initial artifacts.json structure with base metadata
- artifacts_data: Dict[str, Any] = {
- "variant": variant,
- "build_timestamp": datetime.now(timezone.utc).isoformat(timespec="seconds") + "Z",
- "build_number": build_metadata.build_number,
- }
-
- # Add build_url if available
- build_url = os.environ.get("BUILD_URL")
- if build_url:
- artifacts_data["build_url"] = build_url
-
- # Add conditional keys based on build type
- if build_metadata.pr_number:
- # Pull request build
- artifacts_data["pull_request"] = build_metadata.pr_number
- elif build_metadata.is_tag:
- # Tag build
- artifacts_data["tag"] = build_metadata.branch_name
- else:
- # Regular branch build (or local build)
- artifacts_data["branch"] = build_metadata.branch_name
-
- # Add git metadata if available
- if git_metadata.commit_id:
- artifacts_data["commit_id"] = git_metadata.commit_id
- if git_metadata.commit_message:
- artifacts_data["commit_message"] = git_metadata.commit_message
- if git_metadata.repository_url:
- artifacts_data["repository_url"] = git_metadata.repository_url
-
- # Add empty artifacts dictionary
- artifacts_data["artifacts"] = {}
-
- # Create the artifacts.json file
- json_path = out_dir / "artifacts.json"
- json_path.parent.mkdir(parents=True, exist_ok=True)
-
- with open(json_path, "w") as f:
- json.dump(artifacts_data, f, indent=2)
-
- return json_path
-
- def update_artifacts_json(self, category: str, artifacts: Dict[str, str], artifacts_json_path: Path) -> Path:
- """
- Add or update artifacts in a specific category for the artifacts.json file.
-
- Args:
- category: The artifact category (e.g., "test_reports", "sca_reports", "build_binaries")
- artifacts: Dictionary mapping artifact names to their URLs/paths
- artifacts_json_path: Path to the artifacts.json file to be updated
-
- Returns:
- Path to the updated artifacts.json file
-
- Raises:
- ValueError: If category is empty, artifacts dictionary is empty, or JSON structure is invalid
- FileNotFoundError: If artifacts.json file does not exist
- """
- # Input validation
- if not category or not category.strip():
- raise ValueError("Category name cannot be empty or None")
- if not artifacts:
- raise ValueError("Artifacts dictionary cannot be empty")
-
- # Check if artifacts.json file exists
- if not artifacts_json_path.exists():
- raise FileNotFoundError(f"artifacts.json file does not exist at {artifacts_json_path}. Please create it first using create_artifacts_json().")
-
- # Read existing artifacts.json file
- try:
- with open(artifacts_json_path) as f:
- artifacts_data = json.load(f)
- except json.JSONDecodeError as e:
- raise ValueError(f"Could not parse artifacts.json: {e}") from e
- except OSError as e:
- raise ValueError(f"Could not read artifacts.json: {e}") from e
-
- # Validate that the file has the expected structure
- if not artifacts_data or "artifacts" not in artifacts_data:
- raise ValueError("artifacts.json file has invalid structure. Expected 'artifacts' section not found.")
-
- # Update the specific category with new artifacts
- if category in artifacts_data["artifacts"]:
- artifacts_data["artifacts"][category].update(artifacts)
- else:
- artifacts_data["artifacts"][category] = artifacts.copy()
-
- # Write the updated data back to the file
- with open(artifacts_json_path, "w") as f:
- json.dump(artifacts_data, f, indent=2)
-
- return artifacts_json_path
-
  def list_archives(self) -> List[str]:
  """
  Get a list of all archive names.
@@ -637,18 +347,3 @@ class ArtifactsArchiver:
  #
  # created_files = archiver.create_all_archives()
  # upload_json = archiver.create_rt_upload_json(Path("./build/output")) # only includes archives with target repos
- #
- # ## Artifacts.json use case (variant-specific metadata):
- # archiver = ArtifactsArchiver()
- # variant = "Disco"
- # out_dir = Path("./build/output")
- #
- # # Create initial artifacts.json file first, then add categories
- # # The resulting JSON will contain conditional keys based on build type:
- # # - For PRs: {"variant": "Disco", "build_timestamp": "...", "build_number": "123", "pull_request": "117", "artifacts": {}}
- # # - For tags: {"variant": "Disco", "build_timestamp": "...", "build_number": "123", "tag": "v1.2.3", "artifacts": {}}
- # # - For branches: {"variant": "Disco", "build_timestamp": "...", "build_number": "123", "branch": "develop", "artifacts": {}}
- # artifacts_json_path = archiver.create_artifacts_json(variant, out_dir)
- # archiver.update_artifacts_json("test_reports", test_reports, artifacts_json_path)
- # archiver.update_artifacts_json("sca_reports", sca_reports, artifacts_json_path)
- # archiver.update_artifacts_json("build_binaries", build_binaries, artifacts_json_path)
spl_core-7.14.0.dist-info/METADATA → spl_core-7.14.0rc1.dev1.dist-info/METADATA RENAMED
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: spl-core
- Version: 7.14.0
+ Version: 7.14.0rc1.dev1
  Summary: Software Product Line Support for CMake
  License: MIT
  License-File: LICENSE
@@ -31,14 +31,14 @@ Requires-Dist: sphinx (>=7.3,<8.0)
  Requires-Dist: sphinx-book-theme (>=1.1,<2.0)
  Requires-Dist: sphinx-copybutton (>=0.5,<0.6)
  Requires-Dist: sphinx-design (>=0.5,<0.7)
- Requires-Dist: sphinx-needs (>=2.0,<3.0)
+ Requires-Dist: sphinx-needs (>=2,<7)
  Requires-Dist: sphinx-new-tab-link (>=0.4,<0.9)
  Requires-Dist: sphinx-rtd-size (>=0.2,<0.3)
- Requires-Dist: sphinx-rtd-theme (>=2.0,<3.0)
+ Requires-Dist: sphinx-rtd-theme (>=2,<4)
  Requires-Dist: sphinx-test-reports (>=1.0,<2.0)
  Requires-Dist: sphinxcontrib-datatemplates (>=0.11,<0.12)
- Requires-Dist: sphinxcontrib-mermaid (>=0.9,<0.10)
- Requires-Dist: sphinxcontrib-plantuml (>=0.29,<0.31)
+ Requires-Dist: sphinxcontrib-mermaid (>=0.9,<1.1)
+ Requires-Dist: sphinxcontrib-plantuml (>=0.29,<0.32)
  Requires-Dist: typer (>=0,<1)
  Project-URL: Bug Tracker, https://github.com/avengineers/spl-core/issues
  Project-URL: Changelog, https://github.com/avengineers/spl-core/blob/develop/CHANGELOG.md
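The widened Requires-Dist ranges above are ordinary PEP 440 version specifiers. A minimal sketch of how such a range is evaluated, again assuming the third-party packaging library (the version numbers below are arbitrary examples, not verified releases):

    # specifier_check.py - illustrative sketch only
    from packaging.specifiers import SpecifierSet
    from packaging.version import Version

    spec = SpecifierSet(">=2,<7")      # new sphinx-needs constraint from METADATA
    print(Version("6.1.0") in spec)    # True: accepted by the widened range
    print(Version("1.4.0") in spec)    # False: below the minimum bound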
spl_core-7.14.0.dist-info/RECORD → spl_core-7.14.0rc1.dev1.dist-info/RECORD RENAMED
@@ -1,4 +1,4 @@
- spl_core/__init__.py,sha256=HyLrDM036NFqqqTSta893ALzgsTY2LDDQ1vUG5gp5B8,23
+ spl_core/__init__.py,sha256=G6RF3CCenxPNRixbd1rxgiDN02DG4kg0q5CrKmjUKso,33
  spl_core/__run.py,sha256=DphnN7_Bjiw_mOOztsHxTDHS8snz1g2MMWAaJpZxPKM,361
  spl_core/common/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  spl_core/common/path.py,sha256=sDujd3n4XP1XGjHc7ImXEdjihO6A8BOIDbKCf7HgQ0Y,462
@@ -61,11 +61,11 @@ spl_core/main.py,sha256=_hL4j155WZMXog_755bgAH1PeUwvTdJZvVdVw9EWhvo,1225
  spl_core/spl.cmake,sha256=YQMhpSJ9yZaJ34m_W1UqrlTh_r7AKMDuH6-hzK4w98A,4585
  spl_core/steps/collect_pr_changes.py,sha256=su3yCtSJM9XDlMNOD0L72ooQdKzruc0lUat858Na0Kg,5539
  spl_core/test_utils/archive_artifacts_collection.py,sha256=x7LH5dGIvssyhXsTFzB6rjgb5D2efKvHVpnjId3MNDk,5126
- spl_core/test_utils/artifacts_archiver.py,sha256=xrsYbjlqpg4M0Xy1zSjQNcxDXJctom1ZtzR74ZJp-rs,25996
+ spl_core/test_utils/artifacts_archiver.py,sha256=z1ckuHkgCpEFpTBc4HDGHbV3lp2-bUpZS0coxZ8BIUw,13368
  spl_core/test_utils/base_variant_test_runner.py,sha256=Oq27lkJlpB_y-p2_8S23F5zjn1438HW148q-hQNz3EY,3795
  spl_core/test_utils/spl_build.py,sha256=bSM6hwhTH9aRryvUvtSPDfk_zoZuKEO5g3QXK4SIrco,8442
- spl_core-7.14.0.dist-info/METADATA,sha256=ohKP-TUwbk9qndKLblwSRdOxRn6pB2rNgjy7gKZQ3f0,5314
- spl_core-7.14.0.dist-info/WHEEL,sha256=3ny-bZhpXrU6vSQ1UPG34FoxZBp3lVcvK0LkgUz6VLk,88
- spl_core-7.14.0.dist-info/entry_points.txt,sha256=18_sdVY93N1GVBiAHxQ_F9ZM-bBvOmVMOMn7PNe2EqU,45
- spl_core-7.14.0.dist-info/licenses/LICENSE,sha256=UjjA0o8f5tT3wVm7qodTLAhPWLl6kgVyn9FPAd1VeYY,1099
- spl_core-7.14.0.dist-info/RECORD,,
+ spl_core-7.14.0rc1.dev1.dist-info/METADATA,sha256=9lPlCYpWcLRc6gerhqZaag7vlvlVPGNItUZEglRZmBg,5313
+ spl_core-7.14.0rc1.dev1.dist-info/WHEEL,sha256=zp0Cn7JsFoX2ATtOhtaFYIiE2rmFAD4OcMhtUki8W3U,88
+ spl_core-7.14.0rc1.dev1.dist-info/entry_points.txt,sha256=18_sdVY93N1GVBiAHxQ_F9ZM-bBvOmVMOMn7PNe2EqU,45
+ spl_core-7.14.0rc1.dev1.dist-info/licenses/LICENSE,sha256=UjjA0o8f5tT3wVm7qodTLAhPWLl6kgVyn9FPAd1VeYY,1099
+ spl_core-7.14.0rc1.dev1.dist-info/RECORD,,
spl_core-7.14.0.dist-info/WHEEL → spl_core-7.14.0rc1.dev1.dist-info/WHEEL RENAMED
@@ -1,4 +1,4 @@
  Wheel-Version: 1.0
- Generator: poetry-core 2.3.0
+ Generator: poetry-core 2.2.1
  Root-Is-Purelib: true
  Tag: py3-none-any