divbase-lib 0.1.0.dev1__py3-none-any.whl → 0.1.0.dev3__py3-none-any.whl

This diff shows the content of publicly available package versions as released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the two versions as they appear in their public registries.
divbase_lib/__init__.py CHANGED
@@ -1 +1 @@
-__version__ = "0.1.0.dev1"
+__version__ = "0.1.0.dev3"
@@ -45,6 +45,7 @@ class BcftoolsQueryKwargs(BaseModel):
     project_id: int
     project_name: str
     user_id: int
+    job_id: int
 
 
 class SampleMetadataQueryTaskResult(BaseModel):
@@ -1,12 +1,73 @@
 """
 Schemas for DivBase's S3 API routes.
+
+Pre-signed download URLs do not need to account for single vs multipart as this can be controlled by the client
+using the HTTP range header when downloading (so you only need 1 pre-signed URL per object for download).
+
+Pre-signed upload URLs need to account for single vs multipart uploads hence all the extra schemas below.
 """
 
+from datetime import datetime
+
 from pydantic import BaseModel, Field
 
+from divbase_lib.divbase_constants import S3_MULTIPART_CHUNK_SIZE
+
+MB = 1024 * 1024
+
+
+## list objects models ##
+class ListObjectsRequest(BaseModel):
+    """Request model for listing objects in an S3 bucket."""
+
+    prefix: str | None = Field(None, description="Optional prefix to filter objects by name.")
+    next_token: str | None = Field(
+        None, description="Token to continue listing files from the end of a previous request."
+    )
+
+
+class ObjectDetails(BaseModel):
+    """Details about a single object in an S3 bucket."""
+
+    name: str = Field(..., description="The name of the object in the bucket.")
+    size: int = Field(..., description="The size of the object in bytes.")
+    last_modified: datetime = Field(..., description="The date and time the object was last modified.")
+    etag: str = Field(..., description="The ETag of the object, which is the MD5 checksum.")
+
+
+class ListObjectsResponse(BaseModel):
+    """Response model for listing objects in an S3 bucket."""
+
+    objects: list[ObjectDetails] = Field(
+        ..., description="A list of objects in the bucket.", min_length=0, max_length=1000
+    )
+    next_token: str | None = Field(
+        None, description="Token for fetching the next page of results. If None, no more results."
+    )
+
 
+## file info models ##
+class ObjectVersionInfo(BaseModel):
+    """Detailed information about a single version of an S3 object."""
+
+    version_id: str = Field(..., description="The version ID of the object.")
+    last_modified: datetime = Field(..., description="The date and time the object version was last modified.")
+    size: int = Field(..., description="The size of the object in bytes.")
+    etag: str = Field(..., description="The ETag of the object, which is the MD5 checksum.")
+    is_latest: bool = Field(..., description="Indicates if this is the latest version of the object.")
+
+
+class ObjectInfoResponse(BaseModel):
+    """Response model for detailed information about all versions of a single object stored in S3."""
+
+    object_name: str = Field(..., description="The name of the object.")
+    is_currently_deleted: bool = Field(..., description="True if the latest version of the object is a delete marker.")
+    versions: list[ObjectVersionInfo] = Field(..., description="A list of all versions of the object.")
+
+
+## download models ##
 class DownloadObjectRequest(BaseModel):
-    """Request model to upload a single object using a pre-signed URL."""
+    """Request model to download a single object using a pre-signed URL."""
 
     name: str = Field(..., description="Name of the object to be downloaded")
     version_id: str | None = Field(..., description="Version ID of the object, None if latest version")
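As the new module docstring above notes, one pre-signed URL per object is enough for downloads because the client controls chunking through the HTTP Range header. A hedged sketch of that idea follows; the requests-based helper and the fixed chunk size are illustrative assumptions, not code from divbase-lib (the URL itself would come from whatever PreSignedDownloadResponse the API returned).

# Hedged sketch only: `requests` and this ranged-GET loop are not part of divbase-lib.
from pathlib import Path

import requests

CHUNK = 32 * 1024 * 1024  # e.g. 32 MiB, matching S3_MULTIPART_CHUNK_SIZE in divbase_constants


def ranged_download(pre_signed_url: str, destination: Path, total_size: int) -> None:
    """Fetch one object in CHUNK-sized pieces over a single pre-signed URL using the HTTP Range header."""
    with destination.open("wb") as out:
        for start in range(0, total_size, CHUNK):
            end = min(start + CHUNK, total_size) - 1  # Range header bounds are inclusive
            resp = requests.get(pre_signed_url, headers={"Range": f"bytes={start}-{end}"}, timeout=120)
            resp.raise_for_status()
            out.write(resp.content)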
@@ -20,32 +81,126 @@ class PreSignedDownloadResponse(BaseModel):
     version_id: str | None = Field(..., description="Version ID of the object, None if latest version")
 
 
-class UploadObjectRequest(BaseModel):
-    """Request model to upload a single object using a pre-signed URL."""
+### Single-part upload models ###
+class UploadSinglePartObjectRequest(BaseModel):
+    """Request model to upload a single object as a single part using a pre-signed URL."""
 
     name: str = Field(..., description="Name of the object to be uploaded")
     content_length: int = Field(..., description="Size of the file in bytes")
     md5_hash: str | None = Field(None, description="Optional MD5 hash of the object for integrity check")
 
 
-class PreSignedUploadResponse(BaseModel):
-    """Response model to upload a single object using the pre-signed URL using PUT."""
+class PreSignedSinglePartUploadResponse(BaseModel):
+    """Response model to upload a single object as a single part using the pre-signed URL using PUT."""
 
     name: str = Field(..., description="Name of the object to be uploaded")
     pre_signed_url: str = Field(..., description="Pre-signed URL to which the file should be uploaded")
     put_headers: dict[str, str] = Field(..., description="Headers to be included in the PUT request")
 
 
-class CheckFileExistsRequest(BaseModel):
-    """Request model to check if a file already exists in the bucket (using the checksum)"""
+### Multipart upload models ###
+class CreateMultipartUploadRequest(BaseModel):
+    """Request model to create a multipart upload using pre-signed URLs."""
+
+    name: str = Field(..., description="Name of the object to be uploaded")
+    content_length: int = Field(..., description="Size of the file in bytes")
+
+
+class CreateMultipartUploadResponse(BaseModel):
+    """Response model to create a multipart upload using pre-signed URLs."""
+
+    name: str = Field(..., description="Name of the object to be uploaded")
+    upload_id: str = Field(..., description="Upload ID for the multipart upload")
+    number_of_parts: int = Field(..., description="Total number of parts required for the upload", ge=1, le=10000)
+    part_size: int = Field(
+        S3_MULTIPART_CHUNK_SIZE, description="Size of each part in bytes (the last part may be smaller)."
+    )
+
+
+class GetPresignedPartUrlsRequest(BaseModel):
+    """
+    Request model to get pre-signed URLs for multiple parts of a presigned multipart upload.
+
+    You can request up to 100 parts at a time.
+    Part number indexing is 1-based (with max allowed range: 1 to 10000).
+    """
+
+    name: str = Field(..., description="Name of the object to be uploaded")
+    upload_id: str = Field(..., description="Upload ID for the multipart upload")
+    parts_range_start: int = Field(..., description="Starting part number", ge=1, le=10000)
+    parts_range_end: int = Field(..., description="Ending part number", ge=1, le=10000)
+    md5_checksums: list[str] | None = Field(
+        None, description="Optional list of MD5 checksums for each part to be uploaded"
+    )
+
+
+class PresignedUploadPartUrlResponse(BaseModel):
+    """Response model for a pre-signed URL for a single part of a multipart upload."""
+
+    part_number: int = Field(..., description="Part number", ge=1, le=10000)
+    pre_signed_url: str = Field(..., description="Pre-signed URL for uploading this part")
+    headers: dict[str, str] = Field(..., description="Headers to be included in the PUT request for this part")
+
+
+class UploadedPart(BaseModel):
+    """Model representing a part of an object that has been uploaded via multi-part upload."""
+
+    part_number: int = Field(..., description="Part number", ge=1, le=10000)
+    etag: str = Field(description="ETag returned by S3 after uploading the part")
+
+
+class CompleteMultipartUploadRequest(BaseModel):
+    """Request model to complete a multipart upload using pre-signed URLs."""
+
+    name: str = Field(..., description="Name of the object to be uploaded")
+    upload_id: str = Field(..., description="Upload ID for the multipart upload")
+    parts: list[UploadedPart] = Field(..., description="List of parts that have been uploaded")
+
+
+class CompleteMultipartUploadResponse(BaseModel):
+    """Response model to complete a multipart upload using pre-signed URLs."""
+
+    name: str = Field(..., description="Name of the object that was uploaded")
+    version_id: str = Field(..., description="Version ID of the uploaded object")
+    md5_hash: str = Field(..., description="MD5 hash of the uploaded object")
+
+
+class AbortMultipartUploadRequest(BaseModel):
+    """Request model to abort a multipart upload and clean up parts."""
+
+    name: str = Field(..., description="Name of the object being uploaded")
+    upload_id: str = Field(..., description="Upload ID for the multipart upload to be aborted")
+
+
+class AbortMultipartUploadResponse(BaseModel):
+    """Response model to abort a multipart upload."""
+
+    name: str = Field(..., description="Name of the object being uploaded")
+    upload_id: str = Field(..., description="Upload ID for the multipart upload that was aborted")
+
+
+class RestoreObjectsResponse(BaseModel):
+    """Response model for restoring soft-deleted objects in a bucket."""
 
-    object_name: str
-    md5_checksum: str
+    restored: list[str] = Field(
+        ...,
+        description="List of object names that were successfully restored, this includes objects that were already live",
+    )
+    not_restored: list[str] = Field(
+        ...,
+        description=(
+            "List of object names that could not be processed.\n"
+            "This could be due to several reasons:\n"
+            "1. The object does not exist in the bucket (e.g., a typo in the name).\n"
+            "2. The object was hard-deleted and is unrecoverable.\n"
+            "3. An unexpected server error occurred during the restore attempt."
+        ),
+    )
 
 
-class ExistingFileResponse(BaseModel):
-    """Response model for reporting a file that already exists in the bucket (using it's checksum)"""
+## checksum models ##
+class FileChecksumResponse(BaseModel):
+    """Response model for reporting a file's checksum in the bucket."""
 
-    object_name: str
-    md5_checksum: str
-    matching_object_name: str | None
+    object_name: str = Field(..., description="Name of the object in the bucket")
+    md5_checksum: str = Field(..., description="MD5 checksum of the object in the bucket")
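Taken together, the new multipart models imply a client flow of create, request part URLs in batches of up to 100, PUT each part, then complete (or abort on failure). A hedged sketch of how a client might drive that flow is below; the route paths, the post helper and the shape of its JSON return values are invented for illustration, while the schema classes and field names are the ones added in this release (Pydantic v2's model_dump is assumed).

# Hedged sketch only: `post(route, json)` and the route strings are hypothetical stand-ins
# for however divbase-cli actually talks to divbase-api.
from pathlib import Path
from typing import Any, Callable

import requests

from divbase_lib.api_schemas.s3 import (
    CompleteMultipartUploadRequest,
    CompleteMultipartUploadResponse,
    CreateMultipartUploadRequest,
    CreateMultipartUploadResponse,
    GetPresignedPartUrlsRequest,
    PresignedUploadPartUrlResponse,
    UploadedPart,
)
from divbase_lib.divbase_constants import MAX_S3_API_BATCH_SIZE


def multipart_upload(post: Callable[[str, dict[str, Any]], Any], file_path: Path) -> CompleteMultipartUploadResponse:
    """Drive the create -> part URLs -> PUT parts -> complete flow for one file."""
    size = file_path.stat().st_size
    create_request = CreateMultipartUploadRequest(name=file_path.name, content_length=size)
    create = CreateMultipartUploadResponse(**post("/s3/multipart/create", create_request.model_dump()))

    uploaded: list[UploadedPart] = []
    # Part URLs can only be requested in batches (MAX_S3_API_BATCH_SIZE = 100 parts per call).
    for first in range(1, create.number_of_parts + 1, MAX_S3_API_BATCH_SIZE):
        last = min(first + MAX_S3_API_BATCH_SIZE - 1, create.number_of_parts)
        batch = GetPresignedPartUrlsRequest(
            name=create.name, upload_id=create.upload_id, parts_range_start=first, parts_range_end=last
        )
        for raw in post("/s3/multipart/part-urls", batch.model_dump()):
            part = PresignedUploadPartUrlResponse(**raw)
            with file_path.open("rb") as f:
                f.seek((part.part_number - 1) * create.part_size)  # part numbers are 1-based
                body = f.read(create.part_size)
            resp = requests.put(part.pre_signed_url, data=body, headers=part.headers, timeout=600)
            resp.raise_for_status()
            uploaded.append(UploadedPart(part_number=part.part_number, etag=resp.headers["ETag"].strip('"')))

    done = CompleteMultipartUploadRequest(name=create.name, upload_id=create.upload_id, parts=uploaded)
    return CompleteMultipartUploadResponse(**post("/s3/multipart/complete", done.model_dump()))

On any failure mid-flow, a client would presumably send an AbortMultipartUploadRequest for the same name and upload_id so the already-uploaded parts are cleaned up.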
@@ -0,0 +1,45 @@
+"""
+Constants that both divbase-api and divbase-cli need to agree on.
+"""
+
+ONE_MiB = 1024 * 1024
+
+# When you download a file that has been uploaded in parts, you have
+# to know the part/chunk size used in order to correctly calculate the composite checksum
+S3_MULTIPART_CHUNK_SIZE = 32 * ONE_MiB
+
+# At what point you swap from single part to multipart upload to S3.
+# If server and client used the same threshold then makes life easier
+# when validating the checksums of files in s3 as single part and multipart uploads use different ETag formats.
+# (No benefit in constraining the download threshold, so not done here)
+S3_MULTIPART_UPLOAD_THRESHOLD = 96 * ONE_MiB
+
+# Max number of items that can be processed in a single API call to divbase-api's S3 routes
+# covers e.g pre-signed urls for upload/download, soft delete and checksum comparisons
+# client has to batch requests if exceeding this limit
+MAX_S3_API_BATCH_SIZE = 100
+
+# How long the pre-signed URLs divbase-api creates are valid for
+SINGLE_PART_UPLOAD_URL_EXPIRATION_SECONDS = 3600  # 1 hour
+MULTI_PART_UPLOAD_URL_EXPIRATION_SECONDS = 36000  # 10 hours
+DOWNLOAD_URL_EXPIRATION_SECONDS = 36000  # 10 hours
+
+# (Not used anywhere, just making it explicit)
+# This is limited by our fixing of the chunk size and S3's limit to the number of chunks allowed (10,000)
+# 320 GiB if using 32 MiB chunks
+LARGEST_FILE_UPLOADABLE_TO_DIVBASE_BYTES = 10_000 * S3_MULTIPART_CHUNK_SIZE
+
+# File types that DivBase supports
+# Whilst we can't realistically limit what file types a user actually uploads,
+# this is here to say what we know should work in DivBase.
+SUPPORTED_DIVBASE_FILE_TYPES = (".tsv", ".vcf.gz", ".csi", ".tbi")
+
+# Characters that are not allowed in file names uploaded to DivBase
+# This is to prevent issues when users try to filter/query files on DivBase using these characters
+# or when downloading files (e.g. ":" is used to specify file versions when downloading files
+UNSUPPORTED_CHARACTERS_IN_FILENAMES = (":", "*", "?", "<", ">", "|", "\\")
+
+# This prefix is used for all *.vcf.gz results files from a query job/task.
+# After the prefix comes the job id which is a rolling integer.
+# E.g. format: result_of_job_<job-id>.vcf.gz , where <job-id> = 1 and is auto-incremented for every new job.
+QUERY_RESULTS_FILE_PREFIX = "result_of_job_"
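The arithmetic implied by the comments in this new module: 10,000 parts at 32 MiB per chunk caps uploads at 320 GiB, and files at or below the 96 MiB threshold go up as a single part. A small illustrative sketch follows; plan_upload is hypothetical and not part of divbase-lib, and whether the threshold check is strict or inclusive is an assumption.

# Illustrative only: how the upload-related constants above combine.
import math

from divbase_lib.divbase_constants import (
    LARGEST_FILE_UPLOADABLE_TO_DIVBASE_BYTES,
    S3_MULTIPART_CHUNK_SIZE,
    S3_MULTIPART_UPLOAD_THRESHOLD,
)


def plan_upload(size_in_bytes: int) -> tuple[bool, int]:
    """Return (is_multipart, number_of_parts) for a file of the given size (hypothetical helper)."""
    if size_in_bytes > LARGEST_FILE_UPLOADABLE_TO_DIVBASE_BYTES:  # 10_000 parts * 32 MiB = 320 GiB
        raise ValueError("File exceeds the 10,000-part S3 limit (320 GiB at 32 MiB chunks).")
    if size_in_bytes <= S3_MULTIPART_UPLOAD_THRESHOLD:  # 96 MiB
        return False, 1
    return True, math.ceil(size_in_bytes / S3_MULTIPART_CHUNK_SIZE)


assert plan_upload(50 * 1024 * 1024) == (False, 1)  # 50 MiB -> one single-part upload
assert plan_upload(200 * 1024 * 1024) == (True, 7)  # 200 MiB -> 7 parts of 32 MiB (last part smaller)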
divbase_lib/exceptions.py CHANGED
@@ -10,20 +10,6 @@ we ensure that when you manually raise a specific exception the error message lo
 from pathlib import Path
 
 
-class ObjectDoesNotExistError(FileNotFoundError):
-    """Raised when an S3 object/key does not exist in the bucket."""
-
-    def __init__(self, key: str, bucket_name: str):
-        error_message = f"The file/object '{key}' does not exist in the bucket '{bucket_name}'. "
-        super().__init__(error_message)
-        self.key = key
-        self.bucket = bucket_name
-        self.error_message = error_message
-
-    def __str__(self):
-        return self.error_message
-
-
 class BcftoolsEnvironmentError(Exception):
     """Raised when there's an issue with the execution environment (Docker, etc.)."""
 
@@ -128,5 +114,8 @@ class ChecksumVerificationError(Exception):
         self.expected_checksum = expected_checksum
         self.calculated_checksum = calculated_checksum
 
-        message = f"Checksum verification failed. Expected: {expected_checksum}, Calculated: {calculated_checksum}"
+        message = (
+            f"Checksum verification failed. Expected: {expected_checksum}, Calculated: {calculated_checksum}"
+            f" The file has been deleted to avoid accidental use of a corrupted file."
+        )
         super().__init__(message)
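The extended message tells the caller that the corrupted download has already been removed by the time the exception propagates, so all a caller needs to do is report or retry. A hypothetical sketch of such a caller, not part of this package:

# Hypothetical caller-side handling; the retry helper below is not divbase-lib code.
from divbase_lib.exceptions import ChecksumVerificationError


def download_with_retry(download_once, attempts: int = 3) -> None:
    """Call `download_once()` (which verifies the checksum and removes bad files) up to `attempts` times."""
    for attempt in range(1, attempts + 1):
        try:
            download_once()
            return
        except ChecksumVerificationError as exc:
            print(f"Attempt {attempt} failed: {exc}")
    raise RuntimeError("Download failed checksum verification on every attempt.")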
@@ -13,31 +13,59 @@ from enum import StrEnum
 from pathlib import Path
 from typing import Iterator
 
+from divbase_lib.divbase_constants import S3_MULTIPART_CHUNK_SIZE
 from divbase_lib.exceptions import ChecksumVerificationError
 
 logger = logging.getLogger(__name__)
 
 
-def _read_file_chunks(file_path: Path, chunk_size: int = 8192) -> Iterator[bytes]:
+class MD5CheckSumFormat(StrEnum):
+    HEX = "hex"
+    BASE64 = "base64"
+
+
+def verify_downloaded_checksum(
+    file_path: Path,
+    expected_checksum: str,
+) -> None:
+    """
+    Verify a downloaded file against its S3's ETag.
+
+    For files uploaded as single part, this is just the MD5 checksum in hex format.
+    For files uploaded as multipart, this is a composite checksum of all the parts
+    """
+    if "-" in expected_checksum:
+        calculated_checksum = calculate_composite_md5_s3_etag(file_path)
+    else:
+        calculated_checksum = calculate_md5_checksum(file_path=file_path, output_format=MD5CheckSumFormat.HEX)
+
+    if calculated_checksum != expected_checksum:
+        raise ChecksumVerificationError(expected_checksum=expected_checksum, calculated_checksum=calculated_checksum)
+
+
+def _read_file_chunks(file_path: Path, chunk_size: int) -> Iterator[bytes]:
     """Helper function to read a file in 'chunk_size' sized chunks."""
 
     with file_path.open(mode="rb") as infile:
         yield from iter(lambda: infile.read(chunk_size), b"")
 
 
-class MD5CheckSumFormat(StrEnum):
-    HEX = "hex"
-    BASE64 = "base64"
-
-
-def calculate_md5_checksum(file_path: Path, output_format: MD5CheckSumFormat) -> str:
+def calculate_md5_checksum(
+    file_path: Path, output_format: MD5CheckSumFormat, chunk_size: int = S3_MULTIPART_CHUNK_SIZE
+) -> str:
     """
     Calculate the MD5 checksum of a file.
     Returns the checksum in either hex-encoded (lowercase) or base64-encoded format.
+
+    Used for:
+    - BASE64: The "Content-MD5" header used in uploads to S3.
+    - HEX: Verifying downloaded files against S3's ETag for the file.
+
+    (only works for files which will be uploaded as single part - not composite/multipart uploads)
     """
     md5_hash = hashlib.md5()
 
-    for chunk in _read_file_chunks(file_path):
+    for chunk in _read_file_chunks(file_path=file_path, chunk_size=chunk_size):
         md5_hash.update(chunk)
 
     if output_format == MD5CheckSumFormat.HEX:
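The reworked calculate_md5_checksum docstring names its two call sites: base64 output for the Content-MD5 header sent with single-part uploads, and hex output for comparing a download against a plain (single-part) ETag, while verify_downloaded_checksum switches to the composite path whenever the ETag contains a "-". A small usage sketch; the path and ETag here are placeholders supplied by the caller, and the wrapper function is illustrative only.

# Illustrative use of the helpers added in this hunk.
from pathlib import Path

from divbase_lib.s3_checksums import MD5CheckSumFormat, calculate_md5_checksum, verify_downloaded_checksum


def upload_header_and_verify(path: Path, s3_etag: str) -> str:
    """Return the Content-MD5 value for `path`, then check the local file against the reported ETag."""
    content_md5 = calculate_md5_checksum(file_path=path, output_format=MD5CheckSumFormat.BASE64)
    # A plain hex ETag means a single-part upload; an ETag like "abc123...-7" is a composite over 7 parts.
    verify_downloaded_checksum(file_path=path, expected_checksum=s3_etag)
    return content_md5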
@@ -48,13 +76,39 @@ def calculate_md5_checksum(file_path: Path, output_format: MD5CheckSumFormat) ->
         raise ValueError(f"Unknown output format: {output_format}")
 
 
-def verify_downloaded_checksum(file_path: Path, expected_checksum: str) -> None:
+def calculate_md5_checksum_for_chunk(file_path: Path, start_byte: int, chunk_size: int) -> str:
+    """
+    Calculate the base64-encoded MD5 checksum for a specific chunk of a file.
+    S3 uses this checksum (Content-MD5 header) when uploading parts of a file.
+    """
+    md5_hash = hashlib.md5()
+    with file_path.open("rb") as f:
+        f.seek(start_byte)
+        chunk = f.read(chunk_size)
+        md5_hash.update(chunk)
+    return base64.b64encode(md5_hash.digest()).decode("utf-8")
+
+
+def calculate_composite_md5_s3_etag(
+    file_path: Path,
+    chunk_size: int = S3_MULTIPART_CHUNK_SIZE,
+) -> str:
     """
-    Verify a downloaded file against S3's ETag (MD5 checksum in hex format).
+    Calculate the composite ETag for a file that was uploaded via multipart upload to S3.
+    This is used to validate the downloaded file's integrity.
+
+    The process involves calculating the MD5 hash of each part, then combining these hashes to form a final ETag.
+    So the part size used here must match the part size used during upload.
     """
-    calculated_md5 = calculate_md5_checksum(file_path=file_path, output_format=MD5CheckSumFormat.HEX)
-    if calculated_md5 != expected_checksum:
-        raise ChecksumVerificationError(expected_checksum=expected_checksum, calculated_checksum=calculated_md5)
+    md5_digests = []
+    part_count = 0
+
+    for chunk in _read_file_chunks(file_path=file_path, chunk_size=chunk_size):
+        md5_digests.append(hashlib.md5(chunk).digest())
+        part_count += 1
+
+    composite_hash = hashlib.md5(b"".join(md5_digests))
+    return f"{composite_hash.hexdigest()}-{part_count}"
 
 
 def convert_checksum_hex_to_base64(hex_checksum: str) -> str:
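Because the composite ETag is the MD5 of the concatenated per-part MD5 digests, suffixed with the part count, the chunk size used for verification has to match the chunk size used for upload (hence the shared S3_MULTIPART_CHUNK_SIZE constant). A self-contained sketch of that relationship using the two helpers added above; the temporary file and the 1 MiB demo chunk size are illustrative only.

# Illustrative only: composite ETag == md5(concatenation of per-part MD5 digests) + "-<part count>".
import base64
import hashlib
import tempfile
from pathlib import Path

from divbase_lib.s3_checksums import calculate_composite_md5_s3_etag, calculate_md5_checksum_for_chunk

chunk_size = 1024 * 1024  # 1 MiB parts for the demo; DivBase itself fixes this at 32 MiB

with tempfile.TemporaryDirectory() as tmp:
    path = Path(tmp) / "demo.bin"
    path.write_bytes(b"x" * (2 * chunk_size + 123))  # three parts: two full chunks plus a 123-byte tail

    # Per-part MD5s, as they would be sent in each part's Content-MD5 header during a multipart upload.
    part_md5s = [
        calculate_md5_checksum_for_chunk(path, start_byte=i * chunk_size, chunk_size=chunk_size) for i in range(3)
    ]
    digests = b"".join(base64.b64decode(d) for d in part_md5s)
    expected_etag = f"{hashlib.md5(digests).hexdigest()}-3"

    assert calculate_composite_md5_s3_etag(path, chunk_size=chunk_size) == expected_etag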
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: divbase-lib
-Version: 0.1.0.dev1
+Version: 0.1.0.dev3
 Summary: Library module for Divbase
 Project-URL: Homepage, https://divbase.scilifelab.se
 Project-URL: Documentation, https://scilifelabdatacentre.github.io/divbase
@@ -0,0 +1,14 @@
+divbase_lib/__init__.py,sha256=jQHG8OW4TlfIzPKW4IrX9q58EfEf9pDfm2YgO0ydKaA,27
+divbase_lib/divbase_constants.py,sha256=kvY1_Plvwg5PIgUl_G5sTIhpMwnCBp_MwdICgHJ1ErM,2302
+divbase_lib/exceptions.py,sha256=qruN11zJEzPta_bF3wSzn81zx83X2RfNDVEAZfhMan0,4083
+divbase_lib/s3_checksums.py,sha256=pAaGDsxAoP916gRR_70frJXEl2GpWC7D_DVTsYfcPmg,4135
+divbase_lib/api_schemas/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+divbase_lib/api_schemas/auth.py,sha256=RmfoGoRID31r1ZA8O0XiC4Iy2d-gkEdE_75l8EiVxbY,1331
+divbase_lib/api_schemas/project_versions.py,sha256=trv9a_I8CIquCEJEnRXANIKA3Mboe339eu-q_rjaSJE,2353
+divbase_lib/api_schemas/queries.py,sha256=VOWzwegNdjf3HR1dqkfvKuT6qDDnqI-JOSMBVGJ-UuQ,1762
+divbase_lib/api_schemas/s3.py,sha256=ayREJRDMvgcSgSuSwkNX8F04pB-QtsqIXA4nEGa-9i8,9066
+divbase_lib/api_schemas/task_history.py,sha256=BwmnjJl8fvZftDfuE6txUeYR5dv5WYp8GAeamkifvjY,1414
+divbase_lib/api_schemas/vcf_dimensions.py,sha256=o3hKPs_BJMsP4ULikZsuBnDx8CJy9MC66FYahcuSIzg,1276
+divbase_lib-0.1.0.dev3.dist-info/METADATA,sha256=t3o7gpqN8RCjni6n9Bq0U_WYPOmtH_N01FEc4BM1p_k,1564
+divbase_lib-0.1.0.dev3.dist-info/WHEEL,sha256=WLgqFyCfm_KASv4WHyYy0P3pM_m7J5L9k2skdKLirC8,87
+divbase_lib-0.1.0.dev3.dist-info/RECORD,,
@@ -1,13 +0,0 @@
-divbase_lib/__init__.py,sha256=7laO2T6HtGHGvqn4SNMbwiGjkxKXUl3tP2KtQ6BHPiA,27
-divbase_lib/exceptions.py,sha256=Ld9_EvV02BP2EudaXbWq5B5YneEedZ7lsKxH6ryk-lA,4442
-divbase_lib/s3_checksums.py,sha256=D_jQAYKpUQf8xFs3M65F_zV_sasQFBJUH6hRwXfN_GE,2175
-divbase_lib/api_schemas/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-divbase_lib/api_schemas/auth.py,sha256=RmfoGoRID31r1ZA8O0XiC4Iy2d-gkEdE_75l8EiVxbY,1331
-divbase_lib/api_schemas/project_versions.py,sha256=trv9a_I8CIquCEJEnRXANIKA3Mboe339eu-q_rjaSJE,2353
-divbase_lib/api_schemas/queries.py,sha256=bdJttYzZpgnaqg-5Z9BVTlpfutFdoo8EUayw6FSHm8o,1746
-divbase_lib/api_schemas/s3.py,sha256=leQRlwnyAiSAMv-4CHdgjT_iPGXkXcpVQBFkcUS-kbs,2006
-divbase_lib/api_schemas/task_history.py,sha256=BwmnjJl8fvZftDfuE6txUeYR5dv5WYp8GAeamkifvjY,1414
-divbase_lib/api_schemas/vcf_dimensions.py,sha256=o3hKPs_BJMsP4ULikZsuBnDx8CJy9MC66FYahcuSIzg,1276
-divbase_lib-0.1.0.dev1.dist-info/METADATA,sha256=VywPx9jbpq8c_k-bWg6cm6F78oZEdQLOcXSOW4nkO-w,1564
-divbase_lib-0.1.0.dev1.dist-info/WHEEL,sha256=WLgqFyCfm_KASv4WHyYy0P3pM_m7J5L9k2skdKLirC8,87
-divbase_lib-0.1.0.dev1.dist-info/RECORD,,