elaunira-r2index 0.1.0-py3-none-any.whl → 0.2.0-py3-none-any.whl

This diff shows the content of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between those versions as published.
@@ -1,43 +1,11 @@
1
- """
2
- R2 Index Python Library.
1
+ """Python library for uploading and downloading files to/from Cloudflare R2 with the r2index API."""
3
2
 
4
- A Python library for uploading files to Cloudflare R2 and registering them
5
- with the r2index API. Supports both sync and async operations with streaming
6
- checksums for memory-efficient handling of large files.
3
+ from importlib.metadata import version
7
4
 
8
- Usage:
9
- from elaunira.r2index import R2IndexClient, R2Config
10
-
11
- client = R2IndexClient(
12
- api_url="https://r2index.example.com",
13
- api_token="your-bearer-token",
14
- r2_config=R2Config(
15
- access_key_id="...",
16
- secret_access_key="...",
17
- endpoint_url="https://xxx.r2.cloudflarestorage.com",
18
- bucket="your-bucket",
19
- ),
20
- )
21
-
22
- # Upload and register a file
23
- record = client.upload_and_register(
24
- file_path="./myfile.zip",
25
- category="software",
26
- entity="myapp",
27
- remote_path="/releases",
28
- remote_filename="myapp-1.0.0.zip",
29
- remote_version="1.0.0",
30
- )
31
-
32
- Async usage:
33
- from elaunira.r2index import AsyncR2IndexClient, R2Config
34
-
35
- async with AsyncR2IndexClient(...) as client:
36
- record = await client.upload_and_register(...)
37
- """
5
+ __version__ = version("elaunira-r2index")
38
6
 
39
7
  from .async_client import AsyncR2IndexClient
40
- from .async_uploader import AsyncR2Uploader
8
+ from .async_storage import AsyncR2Storage
41
9
  from .checksums import (
42
10
  ChecksumResult,
43
11
  compute_checksums,
@@ -48,6 +16,7 @@ from .client import R2IndexClient
48
16
  from .exceptions import (
49
17
  AuthenticationError,
50
18
  ConflictError,
19
+ DownloadError,
51
20
  NotFoundError,
52
21
  R2IndexError,
53
22
  UploadError,
@@ -72,44 +41,48 @@ from .models import (
72
41
  UserAgentEntry,
73
42
  UserAgentsResponse,
74
43
  )
75
- from .uploader import R2Config, R2Uploader
44
+ from .storage import R2Config, R2Storage, R2TransferConfig
76
45
 
77
46
  __all__ = [
47
+ # Version
48
+ "__version__",
78
49
  # Clients
79
- "R2IndexClient",
80
50
  "AsyncR2IndexClient",
81
- # Uploaders
82
- "R2Uploader",
83
- "AsyncR2Uploader",
51
+ "R2IndexClient",
52
+ # Storage
53
+ "AsyncR2Storage",
84
54
  "R2Config",
55
+ "R2Storage",
56
+ "R2TransferConfig",
85
57
  # Checksums
86
58
  "ChecksumResult",
87
59
  "compute_checksums",
88
60
  "compute_checksums_async",
89
61
  "compute_checksums_from_file_object",
90
62
  # Exceptions
91
- "R2IndexError",
92
63
  "AuthenticationError",
93
- "NotFoundError",
94
- "ValidationError",
95
64
  "ConflictError",
65
+ "DownloadError",
66
+ "NotFoundError",
67
+ "R2IndexError",
96
68
  "UploadError",
69
+ "ValidationError",
97
70
  # Models - File operations
98
- "FileRecord",
99
71
  "FileCreateRequest",
100
- "FileUpdateRequest",
101
72
  "FileListResponse",
73
+ "FileRecord",
74
+ "FileUpdateRequest",
102
75
  "IndexEntry",
103
76
  "RemoteTuple",
104
77
  # Models - Downloads
105
78
  "DownloadRecord",
106
79
  "DownloadRecordRequest",
107
80
  # Models - Analytics
108
- "TimeseriesDataPoint",
109
- "TimeseriesResponse",
110
- "SummaryResponse",
111
81
  "DownloadByIpEntry",
112
82
  "DownloadsByIpResponse",
83
+ "SummaryResponse",
84
+ "TimeseriesDataPoint",
85
+ "TimeseriesResponse",
113
86
  "UserAgentEntry",
114
87
  "UserAgentsResponse",
115
88
  # Models - Other
@@ -7,7 +7,7 @@ from typing import Any
7
7
 
8
8
  import httpx
9
9
 
10
- from .async_uploader import AsyncR2Uploader
10
+ from .async_storage import AsyncR2Storage
11
11
  from .checksums import compute_checksums_async
12
12
  from .exceptions import (
13
13
  AuthenticationError,
@@ -32,7 +32,48 @@ from .models import (
32
32
  TimeseriesResponse,
33
33
  UserAgentsResponse,
34
34
  )
35
- from .uploader import R2Config
35
+ from . import __version__
36
+ from .storage import R2Config, R2TransferConfig
37
+
38
+ CHECKIP_URL = "https://checkip.amazonaws.com"
39
+ DEFAULT_USER_AGENT = f"elaunira-r2index/{__version__}"
40
+
41
+
42
+ def _parse_object_id(object_id: str, bucket: str) -> RemoteTuple:
43
+ """
44
+ Parse an object_id into remote_path, remote_version, and remote_filename.
45
+
46
+ Format: /path/to/object/version/filename.ext
47
+ - remote_filename: last component (filename.ext)
48
+ - remote_version: second-to-last component (version)
49
+ - remote_path: everything before that (/path/to/object)
50
+
51
+ Args:
52
+ object_id: Full object path like /releases/myapp/v1/myapp.zip
53
+ bucket: The S3/R2 bucket name.
54
+
55
+ Returns:
56
+ RemoteTuple with parsed components including bucket.
57
+
58
+ Raises:
59
+ ValueError: If object_id doesn't have enough components.
60
+ """
61
+ parts = object_id.strip("/").split("/")
62
+ if len(parts) < 3:
63
+ raise ValueError(
64
+ f"object_id must have at least 3 components (path/version/filename), got: {object_id}"
65
+ )
66
+
67
+ remote_filename = parts[-1]
68
+ remote_version = parts[-2]
69
+ remote_path = "/" + "/".join(parts[:-2])
70
+
71
+ return RemoteTuple(
72
+ bucket=bucket,
73
+ remote_path=remote_path,
74
+ remote_filename=remote_filename,
75
+ remote_version=remote_version,
76
+ )
36
77
 
37
78
 
38
79
  class AsyncR2IndexClient:
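To make the `object_id` convention used by the new `_parse_object_id` helper concrete, here is a minimal illustration of the parsing rule (the sample path is hypothetical; the snippet mirrors the helper rather than importing the private function):

```python
# Mirrors the rule applied by _parse_object_id: /<path>/<version>/<filename>
object_id = "/releases/myapp/v1/myapp.zip"  # illustrative path

parts = object_id.strip("/").split("/")
assert len(parts) >= 3, "need at least path/version/filename"

remote_filename = parts[-1]               # "myapp.zip"
remote_version = parts[-2]                # "v1"
remote_path = "/" + "/".join(parts[:-2])  # "/releases/myapp"

print(remote_path, remote_version, remote_filename)
```

The helper then wraps these components, together with the caller-supplied bucket, into a `RemoteTuple`.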
@@ -58,7 +99,7 @@ class AsyncR2IndexClient:
58
99
  self._token = api_token
59
100
  self._timeout = timeout
60
101
  self._r2_config = r2_config
61
- self._uploader: AsyncR2Uploader | None = None
102
+ self._storage: AsyncR2Storage | None = None
62
103
 
63
104
  self._client = httpx.AsyncClient(
64
105
  base_url=self.api_url,
@@ -76,13 +117,13 @@ class AsyncR2IndexClient:
76
117
  """Close the HTTP client."""
77
118
  await self._client.aclose()
78
119
 
79
- def _get_uploader(self) -> AsyncR2Uploader:
120
+ def _get_storage(self) -> AsyncR2Storage:
80
121
  """Get or create the async R2 uploader."""
81
122
  if self._r2_config is None:
82
123
  raise R2IndexError("R2 configuration required for upload operations")
83
- if self._uploader is None:
84
- self._uploader = AsyncR2Uploader(self._r2_config)
85
- return self._uploader
124
+ if self._storage is None:
125
+ self._storage = AsyncR2Storage(self._r2_config)
126
+ return self._storage
86
127
 
87
128
  def _handle_response(self, response: httpx.Response) -> Any:
88
129
  """Handle API response and raise appropriate exceptions."""
@@ -111,6 +152,7 @@ class AsyncR2IndexClient:
111
152
 
112
153
  async def list_files(
113
154
  self,
155
+ bucket: str | None = None,
114
156
  category: str | None = None,
115
157
  entity: str | None = None,
116
158
  tags: list[str] | None = None,
@@ -121,6 +163,7 @@ class AsyncR2IndexClient:
121
163
  List files with optional filters.
122
164
 
123
165
  Args:
166
+ bucket: Filter by bucket.
124
167
  category: Filter by category.
125
168
  entity: Filter by entity.
126
169
  tags: Filter by tags.
@@ -131,6 +174,8 @@ class AsyncR2IndexClient:
131
174
  FileListResponse with files and pagination info.
132
175
  """
133
176
  params: dict[str, Any] = {}
177
+ if bucket:
178
+ params["bucket"] = bucket
134
179
  if category:
135
180
  params["category"] = category
136
181
  if entity:
@@ -213,12 +258,13 @@ class AsyncR2IndexClient:
213
258
  Delete a file by remote tuple.
214
259
 
215
260
  Args:
216
- remote_tuple: The remote path, filename, and version.
261
+ remote_tuple: The bucket, remote path, filename, and version.
217
262
 
218
263
  Raises:
219
264
  NotFoundError: If the file is not found.
220
265
  """
221
266
  params = {
267
+ "bucket": remote_tuple.bucket,
222
268
  "remotePath": remote_tuple.remote_path,
223
269
  "remoteFilename": remote_tuple.remote_filename,
224
270
  "remoteVersion": remote_tuple.remote_version,
@@ -226,8 +272,32 @@ class AsyncR2IndexClient:
226
272
  response = await self._client.delete("/files", params=params)
227
273
  self._handle_response(response)
228
274
 
275
+ async def get_file_by_tuple(self, remote_tuple: RemoteTuple) -> FileRecord:
276
+ """
277
+ Get a file by remote tuple.
278
+
279
+ Args:
280
+ remote_tuple: The bucket, remote path, filename, and version.
281
+
282
+ Returns:
283
+ The FileRecord.
284
+
285
+ Raises:
286
+ NotFoundError: If the file is not found.
287
+ """
288
+ params = {
289
+ "bucket": remote_tuple.bucket,
290
+ "remotePath": remote_tuple.remote_path,
291
+ "remoteFilename": remote_tuple.remote_filename,
292
+ "remoteVersion": remote_tuple.remote_version,
293
+ }
294
+ response = await self._client.get("/files/by-tuple", params=params)
295
+ data = self._handle_response(response)
296
+ return FileRecord.model_validate(data)
297
+
229
298
  async def get_index(
230
299
  self,
300
+ bucket: str | None = None,
231
301
  category: str | None = None,
232
302
  entity: str | None = None,
233
303
  tags: list[str] | None = None,
@@ -236,6 +306,7 @@ class AsyncR2IndexClient:
236
306
  Get file index (lightweight listing).
237
307
 
238
308
  Args:
309
+ bucket: Filter by bucket.
239
310
  category: Filter by category.
240
311
  entity: Filter by entity.
241
312
  tags: Filter by tags.
@@ -244,6 +315,8 @@ class AsyncR2IndexClient:
244
315
  List of IndexEntry objects.
245
316
  """
246
317
  params: dict[str, Any] = {}
318
+ if bucket:
319
+ params["bucket"] = bucket
247
320
  if category:
248
321
  params["category"] = category
249
322
  if entity:
@@ -426,7 +499,8 @@ class AsyncR2IndexClient:
426
499
 
427
500
  async def upload_and_register(
428
501
  self,
429
- file_path: str | Path,
502
+ local_path: str | Path,
503
+ bucket: str,
430
504
  category: str,
431
505
  entity: str,
432
506
  remote_path: str,
@@ -447,7 +521,8 @@ class AsyncR2IndexClient:
447
521
  3. Register with r2index API
448
522
 
449
523
  Args:
450
- file_path: Path to the file to upload.
524
+ local_path: Local path to the file to upload.
525
+ bucket: The S3/R2 bucket name.
451
526
  category: File category.
452
527
  entity: File entity.
453
528
  remote_path: Remote path in R2 (e.g., "/data/files").
@@ -466,18 +541,18 @@ class AsyncR2IndexClient:
466
541
  R2IndexError: If R2 config is not provided.
467
542
  UploadError: If upload fails.
468
543
  """
469
- file_path = Path(file_path)
470
- uploader = self._get_uploader()
544
+ local_path = Path(local_path)
545
+ uploader = self._get_storage()
471
546
 
472
547
  # Step 1: Compute checksums
473
- checksums = await compute_checksums_async(file_path)
548
+ checksums = await compute_checksums_async(local_path)
474
549
 
475
550
  # Step 2: Build R2 object key
476
551
  object_key = f"{remote_path.strip('/')}/{remote_filename}"
477
552
 
478
553
  # Step 3: Upload to R2
479
554
  await uploader.upload_file(
480
- file_path,
555
+ local_path,
481
556
  object_key,
482
557
  content_type=content_type,
483
558
  progress_callback=progress_callback,
@@ -485,6 +560,7 @@ class AsyncR2IndexClient:
485
560
 
486
561
  # Step 4: Register with API
487
562
  create_request = FileCreateRequest(
563
+ bucket=bucket,
488
564
  category=category,
489
565
  entity=entity,
490
566
  remote_path=remote_path,
@@ -501,3 +577,84 @@ class AsyncR2IndexClient:
501
577
  )
502
578
 
503
579
  return await self.create_file(create_request)
580
+
581
+ async def _get_public_ip(self) -> str:
582
+ """Fetch public IP address from checkip.amazonaws.com."""
583
+ async with httpx.AsyncClient() as client:
584
+ response = await client.get(CHECKIP_URL, timeout=10.0)
585
+ return response.text.strip()
586
+
587
+ async def download_and_record(
588
+ self,
589
+ bucket: str,
590
+ object_id: str,
591
+ destination: str | Path,
592
+ ip_address: str | None = None,
593
+ user_agent: str | None = None,
594
+ progress_callback: Callable[[int], None] | None = None,
595
+ transfer_config: R2TransferConfig | None = None,
596
+ ) -> tuple[Path, FileRecord]:
597
+ """
598
+ Download a file from R2 and record the download in the index asynchronously.
599
+
600
+ This is a convenience method that performs:
601
+ 1. Parse object_id into remote_path, remote_version, remote_filename
602
+ 2. Fetch file record from the API using these components
603
+ 3. Download the file from R2
604
+ 4. Record the download in the index for analytics
605
+
606
+ Args:
607
+ bucket: The S3/R2 bucket name.
608
+ object_id: Full S3 object path in format: /path/to/object/version/filename
609
+ Example: /releases/myapp/v1/myapp.zip
610
+ - remote_path: /releases/myapp
611
+ - remote_version: v1
612
+ - remote_filename: myapp.zip
613
+ destination: Local path where the file will be saved.
614
+ ip_address: IP address of the downloader. If not provided, fetched
615
+ from checkip.amazonaws.com.
616
+ user_agent: User agent string. Defaults to "elaunira-r2index/<package version>".
617
+ progress_callback: Optional callback for download progress.
618
+ transfer_config: Optional transfer configuration for multipart/threading.
619
+
620
+ Returns:
621
+ A tuple of (downloaded file path, file record).
622
+
623
+ Raises:
624
+ R2IndexError: If R2 config is not provided.
625
+ ValueError: If object_id format is invalid.
626
+ NotFoundError: If the file is not found in the index.
627
+ DownloadError: If download fails.
628
+ """
629
+ storage = self._get_storage()
630
+
631
+ # Resolve defaults
632
+ if ip_address is None:
633
+ ip_address = await self._get_public_ip()
634
+ if user_agent is None:
635
+ user_agent = DEFAULT_USER_AGENT
636
+
637
+ # Step 1: Parse object_id into components
638
+ remote_tuple = _parse_object_id(object_id, bucket)
639
+
640
+ # Step 2: Get file record by tuple
641
+ file_record = await self.get_file_by_tuple(remote_tuple)
642
+
643
+ # Step 3: Build R2 object key and download
644
+ object_key = object_id.strip("/")
645
+ downloaded_path = await storage.download_file(
646
+ object_key,
647
+ destination,
648
+ progress_callback=progress_callback,
649
+ transfer_config=transfer_config,
650
+ )
651
+
652
+ # Step 4: Record the download
653
+ download_request = DownloadRecordRequest(
654
+ file_id=file_record.id,
655
+ ip_address=ip_address,
656
+ user_agent=user_agent,
657
+ )
658
+ await self.record_download(download_request)
659
+
660
+ return downloaded_path, file_record
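A short usage sketch of the new asynchronous `download_and_record` method; the API URL, token, credentials, and paths below are placeholders, and the IP address and user agent fall back to the defaults described in the docstring:

```python
import asyncio

from elaunira.r2index import AsyncR2IndexClient, R2Config

async def main() -> None:
    async with AsyncR2IndexClient(
        api_url="https://r2index.example.com",  # placeholder
        api_token="your-bearer-token",          # placeholder
        r2_config=R2Config(
            access_key_id="...",
            secret_access_key="...",
            endpoint_url="https://your-account-id.r2.cloudflarestorage.com",
            bucket="my-bucket",
        ),
    ) as client:
        # Looks up the index record, downloads the object, and records the download.
        path, record = await client.download_and_record(
            bucket="my-bucket",
            object_id="/releases/myapp/v1/myapp.zip",
            destination="./downloads/myapp.zip",
        )
        print(path, record.id)

asyncio.run(main())
```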
@@ -1,4 +1,4 @@
1
- """Asynchronous R2 uploader using aioboto3."""
1
+ """Asynchronous R2 storage operations using aioboto3."""
2
2
 
3
3
  from collections.abc import Callable
4
4
  from pathlib import Path
@@ -6,20 +6,16 @@ from pathlib import Path
6
6
  import aioboto3
7
7
  from aiobotocore.config import AioConfig
8
8
 
9
- from .exceptions import UploadError
10
- from .uploader import R2Config
9
+ from .exceptions import DownloadError, UploadError
10
+ from .storage import R2Config, R2TransferConfig
11
11
 
12
- # 100MB threshold and part size for multipart uploads
13
- MULTIPART_THRESHOLD = 100 * 1024 * 1024
14
- MULTIPART_PART_SIZE = 100 * 1024 * 1024
15
12
 
16
-
17
- class AsyncR2Uploader:
18
- """Asynchronous R2 uploader using aioboto3."""
13
+ class AsyncR2Storage:
14
+ """Asynchronous R2 storage client using aioboto3."""
19
15
 
20
16
  def __init__(self, config: R2Config) -> None:
21
17
  """
22
- Initialize the async R2 uploader.
18
+ Initialize the async R2 storage client.
23
19
 
24
20
  Args:
25
21
  config: R2 configuration with credentials and endpoint.
@@ -33,17 +29,19 @@ class AsyncR2Uploader:
33
29
  object_key: str,
34
30
  content_type: str | None = None,
35
31
  progress_callback: Callable[[int], None] | None = None,
32
+ transfer_config: R2TransferConfig | None = None,
36
33
  ) -> str:
37
34
  """
38
35
  Upload a file to R2 asynchronously.
39
36
 
40
- Uses multipart upload for files larger than 100MB.
37
+ Uses multipart upload for files larger than the configured threshold.
41
38
 
42
39
  Args:
43
40
  file_path: Path to the file to upload.
44
41
  object_key: The key (path) to store the object under in R2.
45
42
  content_type: Optional content type for the object.
46
43
  progress_callback: Optional callback called with bytes uploaded so far.
44
+ transfer_config: Optional transfer configuration for multipart/threading.
47
45
 
48
46
  Returns:
49
47
  The object key of the uploaded file.
@@ -56,8 +54,9 @@ class AsyncR2Uploader:
56
54
  if not file_path.exists():
57
55
  raise UploadError(f"File not found: {file_path}")
58
56
 
59
- transfer_config = AioConfig(
60
- max_pool_connections=10,
57
+ tc = transfer_config or R2TransferConfig()
58
+ aio_config = AioConfig(
59
+ max_pool_connections=tc.max_concurrency,
61
60
  )
62
61
 
63
62
  extra_args = {}
@@ -71,7 +70,7 @@ class AsyncR2Uploader:
71
70
  aws_secret_access_key=self.config.secret_access_key,
72
71
  endpoint_url=self.config.endpoint_url,
73
72
  region_name=self.config.region,
74
- config=transfer_config,
73
+ config=aio_config,
75
74
  ) as client:
76
75
  callback = None
77
76
  if progress_callback:
@@ -136,6 +135,62 @@ class AsyncR2Uploader:
136
135
  return False
137
136
  raise UploadError(f"Failed to check object existence: {e}") from e
138
137
 
138
+ async def download_file(
139
+ self,
140
+ object_key: str,
141
+ file_path: str | Path,
142
+ progress_callback: Callable[[int], None] | None = None,
143
+ transfer_config: R2TransferConfig | None = None,
144
+ ) -> Path:
145
+ """
146
+ Download a file from R2 asynchronously.
147
+
148
+ Args:
149
+ object_key: The key (path) of the object in R2.
150
+ file_path: Local path where the file will be saved.
151
+ progress_callback: Optional callback called with bytes downloaded so far.
152
+ transfer_config: Optional transfer configuration for multipart/threading.
153
+
154
+ Returns:
155
+ The path to the downloaded file.
156
+
157
+ Raises:
158
+ DownloadError: If the download fails.
159
+ """
160
+ file_path = Path(file_path)
161
+
162
+ # Ensure parent directory exists
163
+ file_path.parent.mkdir(parents=True, exist_ok=True)
164
+
165
+ tc = transfer_config or R2TransferConfig()
166
+ aio_config = AioConfig(
167
+ max_pool_connections=tc.max_concurrency,
168
+ )
169
+
170
+ try:
171
+ async with self._session.client(
172
+ "s3",
173
+ aws_access_key_id=self.config.access_key_id,
174
+ aws_secret_access_key=self.config.secret_access_key,
175
+ endpoint_url=self.config.endpoint_url,
176
+ region_name=self.config.region,
177
+ config=aio_config,
178
+ ) as client:
179
+ callback = None
180
+ if progress_callback:
181
+ callback = _AsyncProgressCallback(progress_callback)
182
+
183
+ await client.download_file(
184
+ self.config.bucket,
185
+ object_key,
186
+ str(file_path),
187
+ Callback=callback,
188
+ )
189
+ except Exception as e:
190
+ raise DownloadError(f"Failed to download file from R2: {e}") from e
191
+
192
+ return file_path
193
+
139
194
 
140
195
  class _AsyncProgressCallback:
141
196
  """Wrapper to track cumulative progress for aioboto3 callback."""
@@ -31,7 +31,48 @@ from .models import (
31
31
  TimeseriesResponse,
32
32
  UserAgentsResponse,
33
33
  )
34
- from .uploader import R2Config, R2Uploader
34
+ from . import __version__
35
+ from .storage import R2Config, R2Storage, R2TransferConfig
36
+
37
+ CHECKIP_URL = "https://checkip.amazonaws.com"
38
+ DEFAULT_USER_AGENT = f"elaunira-r2index/{__version__}"
39
+
40
+
41
+ def _parse_object_id(object_id: str, bucket: str) -> RemoteTuple:
42
+ """
43
+ Parse an object_id into remote_path, remote_version, and remote_filename.
44
+
45
+ Format: /path/to/object/version/filename.ext
46
+ - remote_filename: last component (filename.ext)
47
+ - remote_version: second-to-last component (version)
48
+ - remote_path: everything before that (/path/to/object)
49
+
50
+ Args:
51
+ object_id: Full object path like /releases/myapp/v1/myapp.zip
52
+ bucket: The S3/R2 bucket name.
53
+
54
+ Returns:
55
+ RemoteTuple with parsed components including bucket.
56
+
57
+ Raises:
58
+ ValueError: If object_id doesn't have enough components.
59
+ """
60
+ parts = object_id.strip("/").split("/")
61
+ if len(parts) < 3:
62
+ raise ValueError(
63
+ f"object_id must have at least 3 components (path/version/filename), got: {object_id}"
64
+ )
65
+
66
+ remote_filename = parts[-1]
67
+ remote_version = parts[-2]
68
+ remote_path = "/" + "/".join(parts[:-2])
69
+
70
+ return RemoteTuple(
71
+ bucket=bucket,
72
+ remote_path=remote_path,
73
+ remote_filename=remote_filename,
74
+ remote_version=remote_version,
75
+ )
35
76
 
36
77
 
37
78
  class R2IndexClient:
@@ -57,7 +98,7 @@ class R2IndexClient:
57
98
  self._token = api_token
58
99
  self._timeout = timeout
59
100
  self._r2_config = r2_config
60
- self._uploader: R2Uploader | None = None
101
+ self._storage: R2Storage | None = None
61
102
 
62
103
  self._client = httpx.Client(
63
104
  base_url=self.api_url,
@@ -75,13 +116,13 @@ class R2IndexClient:
75
116
  """Close the HTTP client."""
76
117
  self._client.close()
77
118
 
78
- def _get_uploader(self) -> R2Uploader:
119
+ def _get_storage(self) -> R2Storage:
79
120
  """Get or create the R2 uploader."""
80
121
  if self._r2_config is None:
81
122
  raise R2IndexError("R2 configuration required for upload operations")
82
- if self._uploader is None:
83
- self._uploader = R2Uploader(self._r2_config)
84
- return self._uploader
123
+ if self._storage is None:
124
+ self._storage = R2Storage(self._r2_config)
125
+ return self._storage
85
126
 
86
127
  def _handle_response(self, response: httpx.Response) -> Any:
87
128
  """Handle API response and raise appropriate exceptions."""
@@ -110,6 +151,7 @@ class R2IndexClient:
110
151
 
111
152
  def list_files(
112
153
  self,
154
+ bucket: str | None = None,
113
155
  category: str | None = None,
114
156
  entity: str | None = None,
115
157
  tags: list[str] | None = None,
@@ -120,6 +162,7 @@ class R2IndexClient:
120
162
  List files with optional filters.
121
163
 
122
164
  Args:
165
+ bucket: Filter by bucket.
123
166
  category: Filter by category.
124
167
  entity: Filter by entity.
125
168
  tags: Filter by tags.
@@ -130,6 +173,8 @@ class R2IndexClient:
130
173
  FileListResponse with files and pagination info.
131
174
  """
132
175
  params: dict[str, Any] = {}
176
+ if bucket:
177
+ params["bucket"] = bucket
133
178
  if category:
134
179
  params["category"] = category
135
180
  if entity:
@@ -212,12 +257,13 @@ class R2IndexClient:
212
257
  Delete a file by remote tuple.
213
258
 
214
259
  Args:
215
- remote_tuple: The remote path, filename, and version.
260
+ remote_tuple: The bucket, remote path, filename, and version.
216
261
 
217
262
  Raises:
218
263
  NotFoundError: If the file is not found.
219
264
  """
220
265
  params = {
266
+ "bucket": remote_tuple.bucket,
221
267
  "remotePath": remote_tuple.remote_path,
222
268
  "remoteFilename": remote_tuple.remote_filename,
223
269
  "remoteVersion": remote_tuple.remote_version,
@@ -225,8 +271,32 @@ class R2IndexClient:
225
271
  response = self._client.delete("/files", params=params)
226
272
  self._handle_response(response)
227
273
 
274
+ def get_file_by_tuple(self, remote_tuple: RemoteTuple) -> FileRecord:
275
+ """
276
+ Get a file by remote tuple.
277
+
278
+ Args:
279
+ remote_tuple: The bucket, remote path, filename, and version.
280
+
281
+ Returns:
282
+ The FileRecord.
283
+
284
+ Raises:
285
+ NotFoundError: If the file is not found.
286
+ """
287
+ params = {
288
+ "bucket": remote_tuple.bucket,
289
+ "remotePath": remote_tuple.remote_path,
290
+ "remoteFilename": remote_tuple.remote_filename,
291
+ "remoteVersion": remote_tuple.remote_version,
292
+ }
293
+ response = self._client.get("/files/by-tuple", params=params)
294
+ data = self._handle_response(response)
295
+ return FileRecord.model_validate(data)
296
+
228
297
  def get_index(
229
298
  self,
299
+ bucket: str | None = None,
230
300
  category: str | None = None,
231
301
  entity: str | None = None,
232
302
  tags: list[str] | None = None,
@@ -235,6 +305,7 @@ class R2IndexClient:
235
305
  Get file index (lightweight listing).
236
306
 
237
307
  Args:
308
+ bucket: Filter by bucket.
238
309
  category: Filter by category.
239
310
  entity: Filter by entity.
240
311
  tags: Filter by tags.
@@ -243,6 +314,8 @@ class R2IndexClient:
243
314
  List of IndexEntry objects.
244
315
  """
245
316
  params: dict[str, Any] = {}
317
+ if bucket:
318
+ params["bucket"] = bucket
246
319
  if category:
247
320
  params["category"] = category
248
321
  if entity:
@@ -425,7 +498,8 @@ class R2IndexClient:
425
498
 
426
499
  def upload_and_register(
427
500
  self,
428
- file_path: str | Path,
501
+ local_path: str | Path,
502
+ bucket: str,
429
503
  category: str,
430
504
  entity: str,
431
505
  remote_path: str,
@@ -446,7 +520,8 @@ class R2IndexClient:
446
520
  3. Register with r2index API
447
521
 
448
522
  Args:
449
- file_path: Path to the file to upload.
523
+ local_path: Local path to the file to upload.
524
+ bucket: The S3/R2 bucket name.
450
525
  category: File category.
451
526
  entity: File entity.
452
527
  remote_path: Remote path in R2 (e.g., "/data/files").
@@ -465,18 +540,18 @@ class R2IndexClient:
465
540
  R2IndexError: If R2 config is not provided.
466
541
  UploadError: If upload fails.
467
542
  """
468
- file_path = Path(file_path)
469
- uploader = self._get_uploader()
543
+ local_path = Path(local_path)
544
+ uploader = self._get_storage()
470
545
 
471
546
  # Step 1: Compute checksums
472
- checksums = compute_checksums(file_path)
547
+ checksums = compute_checksums(local_path)
473
548
 
474
549
  # Step 2: Build R2 object key
475
550
  object_key = f"{remote_path.strip('/')}/{remote_filename}"
476
551
 
477
552
  # Step 3: Upload to R2
478
553
  uploader.upload_file(
479
- file_path,
554
+ local_path,
480
555
  object_key,
481
556
  content_type=content_type,
482
557
  progress_callback=progress_callback,
@@ -484,6 +559,7 @@ class R2IndexClient:
484
559
 
485
560
  # Step 4: Register with API
486
561
  create_request = FileCreateRequest(
562
+ bucket=bucket,
487
563
  category=category,
488
564
  entity=entity,
489
565
  remote_path=remote_path,
@@ -500,3 +576,83 @@ class R2IndexClient:
500
576
  )
501
577
 
502
578
  return self.create_file(create_request)
579
+
580
+ def _get_public_ip(self) -> str:
581
+ """Fetch public IP address from checkip.amazonaws.com."""
582
+ response = httpx.get(CHECKIP_URL, timeout=10.0)
583
+ return response.text.strip()
584
+
585
+ def download_and_record(
586
+ self,
587
+ bucket: str,
588
+ object_id: str,
589
+ destination: str | Path,
590
+ ip_address: str | None = None,
591
+ user_agent: str | None = None,
592
+ progress_callback: Callable[[int], None] | None = None,
593
+ transfer_config: R2TransferConfig | None = None,
594
+ ) -> tuple[Path, FileRecord]:
595
+ """
596
+ Download a file from R2 and record the download in the index.
597
+
598
+ This is a convenience method that performs:
599
+ 1. Parse object_id into remote_path, remote_version, remote_filename
600
+ 2. Fetch file record from the API using these components
601
+ 3. Download the file from R2
602
+ 4. Record the download in the index for analytics
603
+
604
+ Args:
605
+ bucket: The S3/R2 bucket name.
606
+ object_id: Full S3 object path in format: /path/to/object/version/filename
607
+ Example: /releases/myapp/v1/myapp.zip
608
+ - remote_path: /releases/myapp
609
+ - remote_version: v1
610
+ - remote_filename: myapp.zip
611
+ destination: Local path where the file will be saved.
612
+ ip_address: IP address of the downloader. If not provided, fetched
613
+ from checkip.amazonaws.com.
614
+ user_agent: User agent string. Defaults to "elaunira-r2index/<package version>".
615
+ progress_callback: Optional callback for download progress.
616
+ transfer_config: Optional transfer configuration for multipart/threading.
617
+
618
+ Returns:
619
+ A tuple of (downloaded file path, file record).
620
+
621
+ Raises:
622
+ R2IndexError: If R2 config is not provided.
623
+ ValueError: If object_id format is invalid.
624
+ NotFoundError: If the file is not found in the index.
625
+ DownloadError: If download fails.
626
+ """
627
+ storage = self._get_storage()
628
+
629
+ # Resolve defaults
630
+ if ip_address is None:
631
+ ip_address = self._get_public_ip()
632
+ if user_agent is None:
633
+ user_agent = DEFAULT_USER_AGENT
634
+
635
+ # Step 1: Parse object_id into components
636
+ remote_tuple = _parse_object_id(object_id, bucket)
637
+
638
+ # Step 2: Get file record by tuple
639
+ file_record = self.get_file_by_tuple(remote_tuple)
640
+
641
+ # Step 3: Build R2 object key and download
642
+ object_key = object_id.strip("/")
643
+ downloaded_path = storage.download_file(
644
+ object_key,
645
+ destination,
646
+ progress_callback=progress_callback,
647
+ transfer_config=transfer_config,
648
+ )
649
+
650
+ # Step 4: Record the download
651
+ download_request = DownloadRecordRequest(
652
+ file_id=file_record.id,
653
+ ip_address=ip_address,
654
+ user_agent=user_agent,
655
+ )
656
+ self.record_download(download_request)
657
+
658
+ return downloaded_path, file_record
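The synchronous client gains the same `get_file_by_tuple` lookup, which pairs with the now bucket-aware `RemoteTuple` model; a minimal sketch with placeholder values:

```python
from elaunira.r2index import R2IndexClient, RemoteTuple

client = R2IndexClient(
    api_url="https://r2index.example.com",  # placeholder
    api_token="your-bearer-token",          # placeholder
)

record = client.get_file_by_tuple(
    RemoteTuple(
        bucket="my-bucket",
        remote_path="/releases/myapp",
        remote_filename="myapp.zip",
        remote_version="v1",
    )
)
print(record.id, record.bucket)
```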
@@ -38,3 +38,9 @@ class UploadError(R2IndexError):
38
38
  """Raised for R2 upload failures."""
39
39
 
40
40
  pass
41
+
42
+
43
+ class DownloadError(R2IndexError):
44
+ """Raised for R2 download failures."""
45
+
46
+ pass
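The added `DownloadError` slots in next to `UploadError`; a hedged sketch of handling it around `download_and_record` (placeholder configuration):

```python
from elaunira.r2index import DownloadError, NotFoundError, R2Config, R2IndexClient

client = R2IndexClient(
    api_url="https://r2index.example.com",  # placeholder
    api_token="your-bearer-token",          # placeholder
    r2_config=R2Config(
        access_key_id="...",
        secret_access_key="...",
        endpoint_url="https://your-account-id.r2.cloudflarestorage.com",
        bucket="my-bucket",
    ),
)

try:
    path, record = client.download_and_record(
        bucket="my-bucket",
        object_id="/releases/myapp/v1/myapp.zip",
        destination="./downloads/myapp.zip",
    )
except NotFoundError:
    print("no index record matches that object_id")
except DownloadError as exc:
    print(f"transfer from R2 failed: {exc}")
```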
@@ -9,6 +9,7 @@ from pydantic import BaseModel, Field
9
9
  class RemoteTuple(BaseModel):
10
10
  """Remote file identifier tuple."""
11
11
 
12
+ bucket: str
12
13
  remote_path: str
13
14
  remote_filename: str
14
15
  remote_version: str
@@ -17,6 +18,7 @@ class RemoteTuple(BaseModel):
17
18
  class FileCreateRequest(BaseModel):
18
19
  """Request payload for creating/upserting a file record."""
19
20
 
21
+ bucket: str
20
22
  category: str
21
23
  entity: str
22
24
  remote_path: str
@@ -35,6 +37,7 @@ class FileCreateRequest(BaseModel):
35
37
  class FileUpdateRequest(BaseModel):
36
38
  """Request payload for updating a file record."""
37
39
 
40
+ bucket: str | None = None
38
41
  category: str | None = None
39
42
  entity: str | None = None
40
43
  remote_path: str | None = None
@@ -54,6 +57,7 @@ class FileRecord(BaseModel):
54
57
  """File record as returned by the API."""
55
58
 
56
59
  id: str
60
+ bucket: str
57
61
  category: str
58
62
  entity: str
59
63
  remote_path: str
@@ -86,6 +90,7 @@ class IndexEntry(BaseModel):
86
90
  """Single entry in the index response."""
87
91
 
88
92
  id: str
93
+ bucket: str
89
94
  category: str
90
95
  entity: str
91
96
  remote_path: str
File without changes
@@ -1,17 +1,41 @@
1
- """Synchronous R2 uploader using boto3."""
1
+ """Synchronous R2 storage operations using boto3."""
2
2
 
3
+ import os
3
4
  from collections.abc import Callable
4
- from dataclasses import dataclass
5
+ from dataclasses import dataclass, field
5
6
  from pathlib import Path
6
7
 
7
8
  import boto3
8
9
  from boto3.s3.transfer import TransferConfig
9
10
 
10
- from .exceptions import UploadError
11
+ from .exceptions import DownloadError, UploadError
11
12
 
12
- # 100MB threshold and part size for multipart uploads
13
- MULTIPART_THRESHOLD = 100 * 1024 * 1024
14
- MULTIPART_PART_SIZE = 100 * 1024 * 1024
13
+ # Default thresholds and part sizes for multipart transfers
14
+ DEFAULT_MULTIPART_CHUNKSIZE = 100 * 1024 * 1024 # 100MB
15
+ DEFAULT_MULTIPART_THRESHOLD = 100 * 1024 * 1024 # 100MB
16
+
17
+
18
+ def _default_max_concurrency() -> int:
19
+ """Return default max concurrency: 2x CPU cores, minimum 4."""
20
+ cpu_count = os.cpu_count() or 2
21
+ return max(4, cpu_count * 2)
22
+
23
+
24
+ @dataclass
25
+ class R2TransferConfig:
26
+ """Configuration for R2 transfer operations (uploads/downloads)."""
27
+
28
+ multipart_threshold: int = DEFAULT_MULTIPART_THRESHOLD
29
+ """Size threshold (bytes) to trigger multipart transfer. Default 100MB."""
30
+
31
+ multipart_chunksize: int = DEFAULT_MULTIPART_CHUNKSIZE
32
+ """Size of each part (bytes) in multipart transfer. Default 100MB."""
33
+
34
+ max_concurrency: int = field(default_factory=_default_max_concurrency)
35
+ """Number of parallel threads for multipart transfer. Default 2x CPU cores."""
36
+
37
+ use_threads: bool = True
38
+ """Whether to use threads for parallel transfer. Default True."""
15
39
 
16
40
 
17
41
  @dataclass
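To make the new defaults explicit, a small sketch exercising `R2TransferConfig` as defined in this hunk (the asserts restate the defaults; the tuned instance is illustrative):

```python
import os

from elaunira.r2index import R2TransferConfig

tc = R2TransferConfig()
assert tc.multipart_threshold == 100 * 1024 * 1024  # 100MB
assert tc.multipart_chunksize == 100 * 1024 * 1024  # 100MB
assert tc.max_concurrency == max(4, (os.cpu_count() or 2) * 2)
assert tc.use_threads is True

# Tune chunk size and parallelism for large transfers.
big_transfers = R2TransferConfig(
    multipart_chunksize=32 * 1024 * 1024,
    max_concurrency=16,
)
```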
@@ -25,12 +49,12 @@ class R2Config:
25
49
  region: str = "auto"
26
50
 
27
51
 
28
- class R2Uploader:
29
- """Synchronous R2 uploader using boto3."""
52
+ class R2Storage:
53
+ """Synchronous R2 storage client using boto3."""
30
54
 
31
55
  def __init__(self, config: R2Config) -> None:
32
56
  """
33
- Initialize the R2 uploader.
57
+ Initialize the R2 storage client.
34
58
 
35
59
  Args:
36
60
  config: R2 configuration with credentials and endpoint.
@@ -50,17 +74,19 @@ class R2Uploader:
50
74
  object_key: str,
51
75
  content_type: str | None = None,
52
76
  progress_callback: Callable[[int], None] | None = None,
77
+ transfer_config: R2TransferConfig | None = None,
53
78
  ) -> str:
54
79
  """
55
80
  Upload a file to R2.
56
81
 
57
- Uses multipart upload for files larger than 100MB.
82
+ Uses multipart upload for files larger than the configured threshold.
58
83
 
59
84
  Args:
60
85
  file_path: Path to the file to upload.
61
86
  object_key: The key (path) to store the object under in R2.
62
87
  content_type: Optional content type for the object.
63
88
  progress_callback: Optional callback called with bytes uploaded so far.
89
+ transfer_config: Optional transfer configuration for multipart/threading.
64
90
 
65
91
  Returns:
66
92
  The object key of the uploaded file.
@@ -73,10 +99,12 @@ class R2Uploader:
73
99
  if not file_path.exists():
74
100
  raise UploadError(f"File not found: {file_path}")
75
101
 
76
- transfer_config = TransferConfig(
77
- multipart_threshold=MULTIPART_THRESHOLD,
78
- multipart_chunksize=MULTIPART_PART_SIZE,
79
- use_threads=True,
102
+ tc = transfer_config or R2TransferConfig()
103
+ boto_transfer_config = TransferConfig(
104
+ multipart_threshold=tc.multipart_threshold,
105
+ multipart_chunksize=tc.multipart_chunksize,
106
+ max_concurrency=tc.max_concurrency,
107
+ use_threads=tc.use_threads,
80
108
  )
81
109
 
82
110
  extra_args = {}
@@ -92,7 +120,7 @@ class R2Uploader:
92
120
  str(file_path),
93
121
  self.config.bucket,
94
122
  object_key,
95
- Config=transfer_config,
123
+ Config=boto_transfer_config,
96
124
  ExtraArgs=extra_args if extra_args else None,
97
125
  Callback=callback,
98
126
  )
@@ -134,6 +162,58 @@ class R2Uploader:
134
162
  return False
135
163
  raise UploadError(f"Failed to check object existence: {e}") from e
136
164
 
165
+ def download_file(
166
+ self,
167
+ object_key: str,
168
+ file_path: str | Path,
169
+ progress_callback: Callable[[int], None] | None = None,
170
+ transfer_config: R2TransferConfig | None = None,
171
+ ) -> Path:
172
+ """
173
+ Download a file from R2.
174
+
175
+ Args:
176
+ object_key: The key (path) of the object in R2.
177
+ file_path: Local path where the file will be saved.
178
+ progress_callback: Optional callback called with bytes downloaded so far.
179
+ transfer_config: Optional transfer configuration for multipart/threading.
180
+
181
+ Returns:
182
+ The path to the downloaded file.
183
+
184
+ Raises:
185
+ DownloadError: If the download fails.
186
+ """
187
+ file_path = Path(file_path)
188
+
189
+ # Ensure parent directory exists
190
+ file_path.parent.mkdir(parents=True, exist_ok=True)
191
+
192
+ tc = transfer_config or R2TransferConfig()
193
+ boto_transfer_config = TransferConfig(
194
+ multipart_threshold=tc.multipart_threshold,
195
+ multipart_chunksize=tc.multipart_chunksize,
196
+ max_concurrency=tc.max_concurrency,
197
+ use_threads=tc.use_threads,
198
+ )
199
+
200
+ callback = None
201
+ if progress_callback:
202
+ callback = _ProgressCallback(progress_callback)
203
+
204
+ try:
205
+ self._client.download_file(
206
+ self.config.bucket,
207
+ object_key,
208
+ str(file_path),
209
+ Config=boto_transfer_config,
210
+ Callback=callback,
211
+ )
212
+ except Exception as e:
213
+ raise DownloadError(f"Failed to download file from R2: {e}") from e
214
+
215
+ return file_path
216
+
137
217
 
138
218
  class _ProgressCallback:
139
219
  """Wrapper to track cumulative progress for boto3 callback."""
@@ -0,0 +1,164 @@
1
+ Metadata-Version: 2.4
2
+ Name: elaunira-r2index
3
+ Version: 0.2.0
4
+ Summary: Python library for uploading files to R2 and registering them with the r2index API
5
+ Project-URL: Homepage, https://github.com/elaunira/elaunira-r2-index
6
+ Project-URL: Repository, https://github.com/elaunira/elaunira-r2-index
7
+ Author: Elaunira
8
+ License-Expression: MIT
9
+ Keywords: cloudflare,index,r2,storage,upload
10
+ Classifier: Development Status :: 4 - Beta
11
+ Classifier: Intended Audience :: Developers
12
+ Classifier: License :: OSI Approved :: MIT License
13
+ Classifier: Programming Language :: Python :: 3
14
+ Classifier: Programming Language :: Python :: 3.12
15
+ Classifier: Programming Language :: Python :: 3.13
16
+ Classifier: Typing :: Typed
17
+ Requires-Python: >=3.12
18
+ Requires-Dist: aioboto3>=13.0.0
19
+ Requires-Dist: boto3>=1.35.0
20
+ Requires-Dist: httpx>=0.27.0
21
+ Requires-Dist: pydantic>=2.10.0
22
+ Provides-Extra: dev
23
+ Requires-Dist: boto3-stubs[s3]>=1.35.0; extra == 'dev'
24
+ Requires-Dist: build>=1.2.0; extra == 'dev'
25
+ Requires-Dist: mypy>=1.15.0; extra == 'dev'
26
+ Requires-Dist: pytest-asyncio>=0.25.0; extra == 'dev'
27
+ Requires-Dist: pytest-httpx>=0.35.0; extra == 'dev'
28
+ Requires-Dist: pytest>=8.0.0; extra == 'dev'
29
+ Requires-Dist: ruff>=0.9.0; extra == 'dev'
30
+ Requires-Dist: twine>=6.0.0; extra == 'dev'
31
+ Description-Content-Type: text/markdown
32
+
33
+ # elaunira-r2index
34
+
35
+ Python library for uploading and downloading files to/from Cloudflare R2 with the r2index API.
36
+
37
+ ## Installation
38
+
39
+ ```bash
40
+ pip install elaunira-r2index
41
+ ```
42
+
43
+ ## Usage
44
+
45
+ ### Sync Client
46
+
47
+ ```python
48
+ from elaunira.r2index import R2IndexClient, R2Config
49
+
50
+ client = R2IndexClient(
51
+ api_url="https://r2index.example.com",
52
+ api_token="your-bearer-token",
53
+ r2_config=R2Config(
54
+ access_key_id="your-r2-access-key-id",
55
+ secret_access_key="your-r2-secret-access-key",
56
+ endpoint_url="https://your-account-id.r2.cloudflarestorage.com",
57
+ bucket="your-bucket-name",
58
+ ),
59
+ )
60
+
61
+ # Upload and register a file
62
+ record = client.upload_and_register(
63
+ local_path="./myfile.zip",
64
+ bucket="my-bucket",
65
+ category="software",
66
+ entity="myapp",
67
+ remote_path="/releases/myapp",
68
+ remote_filename="myapp.zip",
69
+ remote_version="v1",
70
+ tags=["release", "stable"],
71
+ )
72
+
73
+ # Download a file and record the download
74
+ # IP address is auto-detected; user agent defaults to "elaunira-r2index/<package version>"
75
+ path, record = client.download_and_record(
76
+ bucket="my-bucket",
77
+ object_id="/releases/myapp/v1/myapp.zip",
78
+ destination="./downloads/myfile.zip",
79
+ )
80
+ ```
81
+
82
+ ### Async Client
83
+
84
+ ```python
85
+ from elaunira.r2index import AsyncR2IndexClient, R2Config
86
+
87
+ async with AsyncR2IndexClient(
88
+ api_url="https://r2index.example.com",
89
+ api_token="your-bearer-token",
90
+ r2_config=R2Config(
91
+ access_key_id="your-r2-access-key-id",
92
+ secret_access_key="your-r2-secret-access-key",
93
+ endpoint_url="https://your-account-id.r2.cloudflarestorage.com",
94
+ bucket="your-bucket-name",
95
+ ),
96
+ ) as client:
97
+ # Upload
98
+ record = await client.upload_and_register(
99
+ local_path="./myfile.zip",
100
+ bucket="my-bucket",
101
+ category="software",
102
+ entity="myapp",
103
+ remote_path="/releases/myapp",
104
+ remote_filename="myapp.zip",
105
+ remote_version="v1",
106
+ tags=["release", "stable"],
107
+ )
108
+
109
+ # Download
110
+ path, record = await client.download_and_record(
111
+ bucket="my-bucket",
112
+ object_id="/releases/myapp/v1/myapp.zip",
113
+ destination="./downloads/myfile.zip",
114
+ )
115
+ ```
116
+
117
+ ### Transfer Configuration
118
+
119
+ Control multipart transfer settings with `R2TransferConfig`:
120
+
121
+ ```python
122
+ from elaunira.r2index import R2IndexClient, R2Config, R2TransferConfig
123
+
124
+ client = R2IndexClient(
125
+ api_url="https://r2index.example.com",
126
+ api_token="your-bearer-token",
127
+ r2_config=R2Config(...),
128
+ )
129
+
130
+ # Custom transfer settings
131
+ transfer_config = R2TransferConfig(
132
+ multipart_threshold=100 * 1024 * 1024, # 100MB (default)
133
+ multipart_chunksize=32 * 1024 * 1024, # 32MB chunks
134
+ max_concurrency=64, # 64 parallel threads
135
+ use_threads=True, # Enable threading (default)
136
+ )
137
+
138
+ path, record = client.download_and_record(
139
+ bucket="my-bucket",
140
+ object_id="/data/files/v2/largefile.zip",
141
+ destination="./downloads/largefile.zip",
142
+ transfer_config=transfer_config,
143
+ )
144
+ ```
145
+
146
+ Default `max_concurrency` is 2x the number of CPU cores (minimum 4).
147
+
148
+ ### Progress Tracking
149
+
150
+ ```python
151
+ def on_progress(bytes_transferred: int) -> None:
152
+ print(f"Downloaded: {bytes_transferred / 1024 / 1024:.1f} MB")
153
+
154
+ path, record = client.download_and_record(
155
+ bucket="my-bucket",
156
+ object_id="/releases/myapp/v1/myapp.zip",
157
+ destination="./downloads/myfile.zip",
158
+ progress_callback=on_progress,
159
+ )
160
+ ```
161
+
162
+ ## License
163
+
164
+ MIT
@@ -0,0 +1,13 @@
1
+ elaunira/__init__.py,sha256=qaXVGBU6uIJyveNTEbWux5EcfVSM186PvDwjyxiXLw4,34
2
+ elaunira/r2index/__init__.py,sha256=zbQ929kyFDEoexnvyerpRarQv1VfBKeXyFy3c2U-xAI,2075
3
+ elaunira/r2index/async_client.py,sha256=SfNUPWIcl_hucP0PvzA116uUQUi4sjyF0kEouVVKiLY,20527
4
+ elaunira/r2index/async_storage.py,sha256=wDoxjXbP0SfMxK9DOmcfcfnQIAsfpnOPr0JoN-yo80I,6903
5
+ elaunira/r2index/checksums.py,sha256=tqRTJ7j3pWLJlQ8FQE20JRYk9lXy5YZzymdpYOhsTFo,3281
6
+ elaunira/r2index/client.py,sha256=1bAFXCYP93Zh61PM0gWPzrG6ByCUW01W_MVJCs3FZH0,20072
7
+ elaunira/r2index/exceptions.py,sha256=wgWNUgev9Azbsn4EFTAgVXECjW-TuDR6D1FVo5orTco,885
8
+ elaunira/r2index/models.py,sha256=OfgXYcRFrfYip9GHPAJz9gleWdnpzuanD6adVWAYRGk,4850
9
+ elaunira/r2index/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
10
+ elaunira/r2index/storage.py,sha256=3l6w5KS5XeCh8_5rZ7CI2MiPpFi30LOPYUZYi_Rq5Wo,7119
11
+ elaunira_r2index-0.2.0.dist-info/METADATA,sha256=Scdi6AIol__OX4GCEy3P9cQQnBXAKOp9wMhw-9A0zuQ,4716
12
+ elaunira_r2index-0.2.0.dist-info/WHEEL,sha256=WLgqFyCfm_KASv4WHyYy0P3pM_m7J5L9k2skdKLirC8,87
13
+ elaunira_r2index-0.2.0.dist-info/RECORD,,
@@ -1,101 +0,0 @@
1
- Metadata-Version: 2.4
2
- Name: elaunira-r2index
3
- Version: 0.1.0
4
- Summary: Python library for uploading files to R2 and registering them with the r2index API
5
- Project-URL: Homepage, https://github.com/elaunira/elaunira-r2-index
6
- Project-URL: Repository, https://github.com/elaunira/elaunira-r2-index
7
- Author: Elaunira
8
- License-Expression: MIT
9
- Keywords: cloudflare,index,r2,storage,upload
10
- Classifier: Development Status :: 4 - Beta
11
- Classifier: Intended Audience :: Developers
12
- Classifier: License :: OSI Approved :: MIT License
13
- Classifier: Programming Language :: Python :: 3
14
- Classifier: Programming Language :: Python :: 3.12
15
- Classifier: Programming Language :: Python :: 3.13
16
- Classifier: Typing :: Typed
17
- Requires-Python: >=3.12
18
- Requires-Dist: aioboto3>=12.0.0
19
- Requires-Dist: boto3>=1.34.0
20
- Requires-Dist: httpx>=0.25.0
21
- Requires-Dist: pydantic>=2.0.0
22
- Provides-Extra: dev
23
- Requires-Dist: boto3-stubs[s3]>=1.34.0; extra == 'dev'
24
- Requires-Dist: build>=1.0.0; extra == 'dev'
25
- Requires-Dist: mypy>=1.8.0; extra == 'dev'
26
- Requires-Dist: pytest-asyncio>=0.23.0; extra == 'dev'
27
- Requires-Dist: pytest-httpx>=0.30.0; extra == 'dev'
28
- Requires-Dist: pytest>=8.0.0; extra == 'dev'
29
- Requires-Dist: ruff>=0.3.0; extra == 'dev'
30
- Requires-Dist: twine>=5.0.0; extra == 'dev'
31
- Description-Content-Type: text/markdown
32
-
33
- # elaunira-r2index
34
-
35
- Python library for uploading files to Cloudflare R2 and registering them with the r2index API.
36
-
37
- ## Installation
38
-
39
- ```bash
40
- pip install elaunira-r2index
41
- ```
42
-
43
- ## Usage
44
-
45
- ### Sync Client
46
-
47
- ```python
48
- from elaunira.r2index import R2IndexClient, R2Config
49
-
50
- client = R2IndexClient(
51
- api_url="https://r2index.example.com",
52
- api_token="your-bearer-token",
53
- r2_config=R2Config(
54
- access_key_id="your-r2-access-key-id",
55
- secret_access_key="your-r2-secret-access-key",
56
- endpoint_url="https://your-account-id.r2.cloudflarestorage.com",
57
- bucket="your-bucket-name",
58
- ),
59
- )
60
-
61
- # Upload and register a file
62
- record = client.upload_and_register(
63
- file_path="./myfile.zip",
64
- category="software",
65
- entity="myapp",
66
- remote_path="/releases",
67
- remote_filename="myapp-1.0.0.zip",
68
- remote_version="1.0.0",
69
- tags=["release", "stable"],
70
- )
71
- ```
72
-
73
- ### Async Client
74
-
75
- ```python
76
- from elaunira.r2index import AsyncR2IndexClient, R2Config
77
-
78
- async with AsyncR2IndexClient(
79
- api_url="https://r2index.example.com",
80
- api_token="your-bearer-token",
81
- r2_config=R2Config(
82
- access_key_id="your-r2-access-key-id",
83
- secret_access_key="your-r2-secret-access-key",
84
- endpoint_url="https://your-account-id.r2.cloudflarestorage.com",
85
- bucket="your-bucket-name",
86
- ),
87
- ) as client:
88
- record = await client.upload_and_register(
89
- file_path="./myfile.zip",
90
- category="software",
91
- entity="myapp",
92
- remote_path="/releases",
93
- remote_filename="myapp-1.0.0.zip",
94
- remote_version="1.0.0",
95
- tags=["release", "stable"],
96
- )
97
- ```
98
-
99
- ## License
100
-
101
- MIT
@@ -1,12 +0,0 @@
1
- elaunira/__init__.py,sha256=qaXVGBU6uIJyveNTEbWux5EcfVSM186PvDwjyxiXLw4,34
2
- elaunira/r2index/__init__.py,sha256=dPjPlDfBIUqzS9STNNtyMF1LyPakXeY5I1ypHEM_jTo,2878
3
- elaunira/r2index/async_client.py,sha256=T8fWZKnJZOTgsDEvdvv67xL-d_QJ5pP1A3LznEZZ6gY,14936
4
- elaunira/r2index/async_uploader.py,sha256=ctf65f2DjT9gckEh7XXpHRp1Ym14zv6vx1zIgN_SJpg,4868
5
- elaunira/r2index/checksums.py,sha256=tqRTJ7j3pWLJlQ8FQE20JRYk9lXy5YZzymdpYOhsTFo,3281
6
- elaunira/r2index/client.py,sha256=LPtE4uq_OKfmyFJKMKM_hdmU8euVAWkCe17T7HAzXL8,14608
7
- elaunira/r2index/exceptions.py,sha256=JTSztfTH-jar2jSKEUmQO8VKfRNWfHTB0WgvwLJCVU8,795
8
- elaunira/r2index/models.py,sha256=ie6uOpamrdmOc9bljym61FurRzpcsoQ1qB4rQQGu7_I,4756
9
- elaunira/r2index/uploader.py,sha256=kKkC6f_YwMIJ-Tict7mn3S0Sdim6pI4rRXF0xvNwjh0,4250
10
- elaunira_r2index-0.1.0.dist-info/METADATA,sha256=zQ8Up7wJ-7g26hwL-Ay3QXxgtTaIs8ljUGVgloupv3M,2923
11
- elaunira_r2index-0.1.0.dist-info/WHEEL,sha256=WLgqFyCfm_KASv4WHyYy0P3pM_m7J5L9k2skdKLirC8,87
12
- elaunira_r2index-0.1.0.dist-info/RECORD,,