putplace 0.4.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of putplace might be problematic; see the package registry page for more details.

putplace/storage.py ADDED
@@ -0,0 +1,456 @@
1
+ """Storage backend abstraction for file content storage."""
2
+
3
+ import logging
4
+ from abc import ABC, abstractmethod
5
+ from pathlib import Path
6
+ from typing import Optional
7
+
8
+ logger = logging.getLogger(__name__)
9
+
10
+
11
class StorageBackend(ABC):
    """Interface that every file-content storage backend implements.

    Content is addressed by its SHA256 hex digest: the same digest is the
    lookup key for store, retrieve, existence checks, and deletion.
    """

    @abstractmethod
    async def store(self, sha256: str, content: bytes) -> bool:
        """Persist *content* under the key *sha256*.

        Args:
            sha256: SHA256 hash of the file (used as key)
            content: File content bytes

        Returns:
            True if stored successfully, False otherwise
        """
        ...

    @abstractmethod
    async def retrieve(self, sha256: str) -> Optional[bytes]:
        """Return the stored bytes for *sha256*, or None when not found.

        Args:
            sha256: SHA256 hash of the file
        """
        ...

    @abstractmethod
    async def exists(self, sha256: str) -> bool:
        """Return True when content for *sha256* is present in the backend.

        Args:
            sha256: SHA256 hash of the file
        """
        ...

    @abstractmethod
    async def delete(self, sha256: str) -> bool:
        """Remove the content stored under *sha256*.

        Args:
            sha256: SHA256 hash of the file

        Returns:
            True if deleted successfully, False if not found
        """
        ...

    @abstractmethod
    def get_storage_path(self, sha256: str) -> str:
        """Return the backend-specific path or URI for *sha256*.

        Examples: "/var/putplace/files/e3/e3b..." for local storage or
        "s3://bucket/files/e3/e3b..." for S3.
        """
        ...
74
+
75
+
76
class LocalStorage(StorageBackend):
    """Local filesystem storage backend.

    Stores files in a directory structure: {base_path}/{sha256[:2]}/{sha256}.
    The two-character fan-out spreads content across up to 256 subdirectories
    so no single directory accumulates an unbounded number of files.
    """

    def __init__(self, base_path: str = "/var/putplace/files"):
        """Initialize local storage.

        Args:
            base_path: Base directory path for file storage
        """
        self.base_path = Path(base_path)
        logger.info("Initialized LocalStorage with base_path: %s", self.base_path)

    def _get_file_path(self, sha256: str) -> Path:
        """Return the path for *sha256*: {base_path}/{sha256[:2]}/{sha256}."""
        # First 2 hex characters pick the fan-out subdirectory.
        return self.base_path / sha256[:2] / sha256

    async def store(self, sha256: str, content: bytes) -> bool:
        """Store file content to the local filesystem.

        The content is written to a temporary sibling file and atomically
        renamed into place, so a crash mid-write can never leave a torn,
        partially written file under the content-addressed name.

        Args:
            sha256: SHA256 hash of the file (used as key)
            content: File content bytes

        Returns:
            True if stored successfully, False otherwise
        """
        try:
            file_path = self._get_file_path(sha256)
            # Create the fan-out directory if it doesn't exist yet.
            file_path.parent.mkdir(parents=True, exist_ok=True)

            # Temp-write + rename: Path.replace (os.replace) is atomic on
            # POSIX, so readers observe either the old state or the full file.
            tmp_path = file_path.with_name(file_path.name + ".tmp")
            tmp_path.write_bytes(content)
            tmp_path.replace(file_path)

            logger.info("Stored file: %s (%d bytes) at %s", sha256, len(content), file_path)
            return True
        except OSError as e:  # IOError is an alias of OSError
            logger.error("Failed to store file %s: %s", sha256, e)
            return False

    async def retrieve(self, sha256: str) -> Optional[bytes]:
        """Retrieve file content from the local filesystem.

        Args:
            sha256: SHA256 hash of the file

        Returns:
            File content bytes or None if not found
        """
        file_path = self._get_file_path(sha256)
        try:
            # EAFP: open directly instead of exists()+open, avoiding a
            # check-then-act race with a concurrent delete().
            content = file_path.read_bytes()
        except FileNotFoundError:
            logger.debug("File not found: %s", sha256)
            return None
        except OSError as e:
            logger.error("Failed to retrieve file %s: %s", sha256, e)
            return None
        logger.debug("Retrieved file: %s (%d bytes)", sha256, len(content))
        return content

    async def exists(self, sha256: str) -> bool:
        """Return True when the file for *sha256* exists on disk."""
        return self._get_file_path(sha256).exists()

    async def delete(self, sha256: str) -> bool:
        """Delete the file for *sha256* from the local filesystem.

        Args:
            sha256: SHA256 hash of the file

        Returns:
            True if deleted successfully, False if not found
        """
        file_path = self._get_file_path(sha256)
        try:
            # EAFP: unlink directly; FileNotFoundError is the "absent" case.
            file_path.unlink()
        except FileNotFoundError:
            logger.debug("File not found for deletion: %s", sha256)
            return False
        except OSError as e:
            logger.error("Failed to delete file %s: %s", sha256, e)
            return False

        logger.info("Deleted file: %s", sha256)
        # Best-effort cleanup of the now-possibly-empty fan-out directory.
        try:
            file_path.parent.rmdir()
            logger.debug("Removed empty directory: %s", file_path.parent)
        except OSError:
            # Directory not empty (or other benign error) — leave it.
            pass
        return True

    def get_storage_path(self, sha256: str) -> str:
        """Return the absolute filesystem path for *sha256* as a string."""
        return str(self._get_file_path(sha256).absolute())
214
+
215
+
216
class S3Storage(StorageBackend):
    """AWS S3 storage backend.

    Stores files in an S3 bucket keyed by SHA256 under an optional prefix,
    with a two-character fan-out: {prefix}{sha256[:2]}/{sha256}.
    """

    def __init__(
        self,
        bucket_name: str,
        region_name: str = "us-east-1",
        prefix: str = "files/",
        aws_profile: Optional[str] = None,
        aws_access_key_id: Optional[str] = None,
        aws_secret_access_key: Optional[str] = None,
    ):
        """Initialize S3 storage.

        Args:
            bucket_name: S3 bucket name
            region_name: AWS region name
            prefix: Key prefix for stored files (default: "files/")
            aws_profile: AWS profile name from ~/.aws/credentials (optional)
            aws_access_key_id: AWS access key (optional, not recommended)
            aws_secret_access_key: AWS secret key (optional, not recommended)

        Raises:
            RuntimeError: If the optional aioboto3 dependency is not installed.

        Note:
            If no credentials are provided, aioboto3 will use the standard AWS
            credential chain:
            1. Environment variables (AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY)
            2. AWS credentials file (~/.aws/credentials)
            3. IAM role (if running on EC2/ECS/Lambda) - RECOMMENDED for production
        """
        self.bucket_name = bucket_name
        self.region_name = region_name
        # Normalize to exactly one trailing "/". An empty prefix must stay
        # empty: rstrip("/") + "/" would turn "" into "/", rooting every key
        # under an empty first path segment.
        self.prefix = prefix.rstrip("/") + "/" if prefix else ""

        # Import aioboto3 lazily so S3 support stays an optional dependency.
        # The try covers ONLY the import, so an ImportError raised later by
        # session construction is not silently re-labelled.
        try:
            import aioboto3
        except ImportError as e:
            logger.error("aioboto3 library not installed. Install with: pip install aioboto3")
            # Chain the original ImportError for easier debugging.
            raise RuntimeError("aioboto3 library required for S3 storage") from e

        # Create session with appropriate credentials.
        session_kwargs = {}
        if aws_profile:
            # Use specific AWS profile from ~/.aws/credentials.
            session_kwargs["profile_name"] = aws_profile
            logger.info("Using AWS profile: %s", aws_profile)
        elif aws_access_key_id and aws_secret_access_key:
            # Use explicit credentials (not recommended for production).
            session_kwargs["aws_access_key_id"] = aws_access_key_id
            session_kwargs["aws_secret_access_key"] = aws_secret_access_key
            logger.warning("Using explicit AWS credentials - consider using IAM roles or profiles instead")
        else:
            # Default credential chain (env vars, ~/.aws/credentials, IAM role).
            logger.info("Using default AWS credential chain")

        self.session = aioboto3.Session(**session_kwargs)
        logger.info(
            "Initialized S3Storage with bucket: %s, region: %s, prefix: %s",
            bucket_name, region_name, prefix,
        )

    def _get_s3_key(self, sha256: str) -> str:
        """Return the S3 key for *sha256*: {prefix}{sha256[:2]}/{sha256}."""
        # First 2 characters form a fan-out "directory" for organization.
        return f"{self.prefix}{sha256[:2]}/{sha256}"

    async def store(self, sha256: str, content: bytes) -> bool:
        """Store file content to S3.

        Args:
            sha256: SHA256 hash of the file
            content: File content bytes

        Returns:
            True if stored successfully, False otherwise
        """
        try:
            s3_key = self._get_s3_key(sha256)
            async with self.session.client("s3", region_name=self.region_name) as s3:
                await s3.put_object(
                    Bucket=self.bucket_name,
                    Key=s3_key,
                    Body=content,
                    ContentType="application/octet-stream",
                    # Record the hash as object metadata for later auditing.
                    Metadata={"sha256": sha256},
                )
            logger.info(
                "Stored file in S3: %s (%d bytes) at s3://%s/%s",
                sha256, len(content), self.bucket_name, s3_key,
            )
            return True
        except Exception as e:
            # Broad catch: aioboto3 raises botocore ClientError subtypes that
            # we cannot import here without hard-depending on botocore.
            logger.error("Failed to store file %s in S3: %s", sha256, e)
            return False

    async def retrieve(self, sha256: str) -> Optional[bytes]:
        """Retrieve file content from S3.

        Args:
            sha256: SHA256 hash of the file

        Returns:
            File content bytes or None if not found
        """
        try:
            s3_key = self._get_s3_key(sha256)
            async with self.session.client("s3", region_name=self.region_name) as s3:
                response = await s3.get_object(Bucket=self.bucket_name, Key=s3_key)
                content = await response["Body"].read()
            logger.debug("Retrieved file from S3: %s (%d bytes)", sha256, len(content))
            return content
        except Exception as e:
            # NoSuchKey means "not found" — an expected, non-error outcome.
            if hasattr(e, "response") and e.response.get("Error", {}).get("Code") == "NoSuchKey":
                logger.debug("File not found in S3: %s", sha256)
                return None
            logger.error("Failed to retrieve file %s from S3: %s", sha256, e)
            return None

    async def exists(self, sha256: str) -> bool:
        """Check if file exists in S3 via a HEAD request.

        Args:
            sha256: SHA256 hash of the file

        Returns:
            True if file exists, False otherwise
        """
        try:
            s3_key = self._get_s3_key(sha256)
            async with self.session.client("s3", region_name=self.region_name) as s3:
                await s3.head_object(Bucket=self.bucket_name, Key=s3_key)
            return True
        except Exception as e:
            # head_object reports a missing key as an HTTP 404 error.
            if hasattr(e, "response") and e.response.get("ResponseMetadata", {}).get("HTTPStatusCode") == 404:
                return False
            logger.error("Failed to check existence of file %s in S3: %s", sha256, e)
            return False

    async def delete(self, sha256: str) -> bool:
        """Delete file from S3.

        Args:
            sha256: SHA256 hash of the file

        Returns:
            True if deleted successfully, False if not found
        """
        try:
            # S3 DeleteObject is idempotent and succeeds for missing keys, so
            # an explicit existence check is needed to honour the contract of
            # returning False when the file was not present.
            if not await self.exists(sha256):
                logger.debug("File not found in S3 for deletion: %s", sha256)
                return False

            s3_key = self._get_s3_key(sha256)
            async with self.session.client("s3", region_name=self.region_name) as s3:
                await s3.delete_object(Bucket=self.bucket_name, Key=s3_key)
            logger.info("Deleted file from S3: %s", sha256)
            return True
        except Exception as e:
            logger.error("Failed to delete file %s from S3: %s", sha256, e)
            return False

    def get_storage_path(self, sha256: str) -> str:
        """Return the S3 URI ("s3://bucket/key") for *sha256*."""
        return f"s3://{self.bucket_name}/{self._get_s3_key(sha256)}"
416
+
417
+
418
def get_storage_backend(backend_type: str, **kwargs) -> StorageBackend:
    """Factory function to get a storage backend instance.

    Args:
        backend_type: Type of storage backend ("local" or "s3",
            case-insensitive)
        **kwargs: Backend-specific configuration. For "local": base_path.
            For "s3": bucket_name (required), region_name, prefix,
            aws_profile, aws_access_key_id, aws_secret_access_key.

    Returns:
        StorageBackend instance

    Raises:
        ValueError: If backend_type is not supported, or bucket_name is
            missing for the "s3" backend.
    """
    # Accept "Local"/"S3" etc.; non-strings fall through to the error below.
    kind = backend_type.lower() if isinstance(backend_type, str) else None

    if kind == "local":
        return LocalStorage(base_path=kwargs.get("base_path", "/var/putplace/files"))

    if kind == "s3":
        bucket_name = kwargs.get("bucket_name")
        if not bucket_name:
            raise ValueError("bucket_name is required for S3 storage")
        return S3Storage(
            bucket_name=bucket_name,
            region_name=kwargs.get("region_name", "us-east-1"),
            prefix=kwargs.get("prefix", "files/"),
            aws_profile=kwargs.get("aws_profile"),
            aws_access_key_id=kwargs.get("aws_access_key_id"),
            aws_secret_access_key=kwargs.get("aws_secret_access_key"),
        )

    raise ValueError(f"Unsupported storage backend: {backend_type}")
putplace/user_auth.py ADDED
@@ -0,0 +1,52 @@
1
+ """User authentication utilities."""
2
+
3
import os
from datetime import datetime, timedelta, timezone
from typing import Optional

from argon2 import PasswordHasher
from argon2.exceptions import VerifyMismatchError
from jose import JWTError, jwt
9
+
10
# Password hashing using Argon2 (the library's default parameters).
pwd_hasher = PasswordHasher()

# JWT settings.
# SECURITY: the signing key must come from the environment in production;
# the hard-coded fallback is only suitable for local development.
SECRET_KEY = os.environ.get(
    "PUTPLACE_SECRET_KEY", "your-secret-key-change-this-in-production"
)
ALGORITHM = "HS256"
ACCESS_TOKEN_EXPIRE_MINUTES = 30
17
+
18
+
19
def verify_password(plain_password: str, hashed_password: str) -> bool:
    """Return True when *plain_password* matches the Argon2 *hashed_password*."""
    try:
        pwd_hasher.verify(hashed_password, plain_password)
    except VerifyMismatchError:
        return False
    return True
26
+
27
+
28
def get_password_hash(password: str) -> str:
    """Return the salted Argon2 hash of *password* in encoded string form."""
    return pwd_hasher.hash(password)
31
+
32
+
33
def create_access_token(data: dict, expires_delta: Optional[timedelta] = None) -> str:
    """Create a signed JWT access token.

    Args:
        data: Claims to embed in the token (e.g. {"sub": username}).
        expires_delta: Optional token lifetime; when omitted the module
            default ACCESS_TOKEN_EXPIRE_MINUTES is used.

    Returns:
        The encoded JWT string.
    """
    to_encode = data.copy()
    # Use an aware UTC timestamp: datetime.utcnow() is deprecated and returns
    # a naive datetime that is easy to mishandle downstream. The fallback
    # lifetime uses the module constant instead of a divergent hard-coded 15.
    expire = datetime.now(timezone.utc) + (
        expires_delta or timedelta(minutes=ACCESS_TOKEN_EXPIRE_MINUTES)
    )
    to_encode.update({"exp": expire})
    return jwt.encode(to_encode, SECRET_KEY, algorithm=ALGORITHM)
43
+
44
+
45
def decode_access_token(token: str) -> Optional[str]:
    """Decode a JWT token and return the username.

    Args:
        token: Encoded JWT string.

    Returns:
        The token's "sub" claim (the username), or None when the token is
        invalid/expired or carries no "sub" claim.
    """
    try:
        payload = jwt.decode(token, SECRET_KEY, algorithms=[ALGORITHM])
        # "sub" may be absent, so the value (and the return) can be None.
        username: Optional[str] = payload.get("sub")
        return username
    except JWTError:
        # Covers malformed tokens, bad signatures, and expired "exp" claims.
        return None
putplace/version.py ADDED
@@ -0,0 +1,6 @@
1
+ """Version information for putplace.
2
+
3
+ IMPORTANT: Keep this version in sync with pyproject.toml!
4
+ """
5
+
6
+ __version__ = "0.4.1"