marqetive-lib 0.1.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- marqetive/__init__.py +113 -0
- marqetive/core/__init__.py +5 -0
- marqetive/core/account_factory.py +212 -0
- marqetive/core/base_manager.py +303 -0
- marqetive/core/client.py +108 -0
- marqetive/core/progress.py +291 -0
- marqetive/core/registry.py +257 -0
- marqetive/platforms/__init__.py +55 -0
- marqetive/platforms/base.py +390 -0
- marqetive/platforms/exceptions.py +238 -0
- marqetive/platforms/instagram/__init__.py +7 -0
- marqetive/platforms/instagram/client.py +786 -0
- marqetive/platforms/instagram/exceptions.py +311 -0
- marqetive/platforms/instagram/factory.py +106 -0
- marqetive/platforms/instagram/manager.py +112 -0
- marqetive/platforms/instagram/media.py +669 -0
- marqetive/platforms/linkedin/__init__.py +7 -0
- marqetive/platforms/linkedin/client.py +733 -0
- marqetive/platforms/linkedin/exceptions.py +335 -0
- marqetive/platforms/linkedin/factory.py +130 -0
- marqetive/platforms/linkedin/manager.py +119 -0
- marqetive/platforms/linkedin/media.py +549 -0
- marqetive/platforms/models.py +345 -0
- marqetive/platforms/tiktok/__init__.py +0 -0
- marqetive/platforms/twitter/__init__.py +7 -0
- marqetive/platforms/twitter/client.py +647 -0
- marqetive/platforms/twitter/exceptions.py +311 -0
- marqetive/platforms/twitter/factory.py +151 -0
- marqetive/platforms/twitter/manager.py +121 -0
- marqetive/platforms/twitter/media.py +779 -0
- marqetive/platforms/twitter/threads.py +442 -0
- marqetive/py.typed +0 -0
- marqetive/registry_init.py +66 -0
- marqetive/utils/__init__.py +45 -0
- marqetive/utils/file_handlers.py +438 -0
- marqetive/utils/helpers.py +99 -0
- marqetive/utils/media.py +399 -0
- marqetive/utils/oauth.py +265 -0
- marqetive/utils/retry.py +239 -0
- marqetive/utils/token_validator.py +240 -0
- marqetive_lib-0.1.0.dist-info/METADATA +261 -0
- marqetive_lib-0.1.0.dist-info/RECORD +43 -0
- marqetive_lib-0.1.0.dist-info/WHEEL +4 -0
|
@@ -0,0 +1,438 @@
|
|
|
1
|
+
"""Async file handlers for downloading, streaming, and managing files.
|
|
2
|
+
|
|
3
|
+
This module provides utilities for:
|
|
4
|
+
- Downloading files from URLs asynchronously
|
|
5
|
+
- Streaming file uploads with progress tracking
|
|
6
|
+
- Temporary file management
|
|
7
|
+
- Async file I/O operations
|
|
8
|
+
"""
|
|
9
|
+
|
|
10
|
+
import asyncio
|
|
11
|
+
import os
|
|
12
|
+
import tempfile
|
|
13
|
+
from collections.abc import AsyncGenerator, Callable
|
|
14
|
+
from pathlib import Path
|
|
15
|
+
from typing import Any
|
|
16
|
+
|
|
17
|
+
import aiofiles
|
|
18
|
+
import httpx
|
|
19
|
+
|
|
20
|
+
from marqetive.utils.media import detect_mime_type, format_file_size
|
|
21
|
+
|
|
22
|
+
|
|
23
|
+
class DownloadProgress:
    """Tracks how much of a file download has completed.

    Attributes:
        total_bytes: Expected file size in bytes, or None when the server
            did not report one.
        downloaded_bytes: Bytes received so far.
        percentage: Completion ratio expressed as 0-100.
    """

    def __init__(self) -> None:
        """Start a fresh tracker with nothing downloaded yet."""
        self.total_bytes: int | None = None
        self.downloaded_bytes: int = 0

    @property
    def percentage(self) -> float:
        """Completion percentage; 0.0 while the total size is unknown."""
        total = self.total_bytes
        if not total:  # None or zero: percentage is undefined
            return 0.0
        return (self.downloaded_bytes / total) * 100

    def update(self, chunk_size: int) -> None:
        """Record that *chunk_size* more bytes have arrived.

        Args:
            chunk_size: Size of chunk that was just downloaded.
        """
        self.downloaded_bytes += chunk_size

    def __repr__(self) -> str:
        """Human-readable progress summary (MB when total size is known)."""
        if not self.total_bytes:
            return f"DownloadProgress({self.downloaded_bytes} bytes)"
        megabyte = 1024 * 1024
        return (
            f"DownloadProgress({self.downloaded_bytes / megabyte:.2f}MB / "
            f"{self.total_bytes / megabyte:.2f}MB, {self.percentage:.1f}%)"
        )
|
|
62
|
+
|
|
63
|
+
|
|
64
|
+
async def download_file(
    url: str,
    destination: str | None = None,
    *,
    chunk_size: int = 8192,
    progress_callback: Callable[[DownloadProgress], None] | None = None,
    timeout: float = 300.0,
) -> str:
    """Download a file from URL asynchronously.

    Args:
        url: URL to download from.
        destination: Path where file should be saved. If None, uses temp file.
        chunk_size: Size of chunks to download (default: 8KB).
        progress_callback: Optional callback function called with progress updates.
        timeout: Request timeout in seconds (default: 5 minutes).

    Returns:
        Path to the downloaded file.

    Raises:
        httpx.HTTPError: If download fails.
        IOError: If file write fails.

    Example:
        >>> async def on_progress(progress):
        ...     print(f"Downloaded: {progress.percentage:.1f}%")
        >>>
        >>> file_path = await download_file(
        ...     "https://example.com/image.jpg",
        ...     destination="/tmp/image.jpg",
        ...     progress_callback=on_progress
        ... )
    """
    # Create temp file if no destination specified; remember that we did so
    # we can remove it again if the download fails.
    created_temp = destination is None
    if destination is None:
        temp_fd, destination = tempfile.mkstemp()
        os.close(temp_fd)  # Close fd, we'll use aiofiles

    progress = DownloadProgress()

    try:
        async with (
            httpx.AsyncClient(timeout=timeout) as client,
            client.stream("GET", url) as response,
        ):
            response.raise_for_status()

            # Get total file size if available
            content_length = response.headers.get("content-length")
            if content_length:
                progress.total_bytes = int(content_length)

            # Download and write file
            async with aiofiles.open(destination, "wb") as f:
                async for chunk in response.aiter_bytes(chunk_size=chunk_size):
                    await f.write(chunk)
                    progress.update(len(chunk))

                    # Call progress callback if provided
                    if progress_callback:
                        progress_callback(progress)
    except BaseException:
        # Don't leak the temp file this call created when the download or
        # the write fails; caller-supplied destinations are left alone.
        if created_temp:
            try:
                os.remove(destination)
            except OSError:
                pass  # Best-effort cleanup; the original error matters more.
        raise

    return destination
|
|
127
|
+
|
|
128
|
+
|
|
129
|
+
async def download_to_memory(
    url: str,
    *,
    max_size: int | None = None,
    timeout: float = 60.0,
) -> bytes:
    """Download a file into memory.

    Useful for small files that need to be processed immediately. The
    response is streamed so an oversized download is aborted as soon as
    the limit is exceeded, instead of after the whole body is buffered.

    Args:
        url: URL to download from.
        max_size: Maximum allowed file size in bytes (raises ValueError if exceeded).
        timeout: Request timeout in seconds (default: 1 minute).

    Returns:
        File content as bytes.

    Raises:
        httpx.HTTPError: If download fails.
        ValueError: If file exceeds max_size.

    Example:
        >>> content = await download_to_memory(
        ...     "https://example.com/small_file.json",
        ...     max_size=1024 * 1024  # 1MB limit
        ... )
    """
    async with (
        httpx.AsyncClient(timeout=timeout) as client,
        client.stream("GET", url) as response,
    ):
        response.raise_for_status()

        chunks: list[bytes] = []
        received = 0
        async for chunk in response.aiter_bytes():
            received += len(chunk)
            # `is not None` so an explicit limit of 0 is honored too;
            # abort early rather than buffering the full oversized body.
            if max_size is not None and received > max_size:
                raise ValueError(
                    f"File size {received} bytes exceeds maximum of "
                    f"{max_size} bytes"
                )
            chunks.append(chunk)

    return b"".join(chunks)
|
|
170
|
+
|
|
171
|
+
|
|
172
|
+
async def stream_file_upload(
    file_path: str,
    *,
    chunk_size: int = 1024 * 1024,
    progress_callback: Callable[[int, int], None] | None = None,
) -> AsyncGenerator[bytes, None]:
    """Yield a file's content chunk by chunk, reporting progress.

    Args:
        file_path: Path to file to upload.
        chunk_size: Size of chunks to read (default: 1MB).
        progress_callback: Optional callback(bytes_read, total_bytes).

    Yields:
        Chunks of file content.

    Raises:
        FileNotFoundError: If file doesn't exist.

    Example:
        >>> async def on_progress(bytes_read, total_bytes):
        ...     pct = (bytes_read / total_bytes) * 100
        ...     print(f"Uploaded: {pct:.1f}%")
        >>>
        >>> async for chunk in stream_file_upload(
        ...     "/path/to/file.mp4",
        ...     progress_callback=on_progress
        ... ):
        ...     await upload_chunk(chunk)
    """
    if not os.path.exists(file_path):
        raise FileNotFoundError(f"File not found: {file_path}")

    total = os.path.getsize(file_path)
    sent = 0

    async with aiofiles.open(file_path, "rb") as handle:
        # Walrus loop: stop as soon as a read returns empty bytes (EOF).
        while chunk := await handle.read(chunk_size):
            sent += len(chunk)
            if progress_callback:
                progress_callback(sent, total)
            yield chunk
|
|
220
|
+
|
|
221
|
+
|
|
222
|
+
async def read_file_bytes(file_path: str) -> bytes:
    """Load a file's entire content asynchronously.

    Args:
        file_path: Path to file.

    Returns:
        File content as bytes.

    Raises:
        FileNotFoundError: If file doesn't exist.

    Example:
        >>> content = await read_file_bytes('/path/to/file.bin')
        >>> print(len(content))
    """
    if not os.path.exists(file_path):
        raise FileNotFoundError(f"File not found: {file_path}")

    async with aiofiles.open(file_path, "rb") as handle:
        data = await handle.read()
    return data
|
|
243
|
+
|
|
244
|
+
|
|
245
|
+
async def write_file_bytes(file_path: str, content: bytes) -> None:
    """Write bytes to a file asynchronously, creating parent directories.

    Args:
        file_path: Path where file should be written.
        content: Content to write.

    Raises:
        IOError: If write fails.

    Example:
        >>> await write_file_bytes('/path/to/output.bin', b'some data')
    """
    # Make sure the directory tree exists before opening the file.
    Path(file_path).parent.mkdir(parents=True, exist_ok=True)

    async with aiofiles.open(file_path, "wb") as handle:
        await handle.write(content)
|
|
264
|
+
|
|
265
|
+
|
|
266
|
+
async def copy_file_async(source: str, destination: str) -> None:
    """Copy file asynchronously in fixed-size chunks.

    Streams the content 1MB at a time instead of loading the whole file
    into memory, so large media files can be copied safely.

    Args:
        source: Source file path.
        destination: Destination file path.

    Raises:
        FileNotFoundError: If source doesn't exist.
        IOError: If copy fails.

    Example:
        >>> await copy_file_async('/path/to/source.txt', '/path/to/dest.txt')
    """
    if not os.path.exists(source):
        raise FileNotFoundError(f"Source file not found: {source}")

    # Ensure destination directory exists
    Path(destination).parent.mkdir(parents=True, exist_ok=True)

    # Stream source -> destination without buffering the entire file.
    async with (
        aiofiles.open(source, "rb") as src,
        aiofiles.open(destination, "wb") as dest,
    ):
        while chunk := await src.read(1024 * 1024):
            await dest.write(chunk)
|
|
293
|
+
|
|
294
|
+
|
|
295
|
+
class TempFileManager:
|
|
296
|
+
"""Manager for temporary files with automatic cleanup.
|
|
297
|
+
|
|
298
|
+
Usage:
|
|
299
|
+
>>> async with TempFileManager() as tmp:
|
|
300
|
+
... file_path = tmp.create("prefix_")
|
|
301
|
+
... # Use file_path...
|
|
302
|
+
... # File automatically deleted on exit
|
|
303
|
+
"""
|
|
304
|
+
|
|
305
|
+
def __init__(self, *, suffix: str = "", prefix: str = "marqetive_") -> None:
|
|
306
|
+
"""Initialize temp file manager.
|
|
307
|
+
|
|
308
|
+
Args:
|
|
309
|
+
suffix: Suffix for temp files.
|
|
310
|
+
prefix: Prefix for temp files (default: 'marqetive_').
|
|
311
|
+
"""
|
|
312
|
+
self.suffix = suffix
|
|
313
|
+
self.prefix = prefix
|
|
314
|
+
self.temp_files: list[str] = []
|
|
315
|
+
|
|
316
|
+
def create(self, custom_prefix: str | None = None) -> str:
|
|
317
|
+
"""Create a new temporary file.
|
|
318
|
+
|
|
319
|
+
Args:
|
|
320
|
+
custom_prefix: Optional custom prefix (overrides default).
|
|
321
|
+
|
|
322
|
+
Returns:
|
|
323
|
+
Path to temporary file.
|
|
324
|
+
"""
|
|
325
|
+
prefix = custom_prefix or self.prefix
|
|
326
|
+
fd, path = tempfile.mkstemp(suffix=self.suffix, prefix=prefix)
|
|
327
|
+
os.close(fd) # Close file descriptor
|
|
328
|
+
self.temp_files.append(path)
|
|
329
|
+
return path
|
|
330
|
+
|
|
331
|
+
async def cleanup(self) -> None:
|
|
332
|
+
"""Remove all temporary files created by this manager."""
|
|
333
|
+
for file_path in self.temp_files:
|
|
334
|
+
try:
|
|
335
|
+
if os.path.exists(file_path):
|
|
336
|
+
os.remove(file_path)
|
|
337
|
+
except OSError:
|
|
338
|
+
# Ignore errors during cleanup
|
|
339
|
+
pass
|
|
340
|
+
self.temp_files.clear()
|
|
341
|
+
|
|
342
|
+
async def __aenter__(self) -> "TempFileManager":
|
|
343
|
+
"""Enter async context."""
|
|
344
|
+
return self
|
|
345
|
+
|
|
346
|
+
async def __aexit__(self, *args: Any) -> None:
|
|
347
|
+
"""Exit async context and cleanup."""
|
|
348
|
+
await self.cleanup()
|
|
349
|
+
|
|
350
|
+
|
|
351
|
+
async def get_file_info(file_path: str) -> dict[str, Any]:
    """Collect metadata about a file.

    Args:
        file_path: Path to file.

    Returns:
        Dictionary with path, name, size, formatted size, MIME type,
        modification time, and extension.

    Raises:
        FileNotFoundError: If file doesn't exist.

    Example:
        >>> info = await get_file_info('/path/to/file.jpg')
        >>> print(f"Size: {info['size_formatted']}")
        >>> print(f"MIME: {info['mime_type']}")
    """
    if not os.path.exists(file_path):
        raise FileNotFoundError(f"File not found: {file_path}")

    stats = os.stat(file_path)
    size = stats.st_size

    info: dict[str, Any] = {
        "path": file_path,
        "name": os.path.basename(file_path),
        "size": size,
        "size_formatted": format_file_size(size),
        "mime_type": detect_mime_type(file_path),
        "modified": stats.st_mtime,
        "extension": Path(file_path).suffix.lower(),
    }
    return info
|
|
382
|
+
|
|
383
|
+
|
|
384
|
+
async def ensure_file_accessible(file_path: str) -> bool:
    """Check if file exists and is readable.

    Args:
        file_path: Path to check.

    Returns:
        True if file is accessible, False otherwise.

    Example:
        >>> if await ensure_file_accessible('/path/to/file.txt'):
        ...     print("File is accessible")
    """
    if not os.path.exists(file_path):
        return False

    try:
        # Reading a single byte proves the file can actually be opened.
        async with aiofiles.open(file_path, "rb") as handle:
            await handle.read(1)
    except OSError:
        return False
    return True
|
|
408
|
+
|
|
409
|
+
|
|
410
|
+
async def wait_for_file(
    file_path: str, *, timeout: float = 30.0, check_interval: float = 0.5
) -> bool:
    """Wait for a file to become available.

    Useful when waiting for external processes to create files.

    Args:
        file_path: Path to file.
        timeout: Maximum time to wait in seconds.
        check_interval: How often to check in seconds.

    Returns:
        True if file became available, False if timeout.

    Example:
        >>> if await wait_for_file('/tmp/output.mp4', timeout=60):
        ...     print("File is ready!")
    """
    # Use the event-loop clock instead of accumulating check_interval:
    # the accumulator ignored time spent inside the accessibility check,
    # so the real wait could run well past the requested timeout.
    loop = asyncio.get_running_loop()
    deadline = loop.time() + timeout

    while True:
        if await ensure_file_accessible(file_path):
            return True
        if loop.time() >= deadline:
            return False
        await asyncio.sleep(check_interval)
|
|
@@ -0,0 +1,99 @@
|
|
|
1
|
+
"""Helper functions for common API operations."""
|
|
2
|
+
|
|
3
|
+
import json
from typing import Any
from urllib.parse import parse_qs, urlencode, urlparse
|
|
5
|
+
|
|
6
|
+
|
|
7
|
+
def format_response(
    data: dict[str, Any], *, pretty: bool = False, indent: int = 2
) -> str:
    """Format API response data as a string.

    Args:
        data: The response data dictionary
        pretty: Whether to format with indentation (default: False)
        indent: Number of spaces for indentation if pretty=True (default: 2)

    Returns:
        Formatted string representation of the response. When pretty=True,
        keys are sorted alphabetically.

    Example:
        >>> data = {"user": "john", "status": "active"}
        >>> print(format_response(data, pretty=True))
        {
          "status": "active",
          "user": "john"
        }
    """
    if pretty:
        return json.dumps(data, indent=indent, sort_keys=True)
    return json.dumps(data)
|
|
33
|
+
|
|
34
|
+
|
|
35
|
+
def parse_query_params(url: str) -> dict[str, Any]:
    """Parse query parameters from a URL.

    Args:
        url: The URL string to parse

    Returns:
        Dictionary mapping each parameter name to its list of values.

    Example:
        >>> url = "https://api.example.com/users?page=1&limit=10"
        >>> params = parse_query_params(url)
        >>> print(params)
        {'page': ['1'], 'limit': ['10']}
    """
    # parse_qs already returns a fresh dict; the extra dict() copy was redundant.
    return parse_qs(urlparse(url).query)
|
|
52
|
+
|
|
53
|
+
|
|
54
|
+
def build_query_string(params: dict[str, Any]) -> str:
    """Build a URL-encoded query string from a parameter mapping.

    Args:
        params: Dictionary of query parameters

    Returns:
        URL-encoded query string

    Example:
        >>> build_query_string({"page": 1, "limit": 10, "sort": "name"})
        'page=1&limit=10&sort=name'
    """
    encoded = urlencode(params)
    return encoded
|
|
70
|
+
|
|
71
|
+
|
|
72
|
+
def merge_headers(
|
|
73
|
+
default_headers: dict[str, str] | None = None,
|
|
74
|
+
custom_headers: dict[str, str] | None = None,
|
|
75
|
+
) -> dict[str, str]:
|
|
76
|
+
"""Merge default and custom headers.
|
|
77
|
+
|
|
78
|
+
Custom headers take precedence over default headers.
|
|
79
|
+
|
|
80
|
+
Args:
|
|
81
|
+
default_headers: Default headers dictionary
|
|
82
|
+
custom_headers: Custom headers to merge
|
|
83
|
+
|
|
84
|
+
Returns:
|
|
85
|
+
Merged headers dictionary
|
|
86
|
+
|
|
87
|
+
Example:
|
|
88
|
+
>>> defaults = {"Content-Type": "application/json"}
|
|
89
|
+
>>> custom = {"Authorization": "Bearer token"}
|
|
90
|
+
>>> headers = merge_headers(defaults, custom)
|
|
91
|
+
>>> print(headers)
|
|
92
|
+
{'Content-Type': 'application/json', 'Authorization': 'Bearer token'}
|
|
93
|
+
"""
|
|
94
|
+
result = {}
|
|
95
|
+
if default_headers:
|
|
96
|
+
result.update(default_headers)
|
|
97
|
+
if custom_headers:
|
|
98
|
+
result.update(custom_headers)
|
|
99
|
+
return result
|