pltr-cli 0.1.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- pltr/__init__.py +1 -0
- pltr/auth/__init__.py +1 -0
- pltr/auth/base.py +49 -0
- pltr/auth/manager.py +129 -0
- pltr/auth/oauth.py +83 -0
- pltr/auth/storage.py +87 -0
- pltr/auth/token.py +55 -0
- pltr/cli.py +54 -0
- pltr/commands/__init__.py +1 -0
- pltr/commands/configure.py +151 -0
- pltr/commands/dataset.py +98 -0
- pltr/commands/verify.py +185 -0
- pltr/config/__init__.py +1 -0
- pltr/config/profiles.py +124 -0
- pltr/config/settings.py +103 -0
- pltr/services/__init__.py +1 -0
- pltr/services/base.py +62 -0
- pltr/services/dataset.py +90 -0
- pltr/services/dataset_full.py +302 -0
- pltr/services/dataset_v2.py +128 -0
- pltr/utils/__init__.py +1 -0
- pltr/utils/formatting.py +331 -0
- pltr/utils/progress.py +328 -0
- pltr_cli-0.1.0.dist-info/METADATA +203 -0
- pltr_cli-0.1.0.dist-info/RECORD +28 -0
- pltr_cli-0.1.0.dist-info/WHEEL +4 -0
- pltr_cli-0.1.0.dist-info/entry_points.txt +2 -0
- pltr_cli-0.1.0.dist-info/licenses/LICENSE +21 -0
pltr/utils/formatting.py
ADDED
|
@@ -0,0 +1,331 @@
|
|
|
1
|
+
"""
|
|
2
|
+
Output formatting utilities for CLI commands.
|
|
3
|
+
"""
|
|
4
|
+
|
|
5
|
+
import json
|
|
6
|
+
import csv
|
|
7
|
+
from typing import Any, Dict, List, Optional, Union
|
|
8
|
+
from datetime import datetime
|
|
9
|
+
from io import StringIO
|
|
10
|
+
|
|
11
|
+
from rich.console import Console
|
|
12
|
+
from rich.table import Table
|
|
13
|
+
from rich import print as rich_print
|
|
14
|
+
|
|
15
|
+
|
|
16
|
+
class OutputFormatter:
    """Handles different output formats for CLI commands.

    Results (a single record dict or a list of record dicts) can be
    rendered as a rich table, JSON, or CSV, and either printed to the
    console or written to a file.
    """

    # Display truncation limits for list views.
    _DESCRIPTION_LIMIT = 50
    _TRANSACTION_LIMIT = 12

    def __init__(self, console: Optional[Console] = None):
        """
        Initialize formatter.

        Args:
            console: Rich console instance (creates one if not provided)
        """
        self.console = console or Console()

    def format_output(
        self,
        data: Union[Dict[str, Any], List[Dict[str, Any]]],
        format_type: str = "table",
        output_file: Optional[str] = None,
    ) -> Optional[str]:
        """
        Format data according to specified format.

        Args:
            data: Data to format
            format_type: Output format ('table', 'json', 'csv')
            output_file: Optional file path to write output

        Returns:
            Formatted string if no output file specified

        Raises:
            ValueError: If format_type is not 'table', 'json', or 'csv'.
        """
        if format_type == "json":
            return self._format_json(data, output_file)
        elif format_type == "csv":
            return self._format_csv(data, output_file)
        elif format_type == "table":
            return self._format_table(data, output_file)
        else:
            raise ValueError(f"Unsupported format type: {format_type}")

    def _format_json(
        self, data: Any, output_file: Optional[str] = None
    ) -> Optional[str]:
        """Format data as JSON, writing to file or printing to stdout."""
        # Convert datetime objects to strings for JSON serialization;
        # default=str is a last-resort fallback for anything else.
        data_serializable = self._make_json_serializable(data)
        json_str = json.dumps(data_serializable, indent=2, default=str)

        if output_file:
            with open(output_file, "w") as f:
                f.write(json_str)
            return None
        else:
            rich_print(json_str)
            return json_str

    def _format_csv(
        self,
        data: Union[Dict[str, Any], List[Dict[str, Any]]],
        output_file: Optional[str] = None,
    ) -> Optional[str]:
        """Format data as CSV with a union-of-keys header (sorted)."""
        if isinstance(data, dict):
            data = [data]

        if not data:
            csv_str = ""
        else:
            # Get all unique keys for the CSV header.
            fieldnames_set: set[str] = set()
            for item in data:
                fieldnames_set.update(item.keys())
            fieldnames = sorted(fieldnames_set)

            output = StringIO()
            writer = csv.DictWriter(output, fieldnames=fieldnames)
            writer.writeheader()

            for item in data:
                # Convert complex objects to strings; missing values → "".
                row = {}
                for key in fieldnames:
                    value = item.get(key)
                    if isinstance(value, (dict, list)):
                        row[key] = json.dumps(value)
                    elif value is None:
                        row[key] = ""
                    else:
                        row[key] = str(value)
                writer.writerow(row)

            csv_str = output.getvalue()

        if output_file:
            with open(output_file, "w") as f:
                f.write(csv_str)
            return None
        else:
            # csv module already emits trailing newlines; avoid doubling.
            print(csv_str, end="")
            return csv_str

    def _format_table(
        self,
        data: Union[Dict[str, Any], List[Dict[str, Any]]],
        output_file: Optional[str] = None,
    ) -> Optional[str]:
        """Format data as a rich table with sorted union-of-keys columns."""
        if isinstance(data, dict):
            data = [data]

        if not data:
            if output_file:
                with open(output_file, "w") as f:
                    f.write("No data to display\n")
                return None
            else:
                self.console.print("No data to display")
                return "No data to display"

        # Create table.
        table = Table(show_header=True, header_style="bold blue")

        # Get all unique columns.
        columns_set: set[str] = set()
        for item in data:
            columns_set.update(item.keys())
        columns = sorted(columns_set)

        # Add columns to table.
        for column in columns:
            table.add_column(column, overflow="fold")

        # Add rows.
        for item in data:
            row = []
            for column in columns:
                value = item.get(column)
                if isinstance(value, (dict, list)):
                    # Format complex objects as JSON.
                    row.append(json.dumps(value, indent=2))
                elif value is None:
                    row.append("")
                elif isinstance(value, datetime):
                    row.append(value.isoformat())
                else:
                    row.append(str(value))
            table.add_row(*row)

        if output_file:
            # For file output, render the table as plain text.
            with open(output_file, "w") as f:
                console = Console(file=f, force_terminal=False)
                console.print(table)
            return None
        else:
            self.console.print(table)
            # Render the table to text for the return value; str(table)
            # would only yield the object repr, not the rendered output
            # promised by the docstring.
            capture_console = Console()
            with capture_console.capture() as capture:
                capture_console.print(table)
            return capture.get()

    def _make_json_serializable(self, data: Any) -> Any:
        """Recursively convert datetimes to ISO strings for JSON output."""
        if isinstance(data, dict):
            return {k: self._make_json_serializable(v) for k, v in data.items()}
        elif isinstance(data, list):
            return [self._make_json_serializable(item) for item in data]
        elif isinstance(data, datetime):
            return data.isoformat()
        else:
            return data

    @staticmethod
    def _truncate(text: str, limit: int) -> str:
        """Return *text* unchanged if within *limit*, else cut with '...'."""
        return text if len(text) <= limit else text[:limit] + "..."

    def format_dataset_list(
        self,
        datasets: List[Dict[str, Any]],
        format_type: str = "table",
        output_file: Optional[str] = None,
    ) -> Optional[str]:
        """
        Format dataset list with specific columns.

        Args:
            datasets: List of dataset dictionaries
            format_type: Output format
            output_file: Optional output file path

        Returns:
            Formatted string if no output file specified
        """
        # Select and order key columns for dataset display.
        formatted_datasets = []
        for dataset in datasets:
            formatted_dataset = {
                "RID": dataset.get("rid", ""),
                "Name": dataset.get("name", ""),
                "Created": self._format_datetime(dataset.get("created_time")),
                "Size": self._format_file_size(dataset.get("size_bytes")),
                # Only add an ellipsis when the description actually
                # exceeds the limit (previously "..." was always appended).
                "Description": self._truncate(
                    dataset.get("description", "") or "", self._DESCRIPTION_LIMIT
                ),
            }
            formatted_datasets.append(formatted_dataset)

        return self.format_output(formatted_datasets, format_type, output_file)

    def format_dataset_detail(
        self,
        dataset: Dict[str, Any],
        format_type: str = "table",
        output_file: Optional[str] = None,
    ) -> Optional[str]:
        """
        Format detailed dataset information.

        Args:
            dataset: Dataset dictionary
            format_type: Output format
            output_file: Optional output file path

        Returns:
            Formatted string if no output file specified
        """
        if format_type == "table":
            # For table format, show key-value pairs (only fields that exist).
            details = []

            if dataset.get("rid"):
                details.append({"Property": "RID", "Value": dataset["rid"]})
            if dataset.get("name"):
                details.append({"Property": "Name", "Value": dataset["name"]})
            if dataset.get("parent_folder_rid"):
                details.append(
                    {"Property": "Parent Folder", "Value": dataset["parent_folder_rid"]}
                )

            # Add any other fields that might exist (skip None and "").
            for key, value in dataset.items():
                if (
                    key not in ["rid", "name", "parent_folder_rid"]
                    and value is not None
                    and value != ""
                ):
                    details.append(
                        {"Property": key.replace("_", " ").title(), "Value": str(value)}
                    )

            return self.format_output(details, format_type, output_file)
        else:
            # JSON/CSV get the raw record untouched.
            return self.format_output(dataset, format_type, output_file)

    def format_file_list(
        self,
        files: List[Dict[str, Any]],
        format_type: str = "table",
        output_file: Optional[str] = None,
    ) -> Optional[str]:
        """
        Format file list with specific columns.

        Args:
            files: List of file dictionaries
            format_type: Output format
            output_file: Optional output file path

        Returns:
            Formatted string if no output file specified
        """
        # Format files for display.
        formatted_files = []
        for file in files:
            formatted_file = {
                "Path": file.get("path", ""),
                "Size": self._format_file_size(file.get("size_bytes")),
                "Last Modified": self._format_datetime(file.get("last_modified")),
                # Only add an ellipsis when the RID actually exceeds the
                # limit (previously "..." was always appended).
                "Transaction": self._truncate(
                    file.get("transaction_rid") or "", self._TRANSACTION_LIMIT
                ),
            }
            formatted_files.append(formatted_file)

        return self.format_output(formatted_files, format_type, output_file)

    def _format_datetime(self, dt: Any) -> str:
        """Format datetime for display; strings pass through unchanged."""
        if dt is None:
            return ""
        if isinstance(dt, str):
            return dt
        if isinstance(dt, datetime):
            return dt.strftime("%Y-%m-%d %H:%M:%S")
        return str(dt)

    def _format_file_size(self, size_bytes: Optional[int]) -> str:
        """Format file size in human-readable format (B/KB/MB/GB)."""
        if size_bytes is None:
            return ""

        if size_bytes < 1024:
            return f"{size_bytes} B"
        elif size_bytes < 1024**2:
            return f"{size_bytes / 1024:.1f} KB"
        elif size_bytes < 1024**3:
            return f"{size_bytes / (1024**2):.1f} MB"
        else:
            return f"{size_bytes / (1024**3):.1f} GB"

    def print_success(self, message: str):
        """Print success message with formatting."""
        self.console.print(f"✅ {message}", style="green")

    def print_error(self, message: str):
        """Print error message with formatting."""
        self.console.print(f"❌ {message}", style="red")

    def print_warning(self, message: str):
        """Print warning message with formatting."""
        self.console.print(f"⚠️ {message}", style="yellow")

    def print_info(self, message: str):
        """Print info message with formatting."""
        self.console.print(f"ℹ️ {message}", style="blue")
|
pltr/utils/progress.py
ADDED
|
@@ -0,0 +1,328 @@
|
|
|
1
|
+
"""
|
|
2
|
+
Progress bar utilities for long-running operations.
|
|
3
|
+
"""
|
|
4
|
+
|
|
5
|
+
from typing import Optional, Iterator, Any, Union
|
|
6
|
+
from pathlib import Path
|
|
7
|
+
from contextlib import contextmanager
|
|
8
|
+
|
|
9
|
+
from rich.progress import (
|
|
10
|
+
Progress,
|
|
11
|
+
TextColumn,
|
|
12
|
+
BarColumn,
|
|
13
|
+
TaskProgressColumn,
|
|
14
|
+
TimeRemainingColumn,
|
|
15
|
+
FileSizeColumn,
|
|
16
|
+
TotalFileSizeColumn,
|
|
17
|
+
TransferSpeedColumn,
|
|
18
|
+
SpinnerColumn,
|
|
19
|
+
)
|
|
20
|
+
|
|
21
|
+
|
|
22
|
+
class FileProgressTracker:
    """Progress tracker for file uploads, downloads, and batch operations."""

    def __init__(self, show_speed: bool = True):
        """
        Initialize progress tracker.

        Args:
            show_speed: Whether to show transfer speed
        """
        self.show_speed = show_speed
        self._progress: Optional[Progress] = None

    @contextmanager
    def _run_task(self, columns, label, total) -> Iterator[Any]:
        # Shared driver: open a Progress with the given columns, register a
        # single task, and hand the caller a callable that sets the task's
        # completed count in absolute terms.
        with Progress(*columns) as progress:
            self._progress = progress
            task_id = progress.add_task(label, total=total)

            def advance(completed: int):
                """Set the task's absolute completed amount."""
                progress.update(task_id, completed=completed)

            try:
                yield advance
            finally:
                self._progress = None

    @contextmanager
    def track_upload(
        self, file_path: Union[str, Path], description: Optional[str] = None
    ) -> Iterator[Any]:
        """
        Context manager for tracking file upload progress.

        Args:
            file_path: Path to file being uploaded
            description: Optional description for progress bar

        Yields:
            Progress update function (call with total bytes transferred)
        """
        path = Path(file_path)
        size = path.stat().st_size
        label = description if description else f"Uploading {path.name}"

        columns = [
            TextColumn("[bold blue]{task.description}"),
            BarColumn(),
            TaskProgressColumn(),
            FileSizeColumn(),
            TotalFileSizeColumn(),
            TimeRemainingColumn(),
        ]
        if self.show_speed:
            columns.append(TransferSpeedColumn())

        with self._run_task(columns, label, size) as updater:
            yield updater

    @contextmanager
    def track_download(
        self,
        target_path: Union[str, Path],
        total_size: Optional[int] = None,
        description: Optional[str] = None,
    ) -> Iterator[Any]:
        """
        Context manager for tracking file download progress.

        Args:
            target_path: Path where file will be saved
            total_size: Total file size in bytes (if known)
            description: Optional description for progress bar

        Yields:
            Progress update function (call with total bytes transferred)
        """
        path = Path(target_path)
        label = description if description else f"Downloading {path.name}"

        columns = [
            TextColumn("[bold green]{task.description}"),
            BarColumn(),
            TaskProgressColumn(),
        ]
        # Size/time columns need a known total; speed does not.
        if total_size:
            columns += [FileSizeColumn(), TotalFileSizeColumn(), TimeRemainingColumn()]
        if self.show_speed:
            columns.append(TransferSpeedColumn())

        with self._run_task(columns, label, total_size) as updater:
            yield updater

    @contextmanager
    def track_operation(
        self, description: str, total: Optional[int] = None
    ) -> Iterator[Any]:
        """
        Context manager for tracking general operations.

        Args:
            description: Description of the operation
            total: Total number of items (if known)

        Yields:
            Progress update function (call with items completed)
        """
        columns = [
            TextColumn("[bold yellow]{task.description}"),
            BarColumn(),
            TaskProgressColumn(),
        ]
        if total:
            columns.append(TimeRemainingColumn())

        with self._run_task(columns, description, total) as updater:
            yield updater
|
|
164
|
+
|
|
165
|
+
|
|
166
|
+
class SpinnerProgressTracker:
    """Simple animated spinner for indeterminate operations."""

    def __init__(self):
        """Initialize spinner tracker."""
        self._progress: Optional[Progress] = None

    @contextmanager
    def track_spinner(self, description: str) -> Iterator[None]:
        """
        Context manager for showing a spinner during operations.

        Args:
            description: Description of the operation

        Yields:
            None (operation runs in context)
        """
        spinner_columns = (
            SpinnerColumn(),
            TextColumn("[bold cyan]{task.description}"),
        )

        # transient=True removes the spinner line once the context exits.
        with Progress(*spinner_columns, transient=True) as progress:
            self._progress = progress
            progress.add_task(description)

            try:
                yield
            finally:
                self._progress = None
|
|
197
|
+
|
|
198
|
+
|
|
199
|
+
def create_file_chunks(
    file_path: Union[str, Path], chunk_size: int = 8192
) -> Iterator[bytes]:
    """
    Create file chunks for streaming upload with progress tracking.

    Args:
        file_path: Path to file to read
        chunk_size: Size of each chunk in bytes

    Yields:
        File chunks as bytes (each non-empty; the last may be shorter)
    """
    with open(Path(file_path), "rb") as handle:
        # read() returns b"" at EOF, which terminates the loop.
        while chunk := handle.read(chunk_size):
            yield chunk
|
|
219
|
+
|
|
220
|
+
|
|
221
|
+
class ProgressCallbackAdapter:
    """Adapter to convert progress callbacks to different formats."""

    def __init__(self, update_callback, total_size: Optional[int] = None):
        """
        Initialize adapter.

        Args:
            update_callback: Callback function to update progress
            total_size: Total size for percentage calculations
        """
        self.update_callback = update_callback
        self.total_size = total_size
        # Running total of bytes seen so far across all __call__ invocations.
        self.bytes_transferred = 0

    def __call__(self, chunk_size: int):
        """
        Update progress with new chunk.

        Args:
            chunk_size: Size of transferred chunk
        """
        # Accumulate, then report the cumulative total to the callback.
        self.bytes_transferred = self.bytes_transferred + chunk_size
        self.update_callback(self.bytes_transferred)

    def get_percentage(self) -> Optional[float]:
        """
        Get current progress percentage.

        Returns:
            Percentage complete (0-100) or None if total size unknown
        """
        total = self.total_size
        if not total or total <= 0:
            return None
        return (self.bytes_transferred / total) * 100
|
|
256
|
+
|
|
257
|
+
|
|
258
|
+
# Utility functions for common progress patterns
|
|
259
|
+
|
|
260
|
+
|
|
261
|
+
def with_upload_progress(
    file_path: Union[str, Path], description: Optional[str] = None
) -> FileProgressTracker:
    """
    Create a progress tracker configured for file uploads.

    Args:
        file_path: Path to file being uploaded
        description: Optional description override

    Returns:
        Configured FileProgressTracker
    """
    # NOTE(review): file_path and description are accepted for API symmetry
    # but not consumed here — the tracker's track_upload() takes them later.
    tracker = FileProgressTracker(show_speed=True)
    return tracker
|
|
275
|
+
|
|
276
|
+
|
|
277
|
+
def with_download_progress(show_speed: bool = True) -> FileProgressTracker:
    """
    Create a progress tracker configured for file downloads.

    Args:
        show_speed: Whether to show transfer speed

    Returns:
        Configured FileProgressTracker
    """
    tracker = FileProgressTracker(show_speed=show_speed)
    return tracker
|
|
288
|
+
|
|
289
|
+
|
|
290
|
+
def with_spinner(description: str = "Working...") -> SpinnerProgressTracker:
    """
    Create a spinner for indeterminate operations.

    Args:
        description: Description of the operation

    Returns:
        Configured SpinnerProgressTracker
    """
    # NOTE(review): description is accepted for API symmetry but not consumed
    # here — the tracker's track_spinner() takes it later.
    spinner = SpinnerProgressTracker()
    return spinner
|
|
301
|
+
|
|
302
|
+
|
|
303
|
+
# Example usage patterns
|
|
304
|
+
"""
|
|
305
|
+
# File upload with progress
|
|
306
|
+
tracker = with_upload_progress("data.csv")
|
|
307
|
+
with tracker.track_upload("data.csv") as progress:
|
|
308
|
+
# Upload file chunks
|
|
309
|
+
for chunk in create_file_chunks("data.csv"):
|
|
310
|
+
# ... upload chunk ...
|
|
311
|
+
progress(len(chunk))
|
|
312
|
+
|
|
313
|
+
# File download with progress
|
|
314
|
+
tracker = with_download_progress()
|
|
315
|
+
with tracker.track_download("output.csv", total_size=1024000) as progress:
|
|
316
|
+
# Download file
|
|
317
|
+
bytes_downloaded = 0
|
|
318
|
+
while not complete:
|
|
319
|
+
# ... download chunk ...
|
|
320
|
+
bytes_downloaded += chunk_size
|
|
321
|
+
progress(bytes_downloaded)
|
|
322
|
+
|
|
323
|
+
# Indeterminate operation
|
|
324
|
+
spinner = with_spinner("Processing dataset...")
|
|
325
|
+
with spinner.track_spinner("Processing dataset..."):
|
|
326
|
+
# ... long running operation ...
|
|
327
|
+
pass
|
|
328
|
+
"""
|