cloudscope 0.1.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- cloudscope/__init__.py +3 -0
- cloudscope/__main__.py +12 -0
- cloudscope/app.py +100 -0
- cloudscope/auth/__init__.py +0 -0
- cloudscope/auth/aws.py +42 -0
- cloudscope/auth/drive_oauth.py +77 -0
- cloudscope/auth/gcp.py +42 -0
- cloudscope/backends/__init__.py +0 -0
- cloudscope/backends/base.py +98 -0
- cloudscope/backends/drive.py +568 -0
- cloudscope/backends/gcs.py +270 -0
- cloudscope/backends/registry.py +23 -0
- cloudscope/backends/s3.py +281 -0
- cloudscope/config.py +70 -0
- cloudscope/models/__init__.py +0 -0
- cloudscope/models/cloud_file.py +48 -0
- cloudscope/models/sync_state.py +87 -0
- cloudscope/models/transfer.py +46 -0
- cloudscope/sync/__init__.py +0 -0
- cloudscope/sync/differ.py +165 -0
- cloudscope/sync/engine.py +214 -0
- cloudscope/sync/plan.py +46 -0
- cloudscope/sync/resolver.py +64 -0
- cloudscope/sync/state.py +140 -0
- cloudscope/transfer/__init__.py +0 -0
- cloudscope/transfer/manager.py +150 -0
- cloudscope/transfer/progress.py +20 -0
- cloudscope/tui/__init__.py +0 -0
- cloudscope/tui/commands.py +47 -0
- cloudscope/tui/modals/__init__.py +0 -0
- cloudscope/tui/modals/confirm_dialog.py +93 -0
- cloudscope/tui/modals/download_dialog.py +111 -0
- cloudscope/tui/modals/new_folder.py +96 -0
- cloudscope/tui/modals/sync_dialog.py +142 -0
- cloudscope/tui/modals/upload_dialog.py +109 -0
- cloudscope/tui/screens/__init__.py +0 -0
- cloudscope/tui/screens/auth_setup.py +154 -0
- cloudscope/tui/screens/browse.py +282 -0
- cloudscope/tui/screens/settings.py +222 -0
- cloudscope/tui/screens/sync_config.py +245 -0
- cloudscope/tui/styles/cloudscope.tcss +336 -0
- cloudscope/tui/widgets/__init__.py +0 -0
- cloudscope/tui/widgets/app_footer.py +46 -0
- cloudscope/tui/widgets/breadcrumb.py +39 -0
- cloudscope/tui/widgets/cloud_tree.py +146 -0
- cloudscope/tui/widgets/file_table.py +113 -0
- cloudscope/tui/widgets/preview_panel.py +59 -0
- cloudscope/tui/widgets/status_bar.py +27 -0
- cloudscope/tui/widgets/transfer_panel.py +54 -0
- cloudscope-0.1.0.dist-info/METADATA +22 -0
- cloudscope-0.1.0.dist-info/RECORD +53 -0
- cloudscope-0.1.0.dist-info/WHEEL +4 -0
- cloudscope-0.1.0.dist-info/entry_points.txt +2 -0
|
@@ -0,0 +1,568 @@
|
|
|
1
|
+
"""Google Drive backend implementation."""
|
|
2
|
+
|
|
3
|
+
from __future__ import annotations
|
|
4
|
+
|
|
5
|
+
import asyncio
|
|
6
|
+
import io
|
|
7
|
+
from collections import OrderedDict
|
|
8
|
+
from collections.abc import AsyncIterator
|
|
9
|
+
from datetime import datetime, timezone
|
|
10
|
+
from pathlib import Path
|
|
11
|
+
from typing import Any
|
|
12
|
+
|
|
13
|
+
from googleapiclient.discovery import build
|
|
14
|
+
from googleapiclient.http import MediaFileUpload, MediaIoBaseDownload
|
|
15
|
+
|
|
16
|
+
from cloudscope.auth.drive_oauth import get_drive_credentials
|
|
17
|
+
from cloudscope.backends.base import (
|
|
18
|
+
AuthenticationError,
|
|
19
|
+
CloudScopeError,
|
|
20
|
+
NetworkError,
|
|
21
|
+
NotFoundError,
|
|
22
|
+
ProgressCallback,
|
|
23
|
+
)
|
|
24
|
+
from cloudscope.backends.registry import register_backend
|
|
25
|
+
from cloudscope.models.cloud_file import CloudFile, CloudFileType
|
|
26
|
+
|
|
27
|
+
# Google Workspace MIME types that need export instead of direct download.
# Workspace-native files (Docs, Sheets, Slides, Drawings) have no binary
# content that get_media can fetch; they must be exported via export_media.
# Each inner mapping lists the export formats this backend supports; the
# "default" entry is the format used when the caller does not pick one.
WORKSPACE_MIME_TYPES: dict[str, dict[str, str]] = {
    "application/vnd.google-apps.document": {
        "default": "application/pdf",
        "pdf": "application/pdf",
        "docx": "application/vnd.openxmlformats-officedocument.wordprocessingml.document",
        "txt": "text/plain",
    },
    "application/vnd.google-apps.spreadsheet": {
        "default": "text/csv",
        "csv": "text/csv",
        "xlsx": "application/vnd.openxmlformats-officedocument.spreadsheetml.sheet",
        "pdf": "application/pdf",
    },
    "application/vnd.google-apps.presentation": {
        "default": "application/pdf",
        "pdf": "application/pdf",
        "pptx": "application/vnd.openxmlformats-officedocument.presentationml.presentation",
    },
    "application/vnd.google-apps.drawing": {
        "default": "image/png",
        "png": "image/png",
        "pdf": "application/pdf",
        "svg": "image/svg+xml",
    },
}

# File fields to request from Drive API (kept minimal to reduce payload size).
FILE_FIELDS = "id, name, mimeType, size, modifiedTime, md5Checksum, parents, trashed"
# Same field set wrapped for files().list() responses, plus the page cursor.
LIST_FIELDS = f"nextPageToken, files({FILE_FIELDS})"

# Max items in the path-to-ID cache (LRU; oldest entries evicted beyond this).
PATH_CACHE_SIZE = 1024
|
|
60
|
+
|
|
61
|
+
|
|
62
|
+
class GoogleDriveBackend:
    """CloudBackend implementation for Google Drive.

    Human-readable paths (e.g. ``"reports/2024/q1.pdf"``) are translated to
    Drive file IDs by walking each folder component with ``files().list()``;
    resolved IDs are memoized in a bounded LRU cache keyed
    ``"{container}:{path}"``.  Every blocking googleapiclient call runs via
    ``asyncio.to_thread`` so the async interface never blocks the event loop.
    """

    def __init__(
        self,
        client_secrets_path: str | None = None,
    ) -> None:
        # Optional OAuth client-secrets file, forwarded to the auth helper.
        self._client_secrets_path = client_secrets_path
        # googleapiclient Resource handle; None until connect() succeeds.
        self._service: Any = None
        # Bounded LRU cache: "container:path" -> Drive file ID.
        self._path_cache: OrderedDict[str, str] = OrderedDict()

    @property
    def backend_type(self) -> str:
        return "drive"

    @property
    def display_name(self) -> str:
        return "Google Drive"

    # --- Connection ---

    async def connect(self) -> None:
        """Authenticate, build the Drive v3 service, and validate the session.

        Raises:
            FileNotFoundError: if the client-secrets file is missing.
            AuthenticationError: if credential/token acquisition fails.
            NetworkError: for any other connection failure.
        """
        try:
            credentials = await asyncio.to_thread(
                get_drive_credentials, self._client_secrets_path
            )
            self._service = await asyncio.to_thread(
                build, "drive", "v3", credentials=credentials
            )
            # Validate by fetching about info so a bad token fails up front.
            await asyncio.to_thread(
                lambda: self._service.about().get(fields="user").execute()
            )
        except FileNotFoundError:
            # Missing client-secrets file: let callers see it verbatim.
            raise
        except Exception as e:
            # Heuristic classification: messages mentioning credentials or
            # tokens become AuthenticationError; everything else is treated
            # as a network/connection failure.
            err_str = str(e).lower()
            if "credential" in err_str or "token" in err_str or "unauthorized" in err_str:
                raise AuthenticationError(f"Google Drive authentication failed: {e}") from e
            raise NetworkError(f"Failed to connect to Google Drive: {e}") from e

    async def disconnect(self) -> None:
        """Drop the service handle and clear all cached path lookups."""
        self._service = None
        self._path_cache.clear()

    async def is_connected(self) -> bool:
        """Return True when the service handle exists and still responds."""
        if self._service is None:
            return False
        try:
            await asyncio.to_thread(
                lambda: self._service.about().get(fields="user").execute()
            )
            return True
        except Exception:
            return False

    # --- Container listing ---

    async def list_containers(self) -> list[str]:
        """List available drives (My Drive + shared drives)."""
        self._ensure_connected()

        def _list() -> list[str]:
            drives = ["My Drive"]
            try:
                result = self._service.drives().list(pageSize=100).execute()
                for drive in result.get("drives", []):
                    drives.append(drive["name"])
            except Exception:
                pass  # Shared drives may not be available for this account
            return drives

        return await asyncio.to_thread(_list)

    # --- Browsing ---

    async def list_files(
        self,
        container: str,
        prefix: str = "",
        recursive: bool = False,
    ) -> list[CloudFile]:
        """List the direct children of *prefix* inside *container*.

        Args:
            container: drive name ("My Drive" or a shared drive).
            prefix: folder path whose children to list ("" = root).
            recursive: accepted for interface compatibility; deep traversal
                is provided by :meth:`list_files_recursive`.
        """
        self._ensure_connected()

        def _list() -> list[CloudFile]:
            parent_id = self._resolve_parent_id(container, prefix)
            # parent_id is a Drive-issued ID, so it is safe to interpolate.
            query = f"'{parent_id}' in parents and trashed = false"

            files: list[CloudFile] = []
            page_token: str | None = None

            # Page through results; Drive caps pageSize at 1000.
            while True:
                result = (
                    self._service.files()
                    .list(
                        q=query,
                        fields=LIST_FIELDS,
                        pageSize=1000,
                        pageToken=page_token,
                        orderBy="folder,name",
                    )
                    .execute()
                )

                for item in result.get("files", []):
                    files.append(self._item_to_cloud_file(item, prefix, container))

                page_token = result.get("nextPageToken")
                if not page_token:
                    break

            return files

        return await asyncio.to_thread(_list)

    async def stat(self, container: str, path: str) -> CloudFile:
        """Fetch metadata for a single file or folder.

        Raises:
            NotFoundError: if *path* does not exist in *container*.
        """
        self._ensure_connected()

        def _stat() -> CloudFile:
            file_id = self._resolve_path_to_id(container, path)
            if not file_id:
                raise NotFoundError(f"drive://{container}/{path} not found")
            result = (
                self._service.files()
                .get(fileId=file_id, fields=FILE_FIELDS)
                .execute()
            )
            parent_prefix = path.rsplit("/", 1)[0] if "/" in path else ""
            return self._item_to_cloud_file(result, parent_prefix, container)

        return await asyncio.to_thread(_stat)

    async def exists(self, container: str, path: str) -> bool:
        """Return True if *path* exists in *container* (EAFP over stat)."""
        try:
            await self.stat(container, path)
            return True
        except NotFoundError:
            return False

    # --- Transfer ---

    async def download(
        self,
        container: str,
        remote_path: str,
        local_path: str,
        progress_callback: ProgressCallback | None = None,
    ) -> None:
        """Download a file to *local_path*, exporting Workspace-native files.

        Google Docs/Sheets/Slides/Drawings have no raw bytes; they are
        exported to the "default" format in WORKSPACE_MIME_TYPES instead.

        Raises:
            NotFoundError: if *remote_path* does not exist.
        """
        self._ensure_connected()

        def _download() -> None:
            file_id = self._resolve_path_to_id(container, remote_path)
            if not file_id:
                raise NotFoundError(f"drive://{container}/{remote_path} not found")

            # Get file metadata to decide between export and direct download.
            meta = self._service.files().get(fileId=file_id, fields="mimeType,size").execute()
            mime_type = meta.get("mimeType", "")
            # Workspace exports report no "size"; treat missing as 0.
            total_size = int(meta.get("size", 0))

            Path(local_path).parent.mkdir(parents=True, exist_ok=True)

            if mime_type in WORKSPACE_MIME_TYPES:
                # Export Workspace files to the configured default format.
                export_mime = WORKSPACE_MIME_TYPES[mime_type]["default"]
                request = self._service.files().export_media(
                    fileId=file_id, mimeType=export_mime
                )
            else:
                request = self._service.files().get_media(fileId=file_id)

            with open(local_path, "wb") as f:
                downloader = MediaIoBaseDownload(f, request)
                done = False
                while not done:
                    status, done = downloader.next_chunk()
                    if progress_callback and status:
                        # Exported files have total_size == 0; report 0 until
                        # the final exact-size callback below.
                        transferred = int(status.progress() * total_size) if total_size else 0
                        progress_callback(transferred, total_size)

            if progress_callback:
                # Final callback with the true on-disk size (covers exports).
                final_size = Path(local_path).stat().st_size
                progress_callback(final_size, final_size)

        await asyncio.to_thread(_download)

    async def upload(
        self,
        container: str,
        local_path: str,
        remote_path: str,
        progress_callback: ProgressCallback | None = None,
    ) -> CloudFile:
        """Upload *local_path* to *remote_path* using a resumable upload.

        Returns:
            CloudFile describing the newly created remote file.
        """
        self._ensure_connected()

        def _upload() -> dict:
            # Split the destination into parent folder + file name.
            parent_prefix = remote_path.rsplit("/", 1)[0] if "/" in remote_path else ""
            file_name = remote_path.rsplit("/", 1)[-1]
            parent_id = self._resolve_parent_id(container, parent_prefix)

            file_metadata: dict[str, Any] = {
                "name": file_name,
                "parents": [parent_id],
            }

            media = MediaFileUpload(local_path, resumable=True)
            total_size = Path(local_path).stat().st_size

            request = self._service.files().create(
                body=file_metadata,
                media_body=media,
                fields=FILE_FIELDS,
            )

            # Drive's resumable protocol: next_chunk() returns a non-None
            # response only once the upload completes.
            response = None
            while response is None:
                status, response = request.next_chunk()
                if progress_callback and status:
                    transferred = int(status.progress() * total_size)
                    progress_callback(transferred, total_size)

            if progress_callback:
                progress_callback(total_size, total_size)

            # Invalidate cache entries touching the parent folder.
            self._invalidate_cache_prefix(parent_prefix)

            return response

        result = await asyncio.to_thread(_upload)
        parent_prefix = remote_path.rsplit("/", 1)[0] if "/" in remote_path else ""
        return self._item_to_cloud_file(result, parent_prefix, container)

    # --- Mutation ---

    async def delete(self, container: str, path: str) -> None:
        """Permanently delete the file or folder at *path*.

        Raises:
            NotFoundError: if *path* does not exist.
        """
        self._ensure_connected()

        def _delete() -> None:
            file_id = self._resolve_path_to_id(container, path)
            if not file_id:
                raise NotFoundError(f"drive://{container}/{path} not found")
            self._service.files().delete(fileId=file_id).execute()
            self._invalidate_cache_prefix(path)

        await asyncio.to_thread(_delete)

    async def create_folder(self, container: str, path: str) -> CloudFile:
        """Create a folder at *path* and return its CloudFile."""
        self._ensure_connected()

        def _create() -> dict:
            parent_prefix = path.rsplit("/", 1)[0] if "/" in path else ""
            folder_name = path.rsplit("/", 1)[-1]
            parent_id = self._resolve_parent_id(container, parent_prefix)

            file_metadata = {
                "name": folder_name,
                "mimeType": "application/vnd.google-apps.folder",
                "parents": [parent_id],
            }
            result = (
                self._service.files()
                .create(body=file_metadata, fields=FILE_FIELDS)
                .execute()
            )
            self._invalidate_cache_prefix(parent_prefix)
            return result

        result = await asyncio.to_thread(_create)
        parent_prefix = path.rsplit("/", 1)[0] if "/" in path else ""
        return self._item_to_cloud_file(result, parent_prefix, container)

    async def move(self, container: str, src: str, dst: str) -> CloudFile:
        """Move/rename *src* to *dst* by swapping parents and renaming.

        Raises:
            NotFoundError: if *src* does not exist.
        """
        self._ensure_connected()

        def _move() -> dict:
            file_id = self._resolve_path_to_id(container, src)
            if not file_id:
                raise NotFoundError(f"drive://{container}/{src} not found")

            # Get current parents so they can all be removed in one update.
            file_meta = self._service.files().get(fileId=file_id, fields="parents").execute()
            previous_parents = ",".join(file_meta.get("parents", []))

            dst_parent_prefix = dst.rsplit("/", 1)[0] if "/" in dst else ""
            new_name = dst.rsplit("/", 1)[-1]
            new_parent_id = self._resolve_parent_id(container, dst_parent_prefix)

            result = (
                self._service.files()
                .update(
                    fileId=file_id,
                    addParents=new_parent_id,
                    removeParents=previous_parents,
                    body={"name": new_name},
                    fields=FILE_FIELDS,
                )
                .execute()
            )
            self._invalidate_cache_prefix(src)
            self._invalidate_cache_prefix(dst)
            return result

        result = await asyncio.to_thread(_move)
        dst_parent = dst.rsplit("/", 1)[0] if "/" in dst else ""
        return self._item_to_cloud_file(result, dst_parent, container)

    async def copy(self, container: str, src: str, dst: str) -> CloudFile:
        """Copy *src* to *dst* (server-side; folders are not recursed).

        Raises:
            NotFoundError: if *src* does not exist.
        """
        self._ensure_connected()

        def _copy() -> dict:
            file_id = self._resolve_path_to_id(container, src)
            if not file_id:
                raise NotFoundError(f"drive://{container}/{src} not found")

            dst_parent_prefix = dst.rsplit("/", 1)[0] if "/" in dst else ""
            new_name = dst.rsplit("/", 1)[-1]
            new_parent_id = self._resolve_parent_id(container, dst_parent_prefix)

            result = (
                self._service.files()
                .copy(
                    fileId=file_id,
                    body={"name": new_name, "parents": [new_parent_id]},
                    fields=FILE_FIELDS,
                )
                .execute()
            )
            self._invalidate_cache_prefix(dst)
            return result

        result = await asyncio.to_thread(_copy)
        dst_parent = dst.rsplit("/", 1)[0] if "/" in dst else ""
        return self._item_to_cloud_file(result, dst_parent, container)

    # --- Sync support ---

    async def list_files_recursive(
        self, container: str, prefix: str = ""
    ) -> AsyncIterator[CloudFile]:
        """Yield every file (not folder) under *prefix*, depth-first."""
        files = await self.list_files(container, prefix)
        for f in files:
            if f.is_folder:
                async for child in self.list_files_recursive(container, f.path):
                    yield child
            else:
                yield f

    # --- Path-to-ID resolution ---

    def _resolve_parent_id(self, container: str, prefix: str) -> str:
        """Resolve a path prefix to a Drive folder ID.

        Walks one folder component at a time, consulting the LRU cache at
        each step so repeated lookups under the same tree are cheap.

        Raises:
            NotFoundError: if any component of *prefix* does not exist.
        """
        if not prefix:
            return self._get_root_id(container)

        cached = self._cache_get(f"{container}:{prefix}")
        if cached:
            return cached

        # Walk each path component from the container root.
        parts = prefix.strip("/").split("/")
        current_id = self._get_root_id(container)

        for i, part in enumerate(parts):
            partial_path = "/".join(parts[: i + 1])
            cached = self._cache_get(f"{container}:{partial_path}")
            if cached:
                current_id = cached
                continue

            query = (
                f"name = '{_escape_query(part)}' and "
                f"'{current_id}' in parents and "
                f"mimeType = 'application/vnd.google-apps.folder' and "
                f"trashed = false"
            )
            result = (
                self._service.files()
                .list(q=query, fields="files(id, name)", pageSize=1)
                .execute()
            )
            files = result.get("files", [])
            if not files:
                raise NotFoundError(f"Folder not found: {partial_path}")
            current_id = files[0]["id"]
            self._cache_set(f"{container}:{partial_path}", current_id)

        return current_id

    def _resolve_path_to_id(self, container: str, path: str) -> str | None:
        """Resolve a full file path to its Drive file ID, or None if absent."""
        cached = self._cache_get(f"{container}:{path}")
        if cached:
            return cached

        parent_prefix = path.rsplit("/", 1)[0] if "/" in path else ""
        file_name = path.rsplit("/", 1)[-1]

        try:
            parent_id = self._resolve_parent_id(container, parent_prefix)
        except NotFoundError:
            # A missing parent folder means the file cannot exist either.
            return None

        query = (
            f"name = '{_escape_query(file_name)}' and "
            f"'{parent_id}' in parents and "
            f"trashed = false"
        )
        result = (
            self._service.files()
            .list(q=query, fields="files(id)", pageSize=1)
            .execute()
        )
        files = result.get("files", [])
        if not files:
            return None
        file_id = files[0]["id"]
        self._cache_set(f"{container}:{path}", file_id)
        return file_id

    def _get_root_id(self, container: str) -> str:
        """Get the root folder ID for a container.

        "My Drive" maps to the special alias "root"; shared drives are looked
        up by name and cached under a reserved "__drive__:" key space.

        Raises:
            NotFoundError: if no shared drive matches *container*.
        """
        if container == "My Drive":
            return "root"
        # Shared drives
        cached = self._cache_get(f"__drive__:{container}")
        if cached:
            return cached
        result = (
            self._service.drives()
            .list(q=f"name = '{_escape_query(container)}'", pageSize=1)
            .execute()
        )
        drives = result.get("drives", [])
        if not drives:
            raise NotFoundError(f"Drive not found: {container}")
        drive_id = drives[0]["id"]
        self._cache_set(f"__drive__:{container}", drive_id)
        return drive_id

    # --- Cache management ---

    def _cache_get(self, key: str) -> str | None:
        """Return the cached ID for *key*, refreshing its LRU position."""
        if key in self._path_cache:
            self._path_cache.move_to_end(key)
            return self._path_cache[key]
        return None

    def _cache_set(self, key: str, value: str) -> None:
        """Insert/refresh *key* and evict oldest entries past the cap."""
        self._path_cache[key] = value
        self._path_cache.move_to_end(key)
        while len(self._path_cache) > PATH_CACHE_SIZE:
            self._path_cache.popitem(last=False)

    def _invalidate_cache_prefix(self, prefix: str) -> None:
        """Drop every cache entry whose key contains *prefix*.

        Deliberately conservative: keys are "{container}:{path}", so a plain
        startswith test would never match a bare path prefix.  Substring
        matching over-invalidates (and an empty prefix clears everything),
        which is safe — at worst the next lookup re-resolves via the API.
        """
        to_remove = [k for k in self._path_cache if prefix in k]
        for k in to_remove:
            del self._path_cache[k]

    # --- Helpers ---

    def _item_to_cloud_file(
        self, item: dict, parent_prefix: str, container: str = ""
    ) -> CloudFile:
        """Convert a Drive API file resource to a CloudFile.

        Args:
            item: raw ``files`` resource dict from the API.
            parent_prefix: path of the folder the item was listed under.
            container: drive name used to form the path-cache key; when empty
                the ID is not cached, since no valid lookup key can be built.
        """
        mime = item.get("mimeType", "")
        is_folder = mime == "application/vnd.google-apps.folder"
        name = item["name"]
        path = f"{parent_prefix}/{name}".lstrip("/") if parent_prefix else name

        modified = None
        if "modifiedTime" in item:
            # Drive returns RFC 3339 with a trailing "Z"; normalize it so
            # datetime.fromisoformat accepts it on pre-3.11 Pythons.
            modified = datetime.fromisoformat(
                item["modifiedTime"].replace("Z", "+00:00")
            )

        # Cache under the same "{container}:{path}" scheme the resolvers use.
        # (Previously this cached under ":{path}", a key no lookup ever
        # queries, so entries only polluted the LRU.)
        if container:
            self._cache_set(f"{container}:{path}", item["id"])

        return CloudFile(
            name=name,
            path=path,
            file_type=CloudFileType.FOLDER if is_folder else CloudFileType.FILE,
            size=int(item.get("size", 0)),
            last_modified=modified,
            checksum=item.get("md5Checksum"),
            content_type=mime if not is_folder else None,
            native_id=item["id"],
        )

    def _ensure_connected(self) -> None:
        """Raise CloudScopeError unless connect() has been called."""
        if self._service is None:
            raise CloudScopeError("Not connected. Call connect() first.")
|
|
561
|
+
|
|
562
|
+
|
|
563
|
+
def _escape_query(value: str) -> str:
|
|
564
|
+
"""Escape single quotes in Drive API query strings."""
|
|
565
|
+
return value.replace("\\", "\\\\").replace("'", "\\'")
|
|
566
|
+
|
|
567
|
+
|
|
568
|
+
# Register at import time so the backend is discoverable via the registry
# under the "drive" scheme.
register_backend("drive", GoogleDriveBackend)
|