async-mega-py 2.0.1.dev0__tar.gz → 2.0.4.dev0__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {async_mega_py-2.0.1.dev0 → async_mega_py-2.0.4.dev0}/PKG-INFO +13 -8
- {async_mega_py-2.0.1.dev0 → async_mega_py-2.0.4.dev0}/README.md +12 -7
- {async_mega_py-2.0.1.dev0 → async_mega_py-2.0.4.dev0}/pyproject.toml +1 -1
- {async_mega_py-2.0.1.dev0 → async_mega_py-2.0.4.dev0}/src/mega/api.py +3 -0
- {async_mega_py-2.0.1.dev0 → async_mega_py-2.0.4.dev0}/src/mega/cli/__init__.py +24 -7
- {async_mega_py-2.0.1.dev0 → async_mega_py-2.0.4.dev0}/src/mega/client.py +32 -31
- {async_mega_py-2.0.1.dev0 → async_mega_py-2.0.4.dev0}/src/mega/core.py +14 -19
- {async_mega_py-2.0.1.dev0 → async_mega_py-2.0.4.dev0}/src/mega/data_structures.py +8 -0
- async_mega_py-2.0.4.dev0/src/mega/download.py +152 -0
- {async_mega_py-2.0.1.dev0 → async_mega_py-2.0.4.dev0}/src/mega/filesystem.py +25 -5
- {async_mega_py-2.0.1.dev0 → async_mega_py-2.0.4.dev0}/src/mega/progress.py +52 -34
- {async_mega_py-2.0.1.dev0 → async_mega_py-2.0.4.dev0}/src/mega/transfer_it.py +23 -33
- {async_mega_py-2.0.1.dev0 → async_mega_py-2.0.4.dev0}/src/mega/utils.py +32 -32
- async_mega_py-2.0.1.dev0/src/mega/download.py +0 -87
- {async_mega_py-2.0.1.dev0 → async_mega_py-2.0.4.dev0}/LICENSE +0 -0
- {async_mega_py-2.0.1.dev0 → async_mega_py-2.0.4.dev0}/src/mega/__init__.py +0 -0
- {async_mega_py-2.0.1.dev0 → async_mega_py-2.0.4.dev0}/src/mega/__main__.py +0 -0
- {async_mega_py-2.0.1.dev0 → async_mega_py-2.0.4.dev0}/src/mega/auth.py +0 -0
- {async_mega_py-2.0.1.dev0 → async_mega_py-2.0.4.dev0}/src/mega/chunker.py +0 -0
- {async_mega_py-2.0.1.dev0 → async_mega_py-2.0.4.dev0}/src/mega/cli/app.py +0 -0
- {async_mega_py-2.0.1.dev0 → async_mega_py-2.0.4.dev0}/src/mega/crypto.py +0 -0
- {async_mega_py-2.0.1.dev0 → async_mega_py-2.0.4.dev0}/src/mega/env.py +0 -0
- {async_mega_py-2.0.1.dev0 → async_mega_py-2.0.4.dev0}/src/mega/errors.py +0 -0
- {async_mega_py-2.0.1.dev0 → async_mega_py-2.0.4.dev0}/src/mega/py.typed +0 -0
- {async_mega_py-2.0.1.dev0 → async_mega_py-2.0.4.dev0}/src/mega/upload.py +0 -0
- {async_mega_py-2.0.1.dev0 → async_mega_py-2.0.4.dev0}/src/mega/vault.py +0 -0
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
Metadata-Version: 2.4
|
|
2
2
|
Name: async-mega-py
|
|
3
|
-
Version: 2.0.
|
|
3
|
+
Version: 2.0.4.dev0
|
|
4
4
|
Summary: Python library for the Mega.nz and Transfer.it API
|
|
5
5
|
Keywords: api,downloader,mega,mega.nz,transfer.it
|
|
6
6
|
Author: NTFSvolume
|
|
@@ -167,8 +167,8 @@ await mega.download_public_file(public_handle, public_key, output_dir)
|
|
|
167
167
|
|
|
168
168
|
# Download a public folder
|
|
169
169
|
url = "https://mega.co.nz/#F!utYjgSTQ!OM4U3V5v_W4N5edSo0wolg1D5H0fwSrLD3oLnLuS9pc"
|
|
170
|
-
public_handle, public_key = mega.parse_folder_url(url)
|
|
171
|
-
success, fails = await mega.download_public_folder(public_handle, public_key, output_dir)
|
|
170
|
+
public_handle, public_key, selected_node = mega.parse_folder_url(url)
|
|
171
|
+
success, fails = await mega.download_public_folder(public_handle, public_key, output_dir, selected_node)
|
|
172
172
|
print(f"Download of '{url!s}' finished. Successful downloads {len(success)}, failed {len(fails)}")
|
|
173
173
|
|
|
174
174
|
# Import a file from URL
|
|
@@ -351,21 +351,26 @@ uv tool install async-mega-py[cli]
|
|
|
351
351
|
mega-py --help
|
|
352
352
|
```
|
|
353
353
|
|
|
354
|
-
```
|
|
355
|
-
|
|
354
|
+
```powershell
|
|
355
|
+
Usage: mega-py [OPTIONS] COMMAND [ARGS]...
|
|
356
|
+
|
|
357
|
+
CLI app for the Mega.nz and Transfer.it. Set MEGA_NZ_EMAIL and MEGA_NZ_PASSWORD
|
|
358
|
+
enviroment variables to use them as credentials for Mega
|
|
356
359
|
|
|
357
360
|
╭─ Options ──────────────────────────────────────────────────────────────────────╮
|
|
358
|
-
│ --
|
|
361
|
+
│ --verbose -v Increase verbosity (-v shows debug logs, -vv │
|
|
362
|
+
│ shows HTTP traffic) │
|
|
363
|
+
│ --help Show this message and exit. │
|
|
359
364
|
╰────────────────────────────────────────────────────────────────────────────────╯
|
|
360
365
|
╭─ Commands ─────────────────────────────────────────────────────────────────────╮
|
|
361
366
|
│ download Download a public file or folder by its URL (transfer.it / mega.nz) │
|
|
362
367
|
│ dump Dump a copy of your filesystem to disk │
|
|
363
368
|
│ stats Show account stats │
|
|
364
369
|
│ upload Upload a file to your account │
|
|
365
|
-
|
|
370
|
+
╰────────────────────────────────────────────────────────────────────────────────╯
|
|
366
371
|
```
|
|
367
372
|
|
|
368
373
|
> [!TIP]
|
|
369
|
-
> The CLI app does
|
|
374
|
+
> The CLI app does *not* accept login credentials, but you can still use your account by setting up the `MEGA_NZ_EMAIL` and `MEGA_NZ_PASSWORD` enviroment variables
|
|
370
375
|
>
|
|
371
376
|
> It will also read them from an `.env` file (if found)
|
|
@@ -131,8 +131,8 @@ await mega.download_public_file(public_handle, public_key, output_dir)
|
|
|
131
131
|
|
|
132
132
|
# Download a public folder
|
|
133
133
|
url = "https://mega.co.nz/#F!utYjgSTQ!OM4U3V5v_W4N5edSo0wolg1D5H0fwSrLD3oLnLuS9pc"
|
|
134
|
-
public_handle, public_key = mega.parse_folder_url(url)
|
|
135
|
-
success, fails = await mega.download_public_folder(public_handle, public_key, output_dir)
|
|
134
|
+
public_handle, public_key, selected_node = mega.parse_folder_url(url)
|
|
135
|
+
success, fails = await mega.download_public_folder(public_handle, public_key, output_dir, selected_node)
|
|
136
136
|
print(f"Download of '{url!s}' finished. Successful downloads {len(success)}, failed {len(fails)}")
|
|
137
137
|
|
|
138
138
|
# Import a file from URL
|
|
@@ -315,21 +315,26 @@ uv tool install async-mega-py[cli]
|
|
|
315
315
|
mega-py --help
|
|
316
316
|
```
|
|
317
317
|
|
|
318
|
-
```
|
|
319
|
-
|
|
318
|
+
```powershell
|
|
319
|
+
Usage: mega-py [OPTIONS] COMMAND [ARGS]...
|
|
320
|
+
|
|
321
|
+
CLI app for the Mega.nz and Transfer.it. Set MEGA_NZ_EMAIL and MEGA_NZ_PASSWORD
|
|
322
|
+
enviroment variables to use them as credentials for Mega
|
|
320
323
|
|
|
321
324
|
╭─ Options ──────────────────────────────────────────────────────────────────────╮
|
|
322
|
-
│ --
|
|
325
|
+
│ --verbose -v Increase verbosity (-v shows debug logs, -vv │
|
|
326
|
+
│ shows HTTP traffic) │
|
|
327
|
+
│ --help Show this message and exit. │
|
|
323
328
|
╰────────────────────────────────────────────────────────────────────────────────╯
|
|
324
329
|
╭─ Commands ─────────────────────────────────────────────────────────────────────╮
|
|
325
330
|
│ download Download a public file or folder by its URL (transfer.it / mega.nz) │
|
|
326
331
|
│ dump Dump a copy of your filesystem to disk │
|
|
327
332
|
│ stats Show account stats │
|
|
328
333
|
│ upload Upload a file to your account │
|
|
329
|
-
|
|
334
|
+
╰────────────────────────────────────────────────────────────────────────────────╯
|
|
330
335
|
```
|
|
331
336
|
|
|
332
337
|
> [!TIP]
|
|
333
|
-
> The CLI app does
|
|
338
|
+
> The CLI app does *not* accept login credentials, but you can still use your account by setting up the `MEGA_NZ_EMAIL` and `MEGA_NZ_PASSWORD` enviroment variables
|
|
334
339
|
>
|
|
335
340
|
> It will also read them from an `.env` file (if found)
|
|
@@ -5,6 +5,7 @@ import contextlib
|
|
|
5
5
|
import dataclasses
|
|
6
6
|
import logging
|
|
7
7
|
from collections.abc import Mapping, Sequence
|
|
8
|
+
from contextvars import ContextVar
|
|
8
9
|
from functools import wraps
|
|
9
10
|
from types import MappingProxyType
|
|
10
11
|
from typing import TYPE_CHECKING, Any, ClassVar, Literal, ParamSpec, Self, TypeVar
|
|
@@ -28,6 +29,7 @@ _UA = "Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:137.0) Gecko/20100101 Firefo
|
|
|
28
29
|
_DEFAULT_HEADERS: MappingProxyType[str, str] = MappingProxyType({"User-Agent": _UA})
|
|
29
30
|
|
|
30
31
|
|
|
32
|
+
LOG_HTTP_TRAFFIC: ContextVar[bool] = ContextVar("LOG_HTTP_TRAFFIC", default=False)
|
|
31
33
|
logger = logging.getLogger(__name__)
|
|
32
34
|
|
|
33
35
|
|
|
@@ -84,6 +86,7 @@ class MegaAPI:
|
|
|
84
86
|
self.__session = session
|
|
85
87
|
self._auto_close_session = session is None
|
|
86
88
|
self._rate_limiter = AsyncLimiter(100, 60)
|
|
89
|
+
logger.disabled = not LOG_HTTP_TRAFFIC.get()
|
|
87
90
|
|
|
88
91
|
@property
|
|
89
92
|
def entrypoint(self) -> yarl.URL:
|
|
@@ -10,6 +10,7 @@ import typer
|
|
|
10
10
|
import yarl
|
|
11
11
|
|
|
12
12
|
from mega import __version__, env, progress
|
|
13
|
+
from mega.api import LOG_HTTP_TRAFFIC
|
|
13
14
|
from mega.cli.app import CLIApp
|
|
14
15
|
from mega.client import MegaNzClient
|
|
15
16
|
from mega.transfer_it import TransferItClient
|
|
@@ -22,8 +23,22 @@ if TYPE_CHECKING:
|
|
|
22
23
|
logger = logging.getLogger("mega")
|
|
23
24
|
|
|
24
25
|
|
|
25
|
-
def verbose(
|
|
26
|
-
|
|
26
|
+
def verbose(
|
|
27
|
+
verbose: Annotated[
|
|
28
|
+
int,
|
|
29
|
+
typer.Option(
|
|
30
|
+
"-v",
|
|
31
|
+
"--verbose",
|
|
32
|
+
count=True,
|
|
33
|
+
help="Increase verbosity (-v shows debug logs, -vv shows HTTP traffic)",
|
|
34
|
+
),
|
|
35
|
+
] = 0,
|
|
36
|
+
) -> None:
|
|
37
|
+
if verbose > 1:
|
|
38
|
+
LOG_HTTP_TRAFFIC.set(True)
|
|
39
|
+
|
|
40
|
+
level = logging.DEBUG if verbose else logging.INFO
|
|
41
|
+
setup_logger(level)
|
|
27
42
|
|
|
28
43
|
|
|
29
44
|
app = CLIApp(
|
|
@@ -51,8 +66,10 @@ async def transfer_it(url: str, output_dir: Path) -> None:
|
|
|
51
66
|
with progress.new_progress():
|
|
52
67
|
transfer_id = client.parse_url(url)
|
|
53
68
|
logger.info(f"Downloading '{url}'")
|
|
54
|
-
|
|
55
|
-
logger.info(
|
|
69
|
+
results = await client.download_transfer(transfer_id, output_dir)
|
|
70
|
+
logger.info(
|
|
71
|
+
f"Download of '{url}' finished. Successful = {len(results.success)}, failed = {len(results.fails)}"
|
|
72
|
+
)
|
|
56
73
|
|
|
57
74
|
|
|
58
75
|
@app.command()
|
|
@@ -112,10 +129,10 @@ async def download_file(mega: MegaNzClient, url: str, output: Path) -> None:
|
|
|
112
129
|
|
|
113
130
|
|
|
114
131
|
async def download_folder(mega: MegaNzClient, url: str, output: Path) -> None:
|
|
115
|
-
public_handle, public_key = mega.parse_folder_url(url)
|
|
132
|
+
public_handle, public_key, root_node = mega.parse_folder_url(url)
|
|
116
133
|
logger.info(f"Downloading {url}")
|
|
117
|
-
|
|
118
|
-
logger.info(f"Download of {url} finished. Successful
|
|
134
|
+
results = await mega.download_public_folder(public_handle, public_key, output, root_node)
|
|
135
|
+
logger.info(f"Download of '{url}' finished. Successful = {len(results.success)}, failed = {len(results.fails)}")
|
|
119
136
|
|
|
120
137
|
|
|
121
138
|
def main() -> None:
|
|
@@ -6,6 +6,8 @@ import logging
|
|
|
6
6
|
from pathlib import Path, PurePosixPath
|
|
7
7
|
from typing import TYPE_CHECKING, Any
|
|
8
8
|
|
|
9
|
+
import aiohttp
|
|
10
|
+
|
|
9
11
|
from mega import progress
|
|
10
12
|
from mega.core import MegaCore
|
|
11
13
|
from mega.crypto import (
|
|
@@ -18,8 +20,9 @@ from mega.crypto import (
|
|
|
18
20
|
encrypt_key,
|
|
19
21
|
)
|
|
20
22
|
from mega.data_structures import AccountStats, Attributes, Crypto, FileInfo, Node, NodeID, NodeType, UserResponse
|
|
23
|
+
from mega.download import DownloadResults
|
|
21
24
|
from mega.filesystem import FileSystem
|
|
22
|
-
from mega.utils import
|
|
25
|
+
from mega.utils import Site, async_map
|
|
23
26
|
|
|
24
27
|
from .errors import MegaNzError, RequestError, ValidationError
|
|
25
28
|
|
|
@@ -33,6 +36,9 @@ __all__ = ["MegaNzClient"]
|
|
|
33
36
|
logger = logging.getLogger(__name__)
|
|
34
37
|
|
|
35
38
|
|
|
39
|
+
_DOMAIN = Site.MEGA.value
|
|
40
|
+
|
|
41
|
+
|
|
36
42
|
class MegaNzClient(MegaCore):
|
|
37
43
|
"""Interface with all the public methods of the API"""
|
|
38
44
|
|
|
@@ -95,7 +101,7 @@ class MegaNzClient(MegaCore):
|
|
|
95
101
|
|
|
96
102
|
public_handle = await self._get_public_handle(file.id)
|
|
97
103
|
public_key = a32_to_base64(file._crypto.full_key)
|
|
98
|
-
return f"{
|
|
104
|
+
return f"{_DOMAIN}/file/{public_handle}#{public_key}"
|
|
99
105
|
|
|
100
106
|
async def get_folder_link(self, folder: Node) -> str:
|
|
101
107
|
if folder.type is not NodeType.FOLDER:
|
|
@@ -105,7 +111,7 @@ class MegaNzClient(MegaCore):
|
|
|
105
111
|
raise RequestError("")
|
|
106
112
|
public_handle = await self._get_public_handle(folder.id)
|
|
107
113
|
public_key = a32_to_base64(folder._crypto.share_key)
|
|
108
|
-
return f"{
|
|
114
|
+
return f"{_DOMAIN}/folder/{public_handle}#{public_key}"
|
|
109
115
|
|
|
110
116
|
async def get_id_from_public_handle(self, public_handle: NodeID) -> str:
|
|
111
117
|
resp: GetNodesResponse = await self._api.post(
|
|
@@ -149,6 +155,7 @@ class MegaNzClient(MegaCore):
|
|
|
149
155
|
return await self.get_folder_link(fs[node.id])
|
|
150
156
|
|
|
151
157
|
async def get_public_filesystem(self, public_handle: NodeID, public_key: str) -> FileSystem:
|
|
158
|
+
logger.info(f"Getting filesystem for {public_handle}...")
|
|
152
159
|
folder: GetNodesResponse = await self._api.post(
|
|
153
160
|
{
|
|
154
161
|
"a": "f",
|
|
@@ -165,11 +172,8 @@ class MegaNzClient(MegaCore):
|
|
|
165
172
|
async def download(self, node: Node, output_dir: str | PathLike[str] | None = None) -> Path:
|
|
166
173
|
"""Download a file by it's file object."""
|
|
167
174
|
file_info = await self._request_file_info(node.id)
|
|
168
|
-
|
|
169
|
-
|
|
170
|
-
node._crypto,
|
|
171
|
-
output_folder=output_dir,
|
|
172
|
-
)
|
|
175
|
+
output_path = Path(output_dir or ".") / node.attributes.name
|
|
176
|
+
return await self._download_file(file_info, node._crypto, output_path)
|
|
173
177
|
|
|
174
178
|
async def download_public_file(
|
|
175
179
|
self,
|
|
@@ -180,18 +184,18 @@ class MegaNzClient(MegaCore):
|
|
|
180
184
|
full_key = b64_to_a32(public_key)
|
|
181
185
|
crypto = Crypto.decompose(full_key)
|
|
182
186
|
file_info = await self._request_file_info(public_handle, is_public=True)
|
|
183
|
-
|
|
184
|
-
|
|
185
|
-
|
|
186
|
-
|
|
187
|
-
)
|
|
187
|
+
attrs = decrypt_attr(b64_url_decode(file_info._at), crypto.key)
|
|
188
|
+
output_name = Attributes.parse(attrs).name
|
|
189
|
+
output_path = Path(output_dir or ".") / output_name
|
|
190
|
+
return await self._download_file(file_info, crypto, output_path)
|
|
188
191
|
|
|
189
192
|
async def download_public_folder(
|
|
190
193
|
self,
|
|
191
194
|
public_handle: NodeID,
|
|
192
195
|
public_key: str,
|
|
193
196
|
output_dir: str | PathLike[str] | None = None,
|
|
194
|
-
|
|
197
|
+
root_id: NodeID | None = None,
|
|
198
|
+
) -> DownloadResults:
|
|
195
199
|
"""Recursively download all files from a public folder, preserving its internal directory structure.
|
|
196
200
|
|
|
197
201
|
Returns:
|
|
@@ -201,30 +205,27 @@ class MegaNzClient(MegaCore):
|
|
|
201
205
|
fs = await self.get_public_filesystem(public_handle, public_key)
|
|
202
206
|
|
|
203
207
|
base_path = Path(output_dir or ".")
|
|
204
|
-
folder_url = f"{
|
|
208
|
+
folder_url = f"{_DOMAIN}/folder/{public_handle}#{public_key}"
|
|
205
209
|
|
|
206
|
-
async def
|
|
210
|
+
async def download(file: Node) -> tuple[NodeID, Path | Exception]:
|
|
207
211
|
web_url = folder_url + f"/file/{file.id}"
|
|
208
|
-
|
|
212
|
+
output_path = base_path / fs.relative_path(file.id)
|
|
209
213
|
try:
|
|
210
214
|
file_info = await self._request_file_info(file.id, public_handle)
|
|
211
|
-
|
|
212
|
-
except Exception as exc:
|
|
213
|
-
logger.error(f'Unable to download {web_url} to "{output_folder}" ({exc})')
|
|
214
|
-
raise
|
|
215
|
+
result = await self._download_file(file_info, file._crypto, output_path)
|
|
215
216
|
|
|
216
|
-
|
|
217
|
-
|
|
218
|
-
|
|
219
|
-
|
|
217
|
+
except Exception as exc:
|
|
218
|
+
if isinstance(exc, aiohttp.ClientResponseError):
|
|
219
|
+
msg = f"[{exc.status}] {exc.message}"
|
|
220
|
+
else:
|
|
221
|
+
msg = f"({type(exc).__name__})"
|
|
222
|
+
logger.error(f'Unable to download {web_url} to "{output_path}" {msg}')
|
|
223
|
+
result = exc
|
|
220
224
|
|
|
221
|
-
|
|
222
|
-
success: list[Path] = []
|
|
223
|
-
fails: list[Exception] = [
|
|
224
|
-
result for result in results if isinstance(result, Exception) or (success.append(result) and False)
|
|
225
|
-
]
|
|
225
|
+
return file.id, result
|
|
226
226
|
|
|
227
|
-
|
|
227
|
+
results = await async_map(download, fs.files_from(root_id))
|
|
228
|
+
return DownloadResults.split(dict(results))
|
|
228
229
|
|
|
229
230
|
async def upload(self, file_path: str | PathLike[str], dest_node_id: NodeID | None = None) -> Node:
|
|
230
231
|
if not dest_node_id:
|
|
@@ -5,7 +5,7 @@ import logging
|
|
|
5
5
|
import random
|
|
6
6
|
import re
|
|
7
7
|
from pathlib import Path
|
|
8
|
-
from typing import TYPE_CHECKING, Any,
|
|
8
|
+
from typing import TYPE_CHECKING, Any, NamedTuple
|
|
9
9
|
|
|
10
10
|
import yarl
|
|
11
11
|
from Crypto.Cipher import AES
|
|
@@ -15,13 +15,11 @@ from mega.api import AbstractApiClient
|
|
|
15
15
|
from mega.crypto import (
|
|
16
16
|
a32_to_base64,
|
|
17
17
|
a32_to_bytes,
|
|
18
|
-
b64_url_decode,
|
|
19
18
|
b64_url_encode,
|
|
20
|
-
decrypt_attr,
|
|
21
19
|
encrypt_attr,
|
|
22
20
|
encrypt_key,
|
|
23
21
|
)
|
|
24
|
-
from mega.data_structures import
|
|
22
|
+
from mega.data_structures import Crypto, FileInfo, FileInfoSerialized, Node, NodeID
|
|
25
23
|
from mega.errors import MegaNzError, RequestError, ValidationError
|
|
26
24
|
from mega.filesystem import UserFileSystem
|
|
27
25
|
from mega.utils import Site, random_u32int_array, transform_v1_url
|
|
@@ -40,15 +38,18 @@ logger = logging.getLogger(__name__)
|
|
|
40
38
|
|
|
41
39
|
class ParsedPublicURL(NamedTuple):
|
|
42
40
|
is_folder: bool
|
|
43
|
-
public_handle:
|
|
41
|
+
public_handle: NodeID
|
|
44
42
|
public_key: str
|
|
45
|
-
|
|
46
|
-
|
|
43
|
+
selected_folder: NodeID | None = None
|
|
44
|
+
selected_file: NodeID | None = None
|
|
45
|
+
|
|
46
|
+
@property
|
|
47
|
+
def selected_node(self) -> NodeID | None:
|
|
48
|
+
return self.selected_folder or self.selected_file
|
|
47
49
|
|
|
48
50
|
|
|
49
51
|
class MegaCore(AbstractApiClient):
|
|
50
52
|
__slots__ = ("_filesystem", "_lock", "_vault")
|
|
51
|
-
_primary_url: ClassVar[str] = "https://mega.nz"
|
|
52
53
|
|
|
53
54
|
def __init__(self, session: aiohttp.ClientSession | None = None) -> None:
|
|
54
55
|
super().__init__(session)
|
|
@@ -89,18 +90,18 @@ class MegaCore(AbstractApiClient):
|
|
|
89
90
|
logger.info("Login complete!")
|
|
90
91
|
|
|
91
92
|
@classmethod
|
|
92
|
-
def parse_file_url(cls, url: str | yarl.URL) -> tuple[
|
|
93
|
+
def parse_file_url(cls, url: str | yarl.URL) -> tuple[NodeID, str]:
|
|
93
94
|
result = cls.parse_url(url)
|
|
94
95
|
if result.is_folder:
|
|
95
96
|
raise ValueError("This is a folder URL: {url}")
|
|
96
97
|
return result.public_handle, result.public_key
|
|
97
98
|
|
|
98
99
|
@classmethod
|
|
99
|
-
def parse_folder_url(cls, url: str | yarl.URL) -> tuple[str,
|
|
100
|
+
def parse_folder_url(cls, url: str | yarl.URL) -> tuple[NodeID, str, NodeID | None]:
|
|
100
101
|
result = cls.parse_url(url)
|
|
101
102
|
if not result.is_folder:
|
|
102
103
|
raise ValueError("This is a file URL: {url}")
|
|
103
|
-
return result.public_handle, result.public_key
|
|
104
|
+
return result.public_handle, result.public_key, result.selected_node
|
|
104
105
|
|
|
105
106
|
@staticmethod
|
|
106
107
|
def parse_url(url: str | yarl.URL) -> ParsedPublicURL:
|
|
@@ -200,8 +201,7 @@ class MegaCore(AbstractApiClient):
|
|
|
200
201
|
self,
|
|
201
202
|
file_info: FileInfo,
|
|
202
203
|
crypto: Crypto,
|
|
203
|
-
output_folder: str | PathLike[str]
|
|
204
|
-
output_name: str | None = None,
|
|
204
|
+
output_folder: str | PathLike[str],
|
|
205
205
|
) -> Path:
|
|
206
206
|
# Seems to happens sometime... When this occurs, files are
|
|
207
207
|
# inaccessible also in the official web app.
|
|
@@ -209,12 +209,7 @@ class MegaCore(AbstractApiClient):
|
|
|
209
209
|
if not file_info.url:
|
|
210
210
|
raise RequestError("File not accessible anymore")
|
|
211
211
|
|
|
212
|
-
|
|
213
|
-
attrs = decrypt_attr(b64_url_decode(file_info._at), crypto.key)
|
|
214
|
-
output_name = Attributes.parse(attrs).name
|
|
215
|
-
|
|
216
|
-
output_path = Path(output_folder or Path()) / output_name
|
|
217
|
-
|
|
212
|
+
output_path = Path(output_folder)
|
|
218
213
|
async with self._api.get(file_info.url) as response:
|
|
219
214
|
with progress.new_task(output_path.name, file_info.size, "DOWN"):
|
|
220
215
|
return await download.encrypted_stream(
|
|
@@ -222,6 +222,14 @@ class Node(_DictDumper):
|
|
|
222
222
|
_a: str
|
|
223
223
|
_crypto: Crypto
|
|
224
224
|
|
|
225
|
+
@property
|
|
226
|
+
def is_file(self) -> bool:
|
|
227
|
+
return self.type is NodeType.FILE
|
|
228
|
+
|
|
229
|
+
@property
|
|
230
|
+
def is_folder(self) -> bool:
|
|
231
|
+
return self.type is NodeType.FOLDER
|
|
232
|
+
|
|
225
233
|
@classmethod
|
|
226
234
|
def parse(cls, node: NodeSerialized) -> Node:
|
|
227
235
|
owner = node.get("u", "")
|
|
@@ -0,0 +1,152 @@
|
|
|
1
|
+
from __future__ import annotations
|
|
2
|
+
|
|
3
|
+
import asyncio
|
|
4
|
+
import contextlib
|
|
5
|
+
import errno
|
|
6
|
+
import logging
|
|
7
|
+
import shutil
|
|
8
|
+
import tempfile
|
|
9
|
+
import weakref
|
|
10
|
+
from collections.abc import Iterator, Mapping
|
|
11
|
+
from pathlib import Path
|
|
12
|
+
from types import MappingProxyType
|
|
13
|
+
from typing import IO, TYPE_CHECKING, Final, Generic, Self, TypeVar
|
|
14
|
+
|
|
15
|
+
from mega import progress
|
|
16
|
+
from mega.chunker import MegaChunker
|
|
17
|
+
from mega.crypto import get_chunks
|
|
18
|
+
from mega.data_structures import NodeID
|
|
19
|
+
|
|
20
|
+
if TYPE_CHECKING:
|
|
21
|
+
from collections.abc import AsyncGenerator
|
|
22
|
+
|
|
23
|
+
import aiohttp
|
|
24
|
+
|
|
25
|
+
_T = TypeVar("_T")
|
|
26
|
+
logger = logging.getLogger(__name__)
|
|
27
|
+
|
|
28
|
+
|
|
29
|
+
class WeakAsyncLocks(Generic[_T]):
|
|
30
|
+
"""A WeakValueDictionary wrapper for asyncio.Locks.
|
|
31
|
+
|
|
32
|
+
Unused locks are automatically garbage collected. When trying to retrieve a
|
|
33
|
+
lock that does not exists, a new lock will be created.
|
|
34
|
+
"""
|
|
35
|
+
|
|
36
|
+
__slots__ = ("__locks",)
|
|
37
|
+
|
|
38
|
+
def __init__(self) -> None:
|
|
39
|
+
self.__locks: Final = weakref.WeakValueDictionary[_T, asyncio.Lock]()
|
|
40
|
+
|
|
41
|
+
def __getitem__(self, key: _T, /) -> asyncio.Lock:
|
|
42
|
+
lock = self.__locks.get(key)
|
|
43
|
+
if lock is None:
|
|
44
|
+
self.__locks[key] = lock = asyncio.Lock()
|
|
45
|
+
return lock
|
|
46
|
+
|
|
47
|
+
|
|
48
|
+
_LOCKS: WeakAsyncLocks[Path] = WeakAsyncLocks()
|
|
49
|
+
_CHUNK_SIZE = 1024 * 1024 * 5 # 5MB
|
|
50
|
+
|
|
51
|
+
|
|
52
|
+
async def encrypted_stream(
|
|
53
|
+
stream: aiohttp.StreamReader,
|
|
54
|
+
output_path: Path,
|
|
55
|
+
file_size: int,
|
|
56
|
+
iv: tuple[int, int],
|
|
57
|
+
meta_mac: tuple[int, int],
|
|
58
|
+
key: tuple[int, int, int, int],
|
|
59
|
+
) -> Path:
|
|
60
|
+
async with _LOCKS[output_path]:
|
|
61
|
+
if await asyncio.to_thread(output_path.exists):
|
|
62
|
+
raise FileExistsError(errno.EEXIST, output_path)
|
|
63
|
+
|
|
64
|
+
chunker = MegaChunker(iv, key, meta_mac)
|
|
65
|
+
progress_hook = progress.current_hook.get()
|
|
66
|
+
async with _new_temp_download(output_path) as output:
|
|
67
|
+
for _, chunk_size in get_chunks(file_size):
|
|
68
|
+
encrypted_chunk = await stream.readexactly(chunk_size)
|
|
69
|
+
chunk = chunker.read(encrypted_chunk)
|
|
70
|
+
output.write(chunk)
|
|
71
|
+
progress_hook(len(chunk))
|
|
72
|
+
|
|
73
|
+
chunker.check_integrity()
|
|
74
|
+
|
|
75
|
+
return output_path
|
|
76
|
+
|
|
77
|
+
|
|
78
|
+
async def stream(stream: aiohttp.StreamReader, output_path: Path) -> Path:
|
|
79
|
+
async with _LOCKS[output_path]:
|
|
80
|
+
if await asyncio.to_thread(output_path.exists):
|
|
81
|
+
raise FileExistsError(errno.EEXIST, output_path)
|
|
82
|
+
|
|
83
|
+
progress_hook = progress.current_hook.get()
|
|
84
|
+
async with _new_temp_download(output_path) as output:
|
|
85
|
+
async for chunk in stream.iter_chunked(_CHUNK_SIZE):
|
|
86
|
+
output.write(chunk)
|
|
87
|
+
progress_hook(len(chunk))
|
|
88
|
+
|
|
89
|
+
return output_path
|
|
90
|
+
|
|
91
|
+
|
|
92
|
+
@contextlib.asynccontextmanager
|
|
93
|
+
async def _new_temp_download(output_path: Path) -> AsyncGenerator[IO[bytes]]:
|
|
94
|
+
# We need NamedTemporaryFile to not delete on file.close() but on context exit, which is not supported until python 3.12
|
|
95
|
+
temp_file = tempfile.NamedTemporaryFile(prefix="mega_py_", delete=False)
|
|
96
|
+
logger.debug(f'Created temp file "{temp_file.name!s}" for "{output_path!s}"')
|
|
97
|
+
try:
|
|
98
|
+
yield temp_file
|
|
99
|
+
|
|
100
|
+
def move():
|
|
101
|
+
temp_file.close()
|
|
102
|
+
output_path.parent.mkdir(parents=True, exist_ok=True)
|
|
103
|
+
shutil.move(temp_file.name, output_path)
|
|
104
|
+
logger.debug(f'Moved temp file "{temp_file.name!s}" to "{output_path!s}"')
|
|
105
|
+
|
|
106
|
+
await asyncio.to_thread(move)
|
|
107
|
+
|
|
108
|
+
finally:
|
|
109
|
+
|
|
110
|
+
def delete():
|
|
111
|
+
if not temp_file.closed:
|
|
112
|
+
temp_file.close()
|
|
113
|
+
Path(temp_file.name).unlink(missing_ok=True)
|
|
114
|
+
|
|
115
|
+
await asyncio.to_thread(delete)
|
|
116
|
+
|
|
117
|
+
|
|
118
|
+
class DownloadResults(Mapping[NodeID, Path | Exception]):
|
|
119
|
+
success: MappingProxyType[NodeID, Path]
|
|
120
|
+
fails: MappingProxyType[NodeID, Exception]
|
|
121
|
+
|
|
122
|
+
def __init__(self, success: Mapping[NodeID, Path], fails: Mapping[NodeID, Exception]) -> None:
|
|
123
|
+
if not success.keys().isdisjoint(fails.keys()):
|
|
124
|
+
raise ValueError("A NodeID cannot be in both success and fails")
|
|
125
|
+
self.success = MappingProxyType(success)
|
|
126
|
+
self.fails = MappingProxyType(fails)
|
|
127
|
+
|
|
128
|
+
def __getitem__(self, value: NodeID) -> Path | Exception:
|
|
129
|
+
try:
|
|
130
|
+
return self.success[value]
|
|
131
|
+
except KeyError:
|
|
132
|
+
return self.fails[value]
|
|
133
|
+
|
|
134
|
+
def __iter__(self) -> Iterator[NodeID]:
|
|
135
|
+
yield from self.success
|
|
136
|
+
yield from self.fails
|
|
137
|
+
|
|
138
|
+
def __len__(self) -> int:
|
|
139
|
+
return len(self.success) + len(self.fails)
|
|
140
|
+
|
|
141
|
+
@classmethod
|
|
142
|
+
def split(cls, results: Mapping[NodeID, Path | Exception]) -> Self:
|
|
143
|
+
success: dict[NodeID, Path] = {}
|
|
144
|
+
fails: dict[NodeID, Exception] = {}
|
|
145
|
+
|
|
146
|
+
for node_id, result in results.items():
|
|
147
|
+
if isinstance(result, Exception):
|
|
148
|
+
fails[node_id] = result
|
|
149
|
+
else:
|
|
150
|
+
success[node_id] = result
|
|
151
|
+
|
|
152
|
+
return cls(success, fails)
|
|
@@ -7,7 +7,6 @@ from __future__ import annotations
|
|
|
7
7
|
|
|
8
8
|
import dataclasses
|
|
9
9
|
import errno
|
|
10
|
-
from collections.abc import Generator
|
|
11
10
|
from pathlib import PurePosixPath
|
|
12
11
|
from types import MappingProxyType
|
|
13
12
|
from typing import TYPE_CHECKING, Any, NamedTuple, Self
|
|
@@ -34,7 +33,7 @@ def _resolve_paths(walker: _NodeWalker, *roots: Node) -> Generator[_NodeLookup]:
|
|
|
34
33
|
node_path = current_path / node.attributes.name
|
|
35
34
|
yield node.id, node_path
|
|
36
35
|
|
|
37
|
-
if
|
|
36
|
+
if not node.is_file:
|
|
38
37
|
yield from walk(node.id, node_path)
|
|
39
38
|
|
|
40
39
|
for root in roots:
|
|
@@ -136,7 +135,7 @@ class SimpleFileSystem(_NodeWalker, _DictDumper):
|
|
|
136
135
|
case NodeType.TRASH:
|
|
137
136
|
trash_bin = node
|
|
138
137
|
case _:
|
|
139
|
-
raise RuntimeError
|
|
138
|
+
raise RuntimeError # pyright: ignore[reportUnreachable]
|
|
140
139
|
|
|
141
140
|
return cls(
|
|
142
141
|
root=root,
|
|
@@ -234,14 +233,14 @@ class FileSystem(SimpleFileSystem):
|
|
|
234
233
|
def files(self) -> Iterable[Node]:
|
|
235
234
|
"""All files that are NOT deleted (recursive)"""
|
|
236
235
|
for node in self:
|
|
237
|
-
if node.
|
|
236
|
+
if node.is_file and node.id not in self._deleted:
|
|
238
237
|
yield node
|
|
239
238
|
|
|
240
239
|
@property
|
|
241
240
|
def folders(self) -> Iterable[Node]:
|
|
242
241
|
"""All folders that are NOT deleted (recursive)"""
|
|
243
242
|
for node in self:
|
|
244
|
-
if node.
|
|
243
|
+
if node.is_folder and node.id not in self._deleted:
|
|
245
244
|
yield node
|
|
246
245
|
|
|
247
246
|
def dirmap(self, node_id: str, *, recursive: bool = False) -> dict[NodeID, PurePosixPath]:
|
|
@@ -305,6 +304,27 @@ class FileSystem(SimpleFileSystem):
|
|
|
305
304
|
assert nodes
|
|
306
305
|
return self[nodes[0]]
|
|
307
306
|
|
|
307
|
+
def files_from(self, node_id: NodeID | None) -> Iterable[Node]:
|
|
308
|
+
"""
|
|
309
|
+
Yield every file that is reachable from `node_id`.
|
|
310
|
+
|
|
311
|
+
- If `node_id` is `None`: yield all non deleted files on the file system.
|
|
312
|
+
- If `node_id` points to a file: yield only that file.
|
|
313
|
+
- Any other case: yield all files within that node (recursively).
|
|
314
|
+
|
|
315
|
+
"""
|
|
316
|
+
if not node_id:
|
|
317
|
+
yield from self.files
|
|
318
|
+
return
|
|
319
|
+
|
|
320
|
+
root = self[node_id]
|
|
321
|
+
if root.is_file:
|
|
322
|
+
yield root
|
|
323
|
+
else:
|
|
324
|
+
for child in self.iterdir(root.id, recursive=True):
|
|
325
|
+
if child.is_file:
|
|
326
|
+
yield child
|
|
327
|
+
|
|
308
328
|
def dump(self, *, simple: bool = False) -> dict[str, Any]:
|
|
309
329
|
"""Get a JSONable dict representation of this object"""
|
|
310
330
|
dump = super(FileSystem, self).dump()
|
|
@@ -3,14 +3,16 @@ from __future__ import annotations
|
|
|
3
3
|
import asyncio
|
|
4
4
|
import contextlib
|
|
5
5
|
from contextvars import ContextVar
|
|
6
|
-
from typing import TYPE_CHECKING, Any, Literal, Protocol, TypeAlias
|
|
6
|
+
from typing import TYPE_CHECKING, Any, Literal, Protocol, TypeAlias, TypeVar
|
|
7
7
|
|
|
8
8
|
if TYPE_CHECKING:
|
|
9
9
|
from collections.abc import Callable, Generator
|
|
10
10
|
from types import TracebackType
|
|
11
11
|
|
|
12
|
-
from rich.progress import Progress
|
|
12
|
+
from rich.progress import Progress, Task
|
|
13
|
+
from rich.text import Text
|
|
13
14
|
|
|
15
|
+
_T = TypeVar("_T")
|
|
14
16
|
ProgressHook: TypeAlias = Callable[[float], None]
|
|
15
17
|
|
|
16
18
|
class ProgressHookContext(Protocol):
|
|
@@ -33,12 +35,13 @@ current_hook: ContextVar[ProgressHook] = ContextVar("current_hook", default=lamb
|
|
|
33
35
|
|
|
34
36
|
|
|
35
37
|
@contextlib.contextmanager
|
|
36
|
-
def
|
|
37
|
-
|
|
38
|
+
def _enter_context(context_var: ContextVar[_T], value: _T) -> Generator[None]:
|
|
39
|
+
"""Context manager for context vars"""
|
|
40
|
+
token = context_var.set(value)
|
|
38
41
|
try:
|
|
39
42
|
yield
|
|
40
43
|
finally:
|
|
41
|
-
|
|
44
|
+
context_var.reset(token)
|
|
42
45
|
|
|
43
46
|
|
|
44
47
|
@contextlib.contextmanager
|
|
@@ -48,12 +51,8 @@ def new_task(description: str, total: float, kind: Literal["UP", "DOWN"]) -> Gen
|
|
|
48
51
|
yield
|
|
49
52
|
return
|
|
50
53
|
|
|
51
|
-
with factory(
|
|
52
|
-
|
|
53
|
-
try:
|
|
54
|
-
yield
|
|
55
|
-
finally:
|
|
56
|
-
current_hook.reset(token)
|
|
54
|
+
with factory(description, total, kind) as new_hook, _enter_context(current_hook, new_hook):
|
|
55
|
+
yield
|
|
57
56
|
|
|
58
57
|
|
|
59
58
|
@contextlib.contextmanager
|
|
@@ -66,45 +65,64 @@ def new_progress() -> Generator[None]:
|
|
|
66
65
|
def hook_factory(*args, **kwargs):
|
|
67
66
|
return _new_rich_task(progress, *args, **kwargs)
|
|
68
67
|
|
|
69
|
-
with
|
|
68
|
+
with (
|
|
69
|
+
progress,
|
|
70
|
+
_enter_context(_PROGRESS_HOOK_FACTORY, hook_factory),
|
|
71
|
+
):
|
|
70
72
|
yield
|
|
71
73
|
|
|
72
74
|
|
|
73
|
-
def _truncate_desc(desc: str, length: int = 30, placeholder: str = "...") -> str:
|
|
74
|
-
if len(desc) < length:
|
|
75
|
-
return desc
|
|
76
|
-
|
|
77
|
-
return f"{desc[: length - len(placeholder)]}{placeholder}"
|
|
78
|
-
|
|
79
|
-
|
|
80
75
|
def _new_rich_progress() -> Progress | None:
|
|
81
76
|
try:
|
|
77
|
+
from rich import get_console
|
|
82
78
|
from rich.progress import (
|
|
83
79
|
BarColumn,
|
|
84
80
|
DownloadColumn,
|
|
85
81
|
Progress,
|
|
86
82
|
SpinnerColumn,
|
|
83
|
+
TextColumn,
|
|
87
84
|
TimeRemainingColumn,
|
|
88
85
|
TransferSpeedColumn,
|
|
89
86
|
)
|
|
87
|
+
from rich.table import Column
|
|
90
88
|
except ImportError:
|
|
91
89
|
return None
|
|
92
90
|
|
|
93
|
-
|
|
94
|
-
|
|
95
|
-
|
|
96
|
-
|
|
97
|
-
|
|
98
|
-
|
|
99
|
-
|
|
100
|
-
|
|
101
|
-
|
|
102
|
-
|
|
103
|
-
|
|
104
|
-
|
|
105
|
-
|
|
106
|
-
|
|
107
|
-
)
|
|
91
|
+
console = get_console()
|
|
92
|
+
|
|
93
|
+
class AutoTruncatedTextColumn(TextColumn):
|
|
94
|
+
def render(self, task: Task) -> Text:
|
|
95
|
+
text = super().render(task)
|
|
96
|
+
width = console.width
|
|
97
|
+
available_witdh = min((width * 60 // 100), (width - 65))
|
|
98
|
+
desc_limit = max(available_witdh, 8)
|
|
99
|
+
text.truncate(desc_limit, overflow="ellipsis")
|
|
100
|
+
return text
|
|
101
|
+
|
|
102
|
+
return Progress(
|
|
103
|
+
"[{task.fields[kind]}]",
|
|
104
|
+
SpinnerColumn(),
|
|
105
|
+
AutoTruncatedTextColumn("{task.description}"),
|
|
106
|
+
BarColumn(
|
|
107
|
+
bar_width=None,
|
|
108
|
+
),
|
|
109
|
+
"[progress.percentage]{task.percentage:>6.1f}%",
|
|
110
|
+
"•",
|
|
111
|
+
DownloadColumn(
|
|
112
|
+
table_column=Column(justify="right", no_wrap=True),
|
|
113
|
+
),
|
|
114
|
+
"•",
|
|
115
|
+
TransferSpeedColumn(table_column=Column(justify="right", no_wrap=True)),
|
|
116
|
+
"•",
|
|
117
|
+
TimeRemainingColumn(
|
|
118
|
+
compact=True,
|
|
119
|
+
elapsed_when_finished=True,
|
|
120
|
+
table_column=Column(justify="right", no_wrap=True),
|
|
121
|
+
),
|
|
122
|
+
transient=True,
|
|
123
|
+
console=console,
|
|
124
|
+
expand=True,
|
|
125
|
+
)
|
|
108
126
|
|
|
109
127
|
|
|
110
128
|
@contextlib.contextmanager
|
|
@@ -3,24 +3,24 @@ from __future__ import annotations
|
|
|
3
3
|
import asyncio
|
|
4
4
|
import dataclasses
|
|
5
5
|
import logging
|
|
6
|
-
from pathlib import Path
|
|
6
|
+
from pathlib import Path
|
|
7
7
|
from typing import TYPE_CHECKING, Any, ClassVar, TypeAlias
|
|
8
8
|
|
|
9
|
+
import aiohttp
|
|
9
10
|
import yarl
|
|
10
11
|
|
|
11
12
|
from mega import download, progress
|
|
12
13
|
from mega.api import AbstractApiClient, MegaAPI
|
|
13
14
|
from mega.crypto import b64_to_a32, b64_url_decode, decrypt_attr
|
|
14
|
-
from mega.data_structures import Attributes, Crypto, Node, NodeType
|
|
15
|
+
from mega.data_structures import Attributes, Crypto, Node, NodeID, NodeType
|
|
16
|
+
from mega.download import DownloadResults
|
|
15
17
|
from mega.filesystem import FileSystem
|
|
16
|
-
from mega.utils import Site,
|
|
18
|
+
from mega.utils import Site, async_map
|
|
17
19
|
|
|
18
20
|
if TYPE_CHECKING:
|
|
19
21
|
from collections.abc import Iterable
|
|
20
22
|
from os import PathLike
|
|
21
23
|
|
|
22
|
-
import aiohttp
|
|
23
|
-
|
|
24
24
|
from mega.data_structures import GetNodesResponse, NodeSerialized
|
|
25
25
|
|
|
26
26
|
|
|
@@ -94,7 +94,8 @@ class TransferItClient(AbstractApiClient):
|
|
|
94
94
|
self,
|
|
95
95
|
transfer_id: TransferID,
|
|
96
96
|
output_dir: str | PathLike[str] | None = None,
|
|
97
|
-
|
|
97
|
+
root_id: NodeID | None = None,
|
|
98
|
+
) -> DownloadResults:
|
|
98
99
|
"""Recursively download all files from a transfer, preserving its internal directory structure.
|
|
99
100
|
|
|
100
101
|
Returns:
|
|
@@ -103,42 +104,31 @@ class TransferItClient(AbstractApiClient):
|
|
|
103
104
|
"""
|
|
104
105
|
fs = await self.get_filesystem(transfer_id)
|
|
105
106
|
|
|
106
|
-
base_path = Path(output_dir or ".")
|
|
107
|
+
base_path = Path(output_dir or ".") / f"transfer.it ({transfer_id})"
|
|
107
108
|
folder_url = f"https://transfer.it/t/{transfer_id}"
|
|
108
109
|
|
|
109
|
-
async def
|
|
110
|
+
async def download(file: Node) -> tuple[NodeID, Path | Exception]:
|
|
110
111
|
web_url = folder_url + f"#{file.id}"
|
|
111
|
-
|
|
112
|
+
output_path = base_path / fs.relative_path(file.id)
|
|
112
113
|
dl_link = self.create_download_url(transfer_id, file)
|
|
113
114
|
try:
|
|
114
|
-
|
|
115
|
+
result = await self._download_file(dl_link, output_path)
|
|
115
116
|
except Exception as exc:
|
|
116
|
-
|
|
117
|
-
|
|
118
|
-
|
|
119
|
-
|
|
120
|
-
|
|
121
|
-
|
|
122
|
-
yield (worker(file, path))
|
|
117
|
+
if isinstance(exc, aiohttp.ClientResponseError):
|
|
118
|
+
msg = f"[{exc.status}] {exc.message}"
|
|
119
|
+
else:
|
|
120
|
+
msg = f"({type(exc).__name__})"
|
|
121
|
+
logger.error(f'Unable to download {web_url} to "{output_path}" {msg}')
|
|
122
|
+
result = exc
|
|
123
123
|
|
|
124
|
-
|
|
125
|
-
success: list[Path] = []
|
|
126
|
-
fails: list[Exception] = [
|
|
127
|
-
result for result in results if isinstance(result, Exception) or (success.append(result) and False)
|
|
128
|
-
]
|
|
124
|
+
return file.id, result
|
|
129
125
|
|
|
130
|
-
|
|
131
|
-
|
|
132
|
-
async def _download_file(
|
|
133
|
-
self,
|
|
134
|
-
dl_link: str,
|
|
135
|
-
output_folder: str | PathLike[str] | None = None,
|
|
136
|
-
output_name: str | None = None,
|
|
137
|
-
) -> Path:
|
|
138
|
-
name = output_name or yarl.URL(dl_link).query["fn"]
|
|
139
|
-
output_path = Path(output_folder or Path()) / name
|
|
126
|
+
results = await async_map(download, fs.files_from(root_id))
|
|
127
|
+
return DownloadResults.split(dict(results))
|
|
140
128
|
|
|
129
|
+
async def _download_file(self, dl_link: str, output_path: str | PathLike[str]) -> Path:
    """Fetch one file from its direct download link and stream it to *output_path*.

    Registers a "DOWN" progress task sized from the response's Content-Length.
    """
    destination = Path(output_path)
    headers = {"Referer": "https://transfer.it/"}
    async with self._api.get(dl_link, headers=headers) as response:
        total_bytes = int(response.headers["Content-Length"])
        with progress.new_task(destination.name, total_bytes, "DOWN"):
            return await download.stream(response.content, destination)
|
|
@@ -11,9 +11,10 @@ from typing import TYPE_CHECKING, Literal, TypeVar, overload
|
|
|
11
11
|
import yarl
|
|
12
12
|
|
|
13
13
|
if TYPE_CHECKING:
|
|
14
|
-
from collections.abc import Awaitable, Iterable, Sequence
|
|
14
|
+
from collections.abc import Awaitable, Callable, Iterable, Sequence
|
|
15
15
|
|
|
16
|
-
|
|
16
|
+
_T1 = TypeVar("_T1")
|
|
17
|
+
_T2 = TypeVar("_T2")
|
|
17
18
|
|
|
18
19
|
|
|
19
20
|
logger = logging.getLogger(__name__)
|
|
@@ -76,60 +77,59 @@ def transform_v1_url(url: yarl.URL) -> yarl.URL:
|
|
|
76
77
|
|
|
77
78
|
|
|
78
79
|
@overload
|
|
79
|
-
async def
|
|
80
|
-
|
|
81
|
-
|
|
80
|
+
async def async_map(
|
|
81
|
+
coro_factory: Callable[[_T1], Awaitable[_T2]],
|
|
82
|
+
values: Iterable[_T1],
|
|
82
83
|
*,
|
|
83
|
-
return_exceptions: Literal[
|
|
84
|
-
|
|
84
|
+
return_exceptions: Literal[True],
|
|
85
|
+
task_limit: int = 10,
|
|
86
|
+
) -> list[_T2 | Exception]: ...
|
|
85
87
|
|
|
86
88
|
|
|
87
89
|
@overload
|
|
88
|
-
async def
|
|
89
|
-
|
|
90
|
-
|
|
90
|
+
async def async_map(
|
|
91
|
+
coro_factory: Callable[[_T1], Awaitable[_T2]],
|
|
92
|
+
values: Iterable[_T1],
|
|
91
93
|
*,
|
|
92
|
-
return_exceptions:
|
|
93
|
-
|
|
94
|
+
return_exceptions: Literal[False] = False,
|
|
95
|
+
task_limit: int = 10,
|
|
96
|
+
) -> list[_T2]: ...
|
|
94
97
|
|
|
95
98
|
|
|
96
|
-
async def
|
|
97
|
-
|
|
98
|
-
|
|
99
|
+
async def async_map(
|
|
100
|
+
coro_factory: Callable[[_T1], Awaitable[_T2]],
|
|
101
|
+
values: Iterable[_T1],
|
|
99
102
|
*,
|
|
100
|
-
return_exceptions: bool =
|
|
101
|
-
|
|
102
|
-
|
|
103
|
+
return_exceptions: bool = False,
|
|
104
|
+
task_limit: int = 10,
|
|
105
|
+
) -> Sequence[_T2 | Exception]:
|
|
106
|
+
"""Creates tasks lazily to minimize event loop overhead.
|
|
103
107
|
|
|
104
|
-
This function ensures there are never more than `
|
|
105
|
-
|
|
106
|
-
If `return_exceptions` is `False`, any exceptions other than `asyncio.CancelledError` raised within
|
|
107
|
-
a task will cancel all remaining tasks and wait for them to exit.
|
|
108
|
-
The exceptions are then combined and raised as an `ExceptionGroup`.
|
|
108
|
+
This function ensures there are never more than `task_limit` tasks are created at any given time.
|
|
109
109
|
"""
|
|
110
|
-
semaphore = asyncio.BoundedSemaphore(
|
|
111
|
-
tasks: list[asyncio.Task[
|
|
112
|
-
|
|
113
|
-
abort = False
|
|
110
|
+
semaphore = asyncio.BoundedSemaphore(task_limit)
|
|
111
|
+
tasks: list[asyncio.Task[_T2 | Exception]] = []
|
|
112
|
+
abort = asyncio.Event()
|
|
114
113
|
|
|
115
|
-
async def worker(coro: Awaitable[
|
|
114
|
+
async def worker(coro: Awaitable[_T2]) -> _T2 | Exception:
|
|
116
115
|
try:
|
|
117
116
|
return await coro
|
|
118
117
|
except Exception as e:
|
|
119
118
|
if return_exceptions:
|
|
120
119
|
return e
|
|
121
|
-
|
|
122
|
-
abort = True
|
|
120
|
+
abort.set()
|
|
123
121
|
raise
|
|
124
122
|
|
|
125
123
|
finally:
|
|
126
124
|
semaphore.release()
|
|
127
125
|
|
|
128
126
|
async with asyncio.TaskGroup() as tg:
|
|
129
|
-
for
|
|
130
|
-
if abort:
|
|
127
|
+
for value in values:
|
|
128
|
+
if abort.is_set():
|
|
131
129
|
break
|
|
130
|
+
|
|
132
131
|
await semaphore.acquire()
|
|
132
|
+
coro = coro_factory(value)
|
|
133
133
|
tasks.append(tg.create_task(worker(coro)))
|
|
134
134
|
|
|
135
135
|
return [t.result() for t in tasks]
|
|
@@ -1,87 +0,0 @@
|
|
|
1
|
-
from __future__ import annotations
|
|
2
|
-
|
|
3
|
-
import asyncio
|
|
4
|
-
import contextlib
|
|
5
|
-
import errno
|
|
6
|
-
import logging
|
|
7
|
-
import shutil
|
|
8
|
-
import tempfile
|
|
9
|
-
from pathlib import Path
|
|
10
|
-
from typing import IO, TYPE_CHECKING
|
|
11
|
-
|
|
12
|
-
from mega import progress
|
|
13
|
-
from mega.chunker import MegaChunker
|
|
14
|
-
from mega.crypto import get_chunks
|
|
15
|
-
|
|
16
|
-
if TYPE_CHECKING:
|
|
17
|
-
from collections.abc import AsyncGenerator
|
|
18
|
-
|
|
19
|
-
import aiohttp
|
|
20
|
-
|
|
21
|
-
|
|
22
|
-
logger = logging.getLogger(__name__)
|
|
23
|
-
|
|
24
|
-
|
|
25
|
-
async def encrypted_stream(
    stream: aiohttp.StreamReader,
    output_path: Path,
    file_size: int,
    iv: tuple[int, int],
    meta_mac: tuple[int, int],
    key: tuple[int, int, int, int],
) -> Path:
    """Decrypt a MEGA download stream chunk by chunk into `output_path`.

    Reads exactly the chunk layout produced by `get_chunks(file_size)`, decrypts
    each chunk through `MegaChunker`, reports decrypted byte counts to the
    context-local progress hook, and runs `chunker.check_integrity()` (which
    presumably validates `meta_mac` — confirm against MegaChunker) before returning.

    Raises:
        FileExistsError: If `output_path` already exists.
    """
    # exists() is a blocking stat call; run it off the event loop.
    if await asyncio.to_thread(output_path.exists):
        raise FileExistsError(errno.EEXIST, output_path)

    chunker = MegaChunker(iv, key, meta_mac)
    progress_hook = progress.current_hook.get()
    # Write into a temp file first; it is only moved into place on success.
    async with _new_temp_download(output_path) as output:
        for _, chunk_size in get_chunks(file_size):
            # readexactly: the encrypted layout requires full chunks.
            encrypted_chunk = await stream.readexactly(chunk_size)
            chunk = chunker.read(encrypted_chunk)
            output.write(chunk)
            progress_hook(len(chunk))

        chunker.check_integrity()

        return output_path
|
|
48
|
-
|
|
49
|
-
|
|
50
|
-
async def stream(stream: aiohttp.StreamReader, output_path: Path) -> Path:
    """Stream a plaintext download into `output_path`, reporting progress.

    Bytes are written to a temp file (moved into place on success) and each
    chunk's size is reported to the context-local progress hook.

    Raises:
        FileExistsError: If `output_path` already exists.
    """
    # exists() is a blocking stat call; run it off the event loop.
    if await asyncio.to_thread(output_path.exists):
        raise FileExistsError(errno.EEXIST, output_path)

    chunk_size = 1024 * 1024 * 5  # 5MB
    progress_hook = progress.current_hook.get()
    async with _new_temp_download(output_path) as output:
        async for chunk in stream.iter_chunked(chunk_size):
            output.write(chunk)
            progress_hook(len(chunk))

        return output_path
|
|
62
|
-
|
|
63
|
-
|
|
64
|
-
@contextlib.asynccontextmanager
async def _new_temp_download(output_path: Path) -> AsyncGenerator[IO[bytes]]:
    """Yield a temp file to write into; move it to `output_path` on success, delete it otherwise.

    The move only happens when the caller's block exits without raising; the
    `finally` cleanup then unlinks the (already moved or still present) temp file.
    """
    # We need NamedTemporaryFile to not delete on file.close() but on context exit, which is not supported until python 3.12
    temp_file = tempfile.NamedTemporaryFile(prefix="mega_py_", delete=False)
    logger.info(f'Created temp file "{temp_file.name!s}" for "{output_path!s}"')
    try:
        yield temp_file

        # Only reached when the caller finished writing without raising.
        def move():
            temp_file.close()
            output_path.parent.mkdir(parents=True, exist_ok=True)
            shutil.move(temp_file.name, output_path)
            logger.info(f'Moved temp file "{temp_file.name!s}" to "{output_path!s}"')

        await asyncio.to_thread(move)

    finally:
        # Runs on both paths; after a successful move the unlink is a no-op.
        def delete():
            if not temp_file.closed:
                temp_file.close()
            Path(temp_file.name).unlink(missing_ok=True)

        await asyncio.to_thread(delete)
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|