rclone-api 1.3.17__tar.gz → 1.3.19__tar.gz
This diff compares the contents of two publicly released versions of the package, as published to their public registry. It is provided for informational purposes only.
- {rclone_api-1.3.17 → rclone_api-1.3.19}/PKG-INFO +1 -1
- {rclone_api-1.3.17 → rclone_api-1.3.19}/pyproject.toml +1 -1
- {rclone_api-1.3.17 → rclone_api-1.3.19}/src/rclone_api/db/db.py +277 -277
- {rclone_api-1.3.17 → rclone_api-1.3.19}/src/rclone_api/db/models.py +57 -57
- {rclone_api-1.3.17 → rclone_api-1.3.19}/src/rclone_api/file.py +2 -4
- {rclone_api-1.3.17 → rclone_api-1.3.19}/src/rclone_api/mount_read_chunker.py +2 -1
- {rclone_api-1.3.17 → rclone_api-1.3.19}/src/rclone_api/s3/chunk_task.py +3 -1
- {rclone_api-1.3.17 → rclone_api-1.3.19}/src/rclone_api/s3/upload_file_multipart.py +10 -6
- {rclone_api-1.3.17 → rclone_api-1.3.19}/src/rclone_api/types.py +23 -1
- {rclone_api-1.3.17 → rclone_api-1.3.19}/src/rclone_api.egg-info/PKG-INFO +1 -1
- {rclone_api-1.3.17 → rclone_api-1.3.19}/tests/test_file_item.py +24 -0
- {rclone_api-1.3.17 → rclone_api-1.3.19}/.aiderignore +0 -0
- {rclone_api-1.3.17 → rclone_api-1.3.19}/.github/workflows/lint.yml +0 -0
- {rclone_api-1.3.17 → rclone_api-1.3.19}/.github/workflows/push_macos.yml +0 -0
- {rclone_api-1.3.17 → rclone_api-1.3.19}/.github/workflows/push_ubuntu.yml +0 -0
- {rclone_api-1.3.17 → rclone_api-1.3.19}/.github/workflows/push_win.yml +0 -0
- {rclone_api-1.3.17 → rclone_api-1.3.19}/.gitignore +0 -0
- {rclone_api-1.3.17 → rclone_api-1.3.19}/.pylintrc +0 -0
- {rclone_api-1.3.17 → rclone_api-1.3.19}/.vscode/launch.json +0 -0
- {rclone_api-1.3.17 → rclone_api-1.3.19}/.vscode/settings.json +0 -0
- {rclone_api-1.3.17 → rclone_api-1.3.19}/.vscode/tasks.json +0 -0
- {rclone_api-1.3.17 → rclone_api-1.3.19}/LICENSE +0 -0
- {rclone_api-1.3.17 → rclone_api-1.3.19}/MANIFEST.in +0 -0
- {rclone_api-1.3.17 → rclone_api-1.3.19}/README.md +0 -0
- {rclone_api-1.3.17 → rclone_api-1.3.19}/clean +0 -0
- {rclone_api-1.3.17 → rclone_api-1.3.19}/install +0 -0
- {rclone_api-1.3.17 → rclone_api-1.3.19}/lint +0 -0
- {rclone_api-1.3.17 → rclone_api-1.3.19}/requirements.testing.txt +0 -0
- {rclone_api-1.3.17 → rclone_api-1.3.19}/setup.cfg +0 -0
- {rclone_api-1.3.17 → rclone_api-1.3.19}/setup.py +0 -0
- {rclone_api-1.3.17 → rclone_api-1.3.19}/src/rclone_api/__init__.py +0 -0
- {rclone_api-1.3.17 → rclone_api-1.3.19}/src/rclone_api/assets/example.txt +0 -0
- {rclone_api-1.3.17 → rclone_api-1.3.19}/src/rclone_api/cli.py +0 -0
- {rclone_api-1.3.17 → rclone_api-1.3.19}/src/rclone_api/cmd/analyze.py +0 -0
- {rclone_api-1.3.17 → rclone_api-1.3.19}/src/rclone_api/cmd/copy_large_s3.py +0 -0
- {rclone_api-1.3.17 → rclone_api-1.3.19}/src/rclone_api/cmd/list_files.py +0 -0
- {rclone_api-1.3.17 → rclone_api-1.3.19}/src/rclone_api/cmd/save_to_db.py +0 -0
- {rclone_api-1.3.17 → rclone_api-1.3.19}/src/rclone_api/completed_process.py +0 -0
- {rclone_api-1.3.17 → rclone_api-1.3.19}/src/rclone_api/config.py +0 -0
- {rclone_api-1.3.17 → rclone_api-1.3.19}/src/rclone_api/convert.py +0 -0
- {rclone_api-1.3.17 → rclone_api-1.3.19}/src/rclone_api/db/__init__.py +0 -0
- {rclone_api-1.3.17 → rclone_api-1.3.19}/src/rclone_api/deprecated.py +0 -0
- {rclone_api-1.3.17 → rclone_api-1.3.19}/src/rclone_api/diff.py +0 -0
- {rclone_api-1.3.17 → rclone_api-1.3.19}/src/rclone_api/dir.py +0 -0
- {rclone_api-1.3.17 → rclone_api-1.3.19}/src/rclone_api/dir_listing.py +0 -0
- {rclone_api-1.3.17 → rclone_api-1.3.19}/src/rclone_api/exec.py +0 -0
- {rclone_api-1.3.17 → rclone_api-1.3.19}/src/rclone_api/experimental/flags.py +0 -0
- {rclone_api-1.3.17 → rclone_api-1.3.19}/src/rclone_api/experimental/flags_base.py +0 -0
- {rclone_api-1.3.17 → rclone_api-1.3.19}/src/rclone_api/file_item.py +0 -0
- {rclone_api-1.3.17 → rclone_api-1.3.19}/src/rclone_api/filelist.py +0 -0
- {rclone_api-1.3.17 → rclone_api-1.3.19}/src/rclone_api/group_files.py +0 -0
- {rclone_api-1.3.17 → rclone_api-1.3.19}/src/rclone_api/log.py +0 -0
- {rclone_api-1.3.17 → rclone_api-1.3.19}/src/rclone_api/mount.py +0 -0
- {rclone_api-1.3.17 → rclone_api-1.3.19}/src/rclone_api/process.py +0 -0
- {rclone_api-1.3.17 → rclone_api-1.3.19}/src/rclone_api/profile/mount_copy_bytes.py +0 -0
- {rclone_api-1.3.17 → rclone_api-1.3.19}/src/rclone_api/rclone.py +0 -0
- {rclone_api-1.3.17 → rclone_api-1.3.19}/src/rclone_api/remote.py +0 -0
- {rclone_api-1.3.17 → rclone_api-1.3.19}/src/rclone_api/rpath.py +0 -0
- {rclone_api-1.3.17 → rclone_api-1.3.19}/src/rclone_api/s3/api.py +0 -0
- {rclone_api-1.3.17 → rclone_api-1.3.19}/src/rclone_api/s3/basic_ops.py +0 -0
- {rclone_api-1.3.17 → rclone_api-1.3.19}/src/rclone_api/s3/chunk_types.py +0 -0
- {rclone_api-1.3.17 → rclone_api-1.3.19}/src/rclone_api/s3/create.py +0 -0
- {rclone_api-1.3.17 → rclone_api-1.3.19}/src/rclone_api/s3/types.py +0 -0
- {rclone_api-1.3.17 → rclone_api-1.3.19}/src/rclone_api/scan_missing_folders.py +0 -0
- {rclone_api-1.3.17 → rclone_api-1.3.19}/src/rclone_api/util.py +0 -0
- {rclone_api-1.3.17 → rclone_api-1.3.19}/src/rclone_api/walk.py +0 -0
- {rclone_api-1.3.17 → rclone_api-1.3.19}/src/rclone_api.egg-info/SOURCES.txt +0 -0
- {rclone_api-1.3.17 → rclone_api-1.3.19}/src/rclone_api.egg-info/dependency_links.txt +0 -0
- {rclone_api-1.3.17 → rclone_api-1.3.19}/src/rclone_api.egg-info/entry_points.txt +0 -0
- {rclone_api-1.3.17 → rclone_api-1.3.19}/src/rclone_api.egg-info/requires.txt +0 -0
- {rclone_api-1.3.17 → rclone_api-1.3.19}/src/rclone_api.egg-info/top_level.txt +0 -0
- {rclone_api-1.3.17 → rclone_api-1.3.19}/test +0 -0
- {rclone_api-1.3.17 → rclone_api-1.3.19}/tests/archive/test_paramiko.py.disabled +0 -0
- {rclone_api-1.3.17 → rclone_api-1.3.19}/tests/test_cmd_list_files.py +0 -0
- {rclone_api-1.3.17 → rclone_api-1.3.19}/tests/test_copy.py +0 -0
- {rclone_api-1.3.17 → rclone_api-1.3.19}/tests/test_copy_bytes.py +0 -0
- {rclone_api-1.3.17 → rclone_api-1.3.19}/tests/test_copy_file_resumable_s3.py +0 -0
- {rclone_api-1.3.17 → rclone_api-1.3.19}/tests/test_copy_files.py +0 -0
- {rclone_api-1.3.17 → rclone_api-1.3.19}/tests/test_db.py +0 -0
- {rclone_api-1.3.17 → rclone_api-1.3.19}/tests/test_diff.py +0 -0
- {rclone_api-1.3.17 → rclone_api-1.3.19}/tests/test_group_files.py +0 -0
- {rclone_api-1.3.17 → rclone_api-1.3.19}/tests/test_is_synced.py +0 -0
- {rclone_api-1.3.17 → rclone_api-1.3.19}/tests/test_ls.py +0 -0
- {rclone_api-1.3.17 → rclone_api-1.3.19}/tests/test_ls_stream_files.py +0 -0
- {rclone_api-1.3.17 → rclone_api-1.3.19}/tests/test_mount.py +0 -0
- {rclone_api-1.3.17 → rclone_api-1.3.19}/tests/test_mount_s3.py +0 -0
- {rclone_api-1.3.17 → rclone_api-1.3.19}/tests/test_obscure.py +0 -0
- {rclone_api-1.3.17 → rclone_api-1.3.19}/tests/test_rclone_config.py +0 -0
- {rclone_api-1.3.17 → rclone_api-1.3.19}/tests/test_remote_control.py +0 -0
- {rclone_api-1.3.17 → rclone_api-1.3.19}/tests/test_remotes.py +0 -0
- {rclone_api-1.3.17 → rclone_api-1.3.19}/tests/test_s3.py +0 -0
- {rclone_api-1.3.17 → rclone_api-1.3.19}/tests/test_scan_missing_folders.py +0 -0
- {rclone_api-1.3.17 → rclone_api-1.3.19}/tests/test_size_files.py +0 -0
- {rclone_api-1.3.17 → rclone_api-1.3.19}/tests/test_size_suffix.py +0 -0
- {rclone_api-1.3.17 → rclone_api-1.3.19}/tests/test_walk.py +0 -0
- {rclone_api-1.3.17 → rclone_api-1.3.19}/tox.ini +0 -0
- {rclone_api-1.3.17 → rclone_api-1.3.19}/upload_package.sh +0 -0
src/rclone_api/db/db.py (+277 −277): every line of the file is removed and re-added with identical text, so the change is whitespace-only (most likely line endings). The file content, shown once:

```python
"""
Database module for rclone_api.
"""

import os
from threading import Lock
from typing import Optional

from sqlmodel import Session, SQLModel, create_engine, select

from rclone_api.db.models import RepositoryMeta, create_file_entry_model
from rclone_api.file import FileItem


def _to_table_name(remote_name: str) -> str:
    return (
        "files_"
        + remote_name.replace(":", "_").replace(" ", "_").replace("/", "_").lower()
    )


class DB:
    """Database class for rclone_api."""

    def __init__(self, db_path_url: str):
        """Initialize the database.

        Args:
            db_path: Path to the database file
        """
        self.db_path_url = db_path_url

        # When running multiple commands in parallel, the database connection may fail once
        # when the database is first populated.
        retries = 2
        for _ in range(retries):
            try:
                self.engine = create_engine(db_path_url)
                SQLModel.metadata.create_all(self.engine)
                break
            except Exception as e:
                print(f"Failed to connect to database. Retrying... {e}")
        else:
            raise Exception("Failed to connect to database.")
        self._cache: dict[str, DBRepo] = {}
        self._cache_lock = Lock()

    def drop_all(self) -> None:
        """Drop all tables in the database."""
        SQLModel.metadata.drop_all(self.engine)

    def close(self) -> None:
        """Close the database connection and release resources."""
        if hasattr(self, "engine") and self.engine is not None:
            self.engine.dispose()

    def add_files(self, files: list[FileItem]) -> None:
        """Add files to the database.

        Args:
            remote_name: Name of the remote
            files: List of file entries
        """

        partition: dict[str, list[FileItem]] = {}
        for file in files:
            partition.setdefault(file.remote, []).append(file)

        for remote_name, files in partition.items():
            repo = self.get_or_create_repo(remote_name)
            repo.insert_files(files)

    def query_files(self, remote_name: str) -> list[FileItem]:
        """Query files from the database.

        Args:
            remote_name: Name of the remote
        """
        repo = self.get_or_create_repo(remote_name)
        files = repo.get_files()
        out: list[FileItem] = []
        for file in files:
            out.append(file)
        return out

    def get_or_create_repo(self, remote_name: str) -> "DBRepo":
        """Get a table section for a remote.

        Args:
            remote_name: Name of the remote
            table_name: Optional table name, will be derived from remote_name if not provided

        Returns:
            DBRepo: A table section for the remote
        """
        with self._cache_lock:
            if remote_name in self._cache:
                return self._cache[remote_name]
            table_name = _to_table_name(remote_name)
            out = DBRepo(self.engine, remote_name, table_name)
            self._cache[remote_name] = out
            return out


class DBRepo:
    """Table repo remote."""

    def __init__(self, engine, remote_name: str, table_name: Optional[str] = None):
        """Initialize a table section.

        Args:
            engine: SQLAlchemy engine
            remote_name: Name of the remote
            table_name: Optional table name, will be derived from remote_name if not provided
        """
        self.engine = engine
        self.remote_name = remote_name

        # If table_name is not provided, derive one from the remote name.
        if table_name is None:
            # table_name = (
            #     "file_entries_"
            #     + remote_name.replace(":", "_").replace(" ", "_").replace("/", "_").lower()
            # )
            table_name = _to_table_name(remote_name)
        self.table_name = table_name

        # Check if repository exists in RepositoryMeta; if not, create a new entry.
        with Session(self.engine) as session:
            existing_repo = session.exec(
                select(RepositoryMeta).where(
                    RepositoryMeta.repo_name == self.remote_name
                )
            ).first()
            if not existing_repo:
                repo_meta = RepositoryMeta(
                    repo_name=self.remote_name, file_table_name=self.table_name
                )
                session.add(repo_meta)
                session.commit()

        # Dynamically create the file entry model and its table.
        self.FileEntryModel = create_file_entry_model(self.table_name)
        SQLModel.metadata.create_all(self.engine, tables=[self.FileEntryModel.__table__])  # type: ignore

    def insert_file(self, file: FileItem) -> None:
        """Insert a file entry into the table.

        Args:
            file: File entry
        """
        return self.insert_files([file])

    def insert_files(self, files: list[FileItem]) -> None:
        """
        Insert multiple file entries into the table.

        Three bulk operations are performed:
        1. Select: Determine which files already exist.
        2. Insert: Bulk-insert new file entries.
        3. Update: Bulk-update existing file entries.

        The FileEntryModel must define a unique constraint on (path, name) and have a primary key "id".
        """
        # Step 1: Bulk select existing records.
        # get_exists() returns a set of FileItem objects (based on path_no_remote and name) that already exist.
        existing_files = self.get_exists(files)

        # Determine which files need to be updated vs. inserted.
        needs_update = existing_files
        is_new = set(files) - existing_files

        # Step 2: Bulk insert new rows.
        new_values = [
            {
                "path": file.path_no_remote,
                "name": file.name,
                "size": file.size,
                "mime_type": file.mime_type,
                "mod_time": file.mod_time,
                "suffix": file.real_suffix,
            }
            for file in is_new
        ]
        with Session(self.engine) as session:
            if new_values:
                session.bulk_insert_mappings(self.FileEntryModel, new_values)  # type: ignore
                session.commit()

        # Step 3: Bulk update existing rows.
        # First, query the database for the primary keys of rows that match the unique keys in needs_update.
        with Session(self.engine) as session:
            # Collect all unique paths from files needing update.
            update_paths = [file.path_no_remote for file in needs_update]
            # Query for existing rows matching any of these paths.
            db_entries = session.exec(
                select(self.FileEntryModel).where(
                    self.FileEntryModel.path.in_(update_paths)  # type: ignore
                )
            ).all()

            # Build a mapping from the unique key (path, name) to the primary key (id).
            id_map = {(entry.path, entry.name): entry.id for entry in db_entries}

            # Prepare bulk update mappings.
            update_values = []
            for file in needs_update:
                key = (file.path_no_remote, file.name)
                if key in id_map:
                    update_values.append(
                        {
                            "id": id_map[key],
                            "size": file.size,
                            "mime_type": file.mime_type,
                            "mod_time": file.mod_time,
                            "suffix": file.real_suffix,
                        }
                    )
            if update_values:
                session.bulk_update_mappings(self.FileEntryModel, update_values)  # type: ignore
                session.commit()

    def get_exists(self, files: list[FileItem]) -> set[FileItem]:
        """Get file entries from the table that exist among the given files.

        Args:
            files: List of file entries

        Returns:
            Set of FileItem instances whose 'path_no_remote' exists in the table.
        """
        # Extract unique paths from the input files.
        paths = {file.path_no_remote for file in files}

        with Session(self.engine) as session:
            # Execute a single query to fetch all file paths in the table that match the input paths.
            result = session.exec(
                select(self.FileEntryModel.path).where(
                    self.FileEntryModel.path.in_(paths)  # type: ignore
                )
            ).all()
            # Convert the result to a set for fast membership tests.
            existing_paths = set(result)

        # Return the set of FileItem objects that have a path in the existing_paths.
        return {file for file in files if file.path_no_remote in existing_paths}

    def get_files(self) -> list[FileItem]:
        """Get all files in the table.

        Returns:
            list: List of file entries
        """
        # with Session(self.engine) as session:
        #     return session.exec(select(self.FileEntryModel)).all()
        out: list[FileItem] = []
        with Session(self.engine) as session:
            query = session.exec(select(self.FileEntryModel)).all()
            for item in query:
                name = item.name  # type: ignore
                size = item.size  # type: ignore
                mime_type = item.mime_type  # type: ignore
                mod_time = item.mod_time  # type: ignore
                path = item.path  # type: ignore
                parent = os.path.dirname(path)
                if parent == "/" or parent == ".":
                    parent = ""
                o = FileItem(
                    remote=self.remote_name,
                    parent=parent,
                    name=name,
                    size=size,
                    mime_type=mime_type,
                    mod_time=mod_time,
                )
                out.append(o)
        return out
```
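For orientation, a minimal usage sketch of this module. The import path is inferred from the file layout, and the SQLite URL and FileItem values are illustrative, not taken from the package:

```python
from rclone_api.db.db import DB
from rclone_api.file import FileItem

db = DB("sqlite:///files.db")  # any SQLAlchemy-style URL should work here
db.add_files(
    [
        FileItem(
            remote="dst:",
            parent="docs",
            name="report.pdf",
            size=1024,
            mime_type="application/pdf",
            mod_time="2025-01-01T00:00:00Z",
        )
    ]
)
# add_files partitions by remote and upserts into a per-remote table,
# here "files_dst_" (see _to_table_name above).
print(db.query_files("dst:"))
db.close()
```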
src/rclone_api/db/models.py (+57 −57): as with db.py, the removed and re-added lines are textually identical, indicating a whitespace or line-ending-only change. The file content, shown once:

```python
"""
Database models for rclone_api.
"""

from abc import ABC, abstractmethod
from typing import Optional, Type

from sqlalchemy import BigInteger, Column
from sqlmodel import Field, SQLModel


# Meta table that indexes all repositories
class RepositoryMeta(SQLModel, table=True):
    """Repository metadata table."""

    id: Optional[int] = Field(default=None, primary_key=True)
    repo_name: str
    file_table_name: str  # The dedicated table name for file entries


# Base FileEntry model that will be extended
class FileEntry(SQLModel, ABC):
    """Base file entry model with common fields."""

    id: Optional[int] = Field(default=None, primary_key=True)
    path: str = Field(index=True, unique=True)
    suffix: str = Field(index=True)
    name: str
    size: int = Field(sa_column=Column(BigInteger))
    mime_type: str
    mod_time: str
    hash: Optional[str] = Field(default=None)

    @abstractmethod
    def table_name(self) -> str:
        """Return the table name for this file entry model."""
        pass


# Factory to dynamically create a FileEntry model with a given table name
def create_file_entry_model(_table_name: str) -> Type[FileEntry]:
    """Create a file entry model with a given table name.

    Args:
        table_name: Table name

    Returns:
        Type[FileEntryBase]: File entry model class with specified table name
    """

    class FileEntryConcrete(FileEntry, table=True):
        __tablename__ = _table_name  # type: ignore # dynamically set table name

        def table_name(self) -> str:
            return _table_name

    return FileEntryConcrete
```
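A short sketch of how the factory is consumed, mirroring DBRepo.__init__ in db.py above; the table name and in-memory engine are illustrative:

```python
from sqlmodel import SQLModel, create_engine

from rclone_api.db.models import create_file_entry_model

# One concrete model per remote, each bound to its own table name.
FileEntryModel = create_file_entry_model("files_example_remote")

engine = create_engine("sqlite:///:memory:")
SQLModel.metadata.create_all(engine, tables=[FileEntryModel.__table__])
```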
src/rclone_api/file.py, first hunk, inside _get_suffix (indentation of removed lines is approximate; the rendering discards it):

```diff
@@ -50,10 +50,7 @@ def _get_suffix(name: str, chop_compressed_suffixes: bool = True) -> str:
         if chop_compressed_suffixes:
             if last_part == "gz" and len(parts) > 2:
                 parts = parts[:-1]
-
-                # grab the last two parts
-                return ".".join(parts[-2:])
-        return ".".join(parts[1:])
+        return ".".join(parts[-1:])
     except IndexError:
         warnings.warn(f"Invalid name: {name} for normal suffix extraction")
         suffix = Path(name).suffix
```
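The net effect: _get_suffix now keeps only the final extension (after optionally chopping a trailing .gz), where the removed code could return multi-part or garbage suffixes for dotted names. A simplified standalone sketch of the new behavior, without the try/except of the real function:

```python
def get_suffix(name: str, chop_compressed_suffixes: bool = True) -> str:
    parts = name.split(".")
    last_part = parts[-1]
    if chop_compressed_suffixes:
        if last_part == "gz" and len(parts) > 2:
            parts = parts[:-1]  # "backup.sql.gz" -> ["backup", "sql"]
    return ".".join(parts[-1:])  # keep only the final extension


assert get_suffix("acb86a1f632adb2be7cac60d76c3c85b.cbz") == "cbz"
assert get_suffix("3.0.co%3B2-8.pdf") == "pdf"  # dots in the name no longer leak in
assert get_suffix("backup.sql.gz") == "sql"
```

This matches the two regression tests added to tests/test_file_item.py at the end of this diff.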
src/rclone_api/file.py, second hunk: FileItem gains an optional id field, matching the primary key in the db models:

```diff
@@ -74,6 +71,7 @@ class FileItem:
     mime_type: str
     mod_time: str
     hash: str | None = None
+    id: int | None = None

     @property
     def path(self) -> str:
```
src/rclone_api/mount_read_chunker.py (+2 −1); the removed return is truncated in this rendering:

```diff
@@ -115,8 +115,9 @@ class MultiMountFileChunker:
         self._release_mount(mount)

         if isinstance(bytes_or_err, Exception):
+            err: Exception = bytes_or_err
             logger.warning(f"Fetch task returned exception: {bytes_or_err}")
-            return FilePart(payload=
+            return FilePart(payload=err, extra=extra)
         logger.debug(f"Successfully fetched {size} bytes from offset {offset}")
         out = FilePart(payload=bytes_or_err, extra=extra)
         return out
```
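Even with the removed line truncated, the shape of the fix is clear: a failed fetch now comes back as a FilePart carrying the exception, the same envelope the success path uses. A hypothetical consumer-side check (the fetch call and attribute access here are illustrative, not the library's documented API):

```python
# Hypothetical: "chunker" is a MultiMountFileChunker, "extra" the request context.
fp = chunker.fetch(offset, size, extra)
if isinstance(fp.payload, Exception):
    raise fp.payload  # the fetch error traveled inside the FilePart
data = fp.load()      # success path: load the chunk bytes
```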
src/rclone_api/s3/chunk_task.py (+3 −1); the removed call is truncated in this rendering, and the rewrap adds a clarifying comment:

```diff
@@ -115,7 +115,9 @@ class _OnCompleteHandler:

         # done_part_numbers.add(part_number)
         # queue_upload.put(fp)
-        self.part_number_tracker.add_finished_part_number(
+        self.part_number_tracker.add_finished_part_number(
+            part_number
+        )  # in memory database, not persistant to resume.json
         self.queue_upload.put(fp)

```
src/rclone_api/s3/upload_file_multipart.py (+10 −6). Three of the removed lines (old 64-66) are blank in this rendering; their content did not survive extraction:

```diff
@@ -61,25 +61,29 @@ def handle_upload(
     upload_info: UploadInfo, fp: FilePart | EndOfStream
 ) -> FinishedPiece | Exception | EndOfStream:
     if isinstance(fp, EndOfStream):
-
-
-
-        part_number = extra.part_number
-        print(f"Handling upload for {part_number}, size {fp.size}")
+        eos: EndOfStream = fp
+        return eos
+    part_number: int | None = None
     try:
+        assert isinstance(fp.extra, S3FileInfo)
+        extra: S3FileInfo = fp.extra
+        part_number = extra.part_number
+        print(f"Handling upload for {part_number}, size {fp.size}")
+
         part: FinishedPiece = upload_task(
             info=upload_info,
             chunk=fp.load(),
             part_number=part_number,
             retries=upload_info.retries,
         )
-        fp.close()
         return part
     except Exception as e:
         stacktrace = traceback.format_exc()
         msg = f"Error uploading part {part_number}: {e}\n{stacktrace}"
         warnings.warn(msg)
         return e
+    finally:
+        fp.close()


 def prepare_upload_file_multipart(
```
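The restructure fixes a resource leak: fp.close() previously ran only after a successful upload_task, so a raised exception left the chunk's temporary file on disk. Moving it into finally releases the chunk on every exit path while preserving the function's convention of returning (not raising) errors. The pattern in miniature, with placeholder names:

```python
class Resource:
    """Placeholder for FilePart: owns something that must be released."""

    def close(self) -> None:
        print("resource closed")


def do_work(r: Resource) -> str:
    raise RuntimeError("simulated upload failure")


def handle(resource: Resource) -> str | Exception:
    try:
        return do_work(resource)  # may raise
    except Exception as e:
        return e  # errors are returned to the caller, not raised
    finally:
        resource.close()  # runs on success and failure alike


print(handle(Resource()))  # prints "resource closed", then the RuntimeError
```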
src/rclone_api/types.py, first hunk:

```diff
@@ -1,3 +1,4 @@
+import atexit
 import os
 import re
 import time
```
src/rclone_api/types.py, second hunk, a module-level cleanup registry:

```diff
@@ -267,6 +268,26 @@ class EndOfStream:
     pass


+_CLEANUP_LIST: list[Path] = []
+
+
+def _add_for_cleanup(path: Path) -> None:
+    _CLEANUP_LIST.append(path)
+
+
+def _on_exit_cleanup() -> None:
+    paths = list(_CLEANUP_LIST)
+    for path in paths:
+        try:
+            if path.exists():
+                path.unlink()
+        except Exception as e:
+            warnings.warn(f"Cannot cleanup {path}: {e}")
+
+
+atexit.register(_on_exit_cleanup)
+
+
 class FilePart:
     def __init__(self, payload: bytes | Exception, extra: Any) -> None:
         from rclone_api.util import random_str
```
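This is a second guard against the same leak: every chunk file written by FilePart is also registered in a module-level list that an atexit hook sweeps at interpreter shutdown, so chunks are removed even if close() is never reached. A self-contained, runnable version of the pattern (file names are illustrative):

```python
import atexit
import tempfile
import warnings
from pathlib import Path

_CLEANUP_LIST: list[Path] = []


def track(path: Path) -> None:
    _CLEANUP_LIST.append(path)


def _on_exit_cleanup() -> None:
    for path in list(_CLEANUP_LIST):
        try:
            if path.exists():
                path.unlink()
        except Exception as e:
            warnings.warn(f"Cannot cleanup {path}: {e}")


atexit.register(_on_exit_cleanup)

# Any file registered here is deleted at interpreter exit, even if the
# object that created it never cleaned up after itself.
tmp = Path(tempfile.mkdtemp()) / "demo.chunk"
tmp.write_bytes(b"payload")
track(tmp)
```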
src/rclone_api/types.py, third hunk, inside FilePart.__init__:

```diff
@@ -280,8 +301,9 @@ class FilePart:
         self.payload = get_chunk_tmpdir() / f"{random_str(12)}.chunk"
         with _TMP_DIR_ACCESS_LOCK:
             if not self.payload.parent.exists():
-                self.payload.parent.mkdir(parents=True)
+                self.payload.parent.mkdir(parents=True, exist_ok=True)
         self.payload.write_bytes(payload)
+        _add_for_cleanup(self.payload)

     @property
     def size(self) -> int:
```
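The exist_ok change closes a check-then-create race: when several processes share the chunk temp directory, another process can create it between the exists() check and the mkdir call, which previously raised FileExistsError. A sketch of the hardened pattern (the directory path is illustrative):

```python
from pathlib import Path

chunk_dir = Path("/tmp/rclone_chunks")  # illustrative shared temp dir

if not chunk_dir.exists():
    # A concurrent process may create chunk_dir between the check above
    # and the call below; exist_ok=True makes that window harmless.
    chunk_dir.mkdir(parents=True, exist_ok=True)
```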
tests/test_file_item.py (+24), two regression tests for the suffix change:

```diff
@@ -32,6 +32,30 @@ class RcloneTestFileItem(unittest.TestCase):
         )
         self.assertEqual(file_item.real_suffix, "sql")

+    def test_weird_suffix(self) -> None:
+        name = r"%28sici%291096-911x%28199809%2931%3A3%3C170%3A%3Aaid-mpo8%3E3.0.co%3B2-8.pdf"
+        file_item: FileItem = FileItem(
+            remote="remote",
+            parent="parent",
+            name=name,
+            size=1,
+            mime_type="mime_type",
+            mod_time="mod_time",
+        )
+        self.assertEqual(file_item.real_suffix, "pdf")
+
+    def test_weird_suffix2(self) -> None:
+        name = "acb86a1f632adb2be7cac60d76c3c85b.cbz"
+        file_item = FileItem(
+            remote="remote",
+            parent="parent",
+            name=name,
+            size=1,
+            mime_type="mime_type",
+            mod_time="mod_time",
+        )
+        self.assertEqual(file_item.real_suffix, "cbz")
+

 if __name__ == "__main__":
     unittest.main()
```
The remaining 86 files (listed above with +0 -0) are unchanged between the two versions.