rclone-api 1.3.18-py2.py3-none-any.whl → 1.3.19-py2.py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
rclone_api/db/db.py CHANGED
@@ -1,277 +1,277 @@
-"""
-Database module for rclone_api.
-"""
-
-import os
-from threading import Lock
-from typing import Optional
-
-from sqlmodel import Session, SQLModel, create_engine, select
-
-from rclone_api.db.models import RepositoryMeta, create_file_entry_model
-from rclone_api.file import FileItem
-
-
-def _to_table_name(remote_name: str) -> str:
-    return (
-        "files_"
-        + remote_name.replace(":", "_").replace(" ", "_").replace("/", "_").lower()
-    )
-
-
-class DB:
-    """Database class for rclone_api."""
-
-    def __init__(self, db_path_url: str):
-        """Initialize the database.
-
-        Args:
-            db_path: Path to the database file
-        """
-        self.db_path_url = db_path_url
-
-        # When running multiple commands in parallel, the database connection may fail once
-        # when the database is first populated.
-        retries = 2
-        for _ in range(retries):
-            try:
-                self.engine = create_engine(db_path_url)
-                SQLModel.metadata.create_all(self.engine)
-                break
-            except Exception as e:
-                print(f"Failed to connect to database. Retrying... {e}")
-        else:
-            raise Exception("Failed to connect to database.")
-        self._cache: dict[str, DBRepo] = {}
-        self._cache_lock = Lock()
-
-    def drop_all(self) -> None:
-        """Drop all tables in the database."""
-        SQLModel.metadata.drop_all(self.engine)
-
-    def close(self) -> None:
-        """Close the database connection and release resources."""
-        if hasattr(self, "engine") and self.engine is not None:
-            self.engine.dispose()
-
-    def add_files(self, files: list[FileItem]) -> None:
-        """Add files to the database.
-
-        Args:
-            remote_name: Name of the remote
-            files: List of file entries
-        """
-
-        partition: dict[str, list[FileItem]] = {}
-        for file in files:
-            partition.setdefault(file.remote, []).append(file)
-
-        for remote_name, files in partition.items():
-            repo = self.get_or_create_repo(remote_name)
-            repo.insert_files(files)
-
-    def query_files(self, remote_name: str) -> list[FileItem]:
-        """Query files from the database.
-
-        Args:
-            remote_name: Name of the remote
-        """
-        repo = self.get_or_create_repo(remote_name)
-        files = repo.get_files()
-        out: list[FileItem] = []
-        for file in files:
-            out.append(file)
-        return out
-
-    def get_or_create_repo(self, remote_name: str) -> "DBRepo":
-        """Get a table section for a remote.
-
-        Args:
-            remote_name: Name of the remote
-            table_name: Optional table name, will be derived from remote_name if not provided
-
-        Returns:
-            DBRepo: A table section for the remote
-        """
-        with self._cache_lock:
-            if remote_name in self._cache:
-                return self._cache[remote_name]
-            table_name = _to_table_name(remote_name)
-            out = DBRepo(self.engine, remote_name, table_name)
-            self._cache[remote_name] = out
-            return out
-
-
-class DBRepo:
-    """Table repo remote."""
-
-    def __init__(self, engine, remote_name: str, table_name: Optional[str] = None):
-        """Initialize a table section.
-
-        Args:
-            engine: SQLAlchemy engine
-            remote_name: Name of the remote
-            table_name: Optional table name, will be derived from remote_name if not provided
-        """
-        self.engine = engine
-        self.remote_name = remote_name
-
-        # If table_name is not provided, derive one from the remote name.
-        if table_name is None:
-            # table_name = (
-            #     "file_entries_"
-            #     + remote_name.replace(":", "_").replace(" ", "_").replace("/", "_").lower()
-            # )
-            table_name = _to_table_name(remote_name)
-        self.table_name = table_name
-
-        # Check if repository exists in RepositoryMeta; if not, create a new entry.
-        with Session(self.engine) as session:
-            existing_repo = session.exec(
-                select(RepositoryMeta).where(
-                    RepositoryMeta.repo_name == self.remote_name
-                )
-            ).first()
-            if not existing_repo:
-                repo_meta = RepositoryMeta(
-                    repo_name=self.remote_name, file_table_name=self.table_name
-                )
-                session.add(repo_meta)
-                session.commit()
-
-        # Dynamically create the file entry model and its table.
-        self.FileEntryModel = create_file_entry_model(self.table_name)
-        SQLModel.metadata.create_all(self.engine, tables=[self.FileEntryModel.__table__])  # type: ignore
-
-    def insert_file(self, file: FileItem) -> None:
-        """Insert a file entry into the table.
-
-        Args:
-            file: File entry
-        """
-        return self.insert_files([file])
-
-    def insert_files(self, files: list[FileItem]) -> None:
-        """
-        Insert multiple file entries into the table.
-
-        Three bulk operations are performed:
-        1. Select: Determine which files already exist.
-        2. Insert: Bulk-insert new file entries.
-        3. Update: Bulk-update existing file entries.
-
-        The FileEntryModel must define a unique constraint on (path, name) and have a primary key "id".
-        """
-        # Step 1: Bulk select existing records.
-        # get_exists() returns a set of FileItem objects (based on path_no_remote and name) that already exist.
-        existing_files = self.get_exists(files)
-
-        # Determine which files need to be updated vs. inserted.
-        needs_update = existing_files
-        is_new = set(files) - existing_files
-
-        # Step 2: Bulk insert new rows.
-        new_values = [
-            {
-                "path": file.path_no_remote,
-                "name": file.name,
-                "size": file.size,
-                "mime_type": file.mime_type,
-                "mod_time": file.mod_time,
-                "suffix": file.real_suffix,
-            }
-            for file in is_new
-        ]
-        with Session(self.engine) as session:
-            if new_values:
-                session.bulk_insert_mappings(self.FileEntryModel, new_values)  # type: ignore
-                session.commit()
-
-        # Step 3: Bulk update existing rows.
-        # First, query the database for the primary keys of rows that match the unique keys in needs_update.
-        with Session(self.engine) as session:
-            # Collect all unique paths from files needing update.
-            update_paths = [file.path_no_remote for file in needs_update]
-            # Query for existing rows matching any of these paths.
-            db_entries = session.exec(
-                select(self.FileEntryModel).where(
-                    self.FileEntryModel.path.in_(update_paths)  # type: ignore
-                )
-            ).all()
-
-            # Build a mapping from the unique key (path, name) to the primary key (id).
-            id_map = {(entry.path, entry.name): entry.id for entry in db_entries}
-
-            # Prepare bulk update mappings.
-            update_values = []
-            for file in needs_update:
-                key = (file.path_no_remote, file.name)
-                if key in id_map:
-                    update_values.append(
-                        {
-                            "id": id_map[key],
-                            "size": file.size,
-                            "mime_type": file.mime_type,
-                            "mod_time": file.mod_time,
-                            "suffix": file.real_suffix,
-                        }
-                    )
-            if update_values:
-                session.bulk_update_mappings(self.FileEntryModel, update_values)  # type: ignore
-                session.commit()
-
-    def get_exists(self, files: list[FileItem]) -> set[FileItem]:
-        """Get file entries from the table that exist among the given files.
-
-        Args:
-            files: List of file entries
-
-        Returns:
-            Set of FileItem instances whose 'path_no_remote' exists in the table.
-        """
-        # Extract unique paths from the input files.
-        paths = {file.path_no_remote for file in files}
-
-        with Session(self.engine) as session:
-            # Execute a single query to fetch all file paths in the table that match the input paths.
-            result = session.exec(
-                select(self.FileEntryModel.path).where(
-                    self.FileEntryModel.path.in_(paths)  # type: ignore
-                )
-            ).all()
-            # Convert the result to a set for fast membership tests.
-            existing_paths = set(result)
-
-        # Return the set of FileItem objects that have a path in the existing_paths.
-        return {file for file in files if file.path_no_remote in existing_paths}
-
-    def get_files(self) -> list[FileItem]:
-        """Get all files in the table.
-
-        Returns:
-            list: List of file entries
-        """
-        # with Session(self.engine) as session:
-        #     return session.exec(select(self.FileEntryModel)).all()
-        out: list[FileItem] = []
-        with Session(self.engine) as session:
-            query = session.exec(select(self.FileEntryModel)).all()
-            for item in query:
-                name = item.name  # type: ignore
-                size = item.size  # type: ignore
-                mime_type = item.mime_type  # type: ignore
-                mod_time = item.mod_time  # type: ignore
-                path = item.path  # type: ignore
-                parent = os.path.dirname(path)
-                if parent == "/" or parent == ".":
-                    parent = ""
-                o = FileItem(
-                    remote=self.remote_name,
-                    parent=parent,
-                    name=name,
-                    size=size,
-                    mime_type=mime_type,
-                    mod_time=mod_time,
-                )
-                out.append(o)
-        return out
+"""
+Database module for rclone_api.
+"""
+
+import os
+from threading import Lock
+from typing import Optional
+
+from sqlmodel import Session, SQLModel, create_engine, select
+
+from rclone_api.db.models import RepositoryMeta, create_file_entry_model
+from rclone_api.file import FileItem
+
+
+def _to_table_name(remote_name: str) -> str:
+    return (
+        "files_"
+        + remote_name.replace(":", "_").replace(" ", "_").replace("/", "_").lower()
+    )
+
+
+class DB:
+    """Database class for rclone_api."""
+
+    def __init__(self, db_path_url: str):
+        """Initialize the database.
+
+        Args:
+            db_path: Path to the database file
+        """
+        self.db_path_url = db_path_url
+
+        # When running multiple commands in parallel, the database connection may fail once
+        # when the database is first populated.
+        retries = 2
+        for _ in range(retries):
+            try:
+                self.engine = create_engine(db_path_url)
+                SQLModel.metadata.create_all(self.engine)
+                break
+            except Exception as e:
+                print(f"Failed to connect to database. Retrying... {e}")
+        else:
+            raise Exception("Failed to connect to database.")
+        self._cache: dict[str, DBRepo] = {}
+        self._cache_lock = Lock()
+
+    def drop_all(self) -> None:
+        """Drop all tables in the database."""
+        SQLModel.metadata.drop_all(self.engine)
+
+    def close(self) -> None:
+        """Close the database connection and release resources."""
+        if hasattr(self, "engine") and self.engine is not None:
+            self.engine.dispose()
+
+    def add_files(self, files: list[FileItem]) -> None:
+        """Add files to the database.
+
+        Args:
+            remote_name: Name of the remote
+            files: List of file entries
+        """
+
+        partition: dict[str, list[FileItem]] = {}
+        for file in files:
+            partition.setdefault(file.remote, []).append(file)
+
+        for remote_name, files in partition.items():
+            repo = self.get_or_create_repo(remote_name)
+            repo.insert_files(files)
+
+    def query_files(self, remote_name: str) -> list[FileItem]:
+        """Query files from the database.
+
+        Args:
+            remote_name: Name of the remote
+        """
+        repo = self.get_or_create_repo(remote_name)
+        files = repo.get_files()
+        out: list[FileItem] = []
+        for file in files:
+            out.append(file)
+        return out
+
+    def get_or_create_repo(self, remote_name: str) -> "DBRepo":
+        """Get a table section for a remote.
+
+        Args:
+            remote_name: Name of the remote
+            table_name: Optional table name, will be derived from remote_name if not provided
+
+        Returns:
+            DBRepo: A table section for the remote
+        """
+        with self._cache_lock:
+            if remote_name in self._cache:
+                return self._cache[remote_name]
+            table_name = _to_table_name(remote_name)
+            out = DBRepo(self.engine, remote_name, table_name)
+            self._cache[remote_name] = out
+            return out
+
+
+class DBRepo:
+    """Table repo remote."""
+
+    def __init__(self, engine, remote_name: str, table_name: Optional[str] = None):
+        """Initialize a table section.
+
+        Args:
+            engine: SQLAlchemy engine
+            remote_name: Name of the remote
+            table_name: Optional table name, will be derived from remote_name if not provided
+        """
+        self.engine = engine
+        self.remote_name = remote_name
+
+        # If table_name is not provided, derive one from the remote name.
+        if table_name is None:
+            # table_name = (
+            #     "file_entries_"
+            #     + remote_name.replace(":", "_").replace(" ", "_").replace("/", "_").lower()
+            # )
+            table_name = _to_table_name(remote_name)
+        self.table_name = table_name
+
+        # Check if repository exists in RepositoryMeta; if not, create a new entry.
+        with Session(self.engine) as session:
+            existing_repo = session.exec(
+                select(RepositoryMeta).where(
+                    RepositoryMeta.repo_name == self.remote_name
+                )
+            ).first()
+            if not existing_repo:
+                repo_meta = RepositoryMeta(
+                    repo_name=self.remote_name, file_table_name=self.table_name
+                )
+                session.add(repo_meta)
+                session.commit()
+
+        # Dynamically create the file entry model and its table.
+        self.FileEntryModel = create_file_entry_model(self.table_name)
+        SQLModel.metadata.create_all(self.engine, tables=[self.FileEntryModel.__table__])  # type: ignore
+
+    def insert_file(self, file: FileItem) -> None:
+        """Insert a file entry into the table.
+
+        Args:
+            file: File entry
+        """
+        return self.insert_files([file])
+
+    def insert_files(self, files: list[FileItem]) -> None:
+        """
+        Insert multiple file entries into the table.
+
+        Three bulk operations are performed:
+        1. Select: Determine which files already exist.
+        2. Insert: Bulk-insert new file entries.
+        3. Update: Bulk-update existing file entries.
+
+        The FileEntryModel must define a unique constraint on (path, name) and have a primary key "id".
+        """
+        # Step 1: Bulk select existing records.
+        # get_exists() returns a set of FileItem objects (based on path_no_remote and name) that already exist.
+        existing_files = self.get_exists(files)
+
+        # Determine which files need to be updated vs. inserted.
+        needs_update = existing_files
+        is_new = set(files) - existing_files
+
+        # Step 2: Bulk insert new rows.
+        new_values = [
+            {
+                "path": file.path_no_remote,
+                "name": file.name,
+                "size": file.size,
+                "mime_type": file.mime_type,
+                "mod_time": file.mod_time,
+                "suffix": file.real_suffix,
+            }
+            for file in is_new
+        ]
+        with Session(self.engine) as session:
+            if new_values:
+                session.bulk_insert_mappings(self.FileEntryModel, new_values)  # type: ignore
+                session.commit()
+
+        # Step 3: Bulk update existing rows.
+        # First, query the database for the primary keys of rows that match the unique keys in needs_update.
+        with Session(self.engine) as session:
+            # Collect all unique paths from files needing update.
+            update_paths = [file.path_no_remote for file in needs_update]
+            # Query for existing rows matching any of these paths.
+            db_entries = session.exec(
+                select(self.FileEntryModel).where(
+                    self.FileEntryModel.path.in_(update_paths)  # type: ignore
+                )
+            ).all()
+
+            # Build a mapping from the unique key (path, name) to the primary key (id).
+            id_map = {(entry.path, entry.name): entry.id for entry in db_entries}
+
+            # Prepare bulk update mappings.
+            update_values = []
+            for file in needs_update:
+                key = (file.path_no_remote, file.name)
+                if key in id_map:
+                    update_values.append(
+                        {
+                            "id": id_map[key],
+                            "size": file.size,
+                            "mime_type": file.mime_type,
+                            "mod_time": file.mod_time,
+                            "suffix": file.real_suffix,
+                        }
+                    )
+            if update_values:
+                session.bulk_update_mappings(self.FileEntryModel, update_values)  # type: ignore
+                session.commit()
+
+    def get_exists(self, files: list[FileItem]) -> set[FileItem]:
+        """Get file entries from the table that exist among the given files.
+
+        Args:
+            files: List of file entries
+
+        Returns:
+            Set of FileItem instances whose 'path_no_remote' exists in the table.
+        """
+        # Extract unique paths from the input files.
+        paths = {file.path_no_remote for file in files}
+
+        with Session(self.engine) as session:
+            # Execute a single query to fetch all file paths in the table that match the input paths.
+            result = session.exec(
+                select(self.FileEntryModel.path).where(
+                    self.FileEntryModel.path.in_(paths)  # type: ignore
+                )
+            ).all()
+            # Convert the result to a set for fast membership tests.
+            existing_paths = set(result)
+
+        # Return the set of FileItem objects that have a path in the existing_paths.
+        return {file for file in files if file.path_no_remote in existing_paths}
+
+    def get_files(self) -> list[FileItem]:
+        """Get all files in the table.
+
+        Returns:
+            list: List of file entries
+        """
+        # with Session(self.engine) as session:
+        #     return session.exec(select(self.FileEntryModel)).all()
+        out: list[FileItem] = []
+        with Session(self.engine) as session:
+            query = session.exec(select(self.FileEntryModel)).all()
+            for item in query:
+                name = item.name  # type: ignore
+                size = item.size  # type: ignore
+                mime_type = item.mime_type  # type: ignore
+                mod_time = item.mod_time  # type: ignore
+                path = item.path  # type: ignore
+                parent = os.path.dirname(path)
+                if parent == "/" or parent == ".":
+                    parent = ""
+                o = FileItem(
+                    remote=self.remote_name,
+                    parent=parent,
+                    name=name,
+                    size=size,
+                    mime_type=mime_type,
+                    mod_time=mod_time,
+                )
+                out.append(o)
+        return out
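
For orientation, here is a minimal usage sketch of the DB class diffed above. It leans only on what the diff itself shows; the sqlite URL and the exact FileItem constructor signature are assumptions, not a documented API.

# Hypothetical usage of rclone_api/db/db.py (sketch, not the package's docs).
from rclone_api.db.db import DB
from rclone_api.file import FileItem

db = DB("sqlite:///files.db")  # any URL accepted by create_engine()
item = FileItem(
    remote="remote:bucket",  # used to pick the per-remote table
    parent="path/to",
    name="file.txt",
    size=1024,
    mime_type="text/plain",
    mod_time="2025-01-01T00:00:00Z",
)
db.add_files([item])  # partitions by remote, then bulk insert/update
for f in db.query_files("remote:bucket"):
    print(f.name, f.size)
db.close()  # dispose of the engine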
rclone_api/db/models.py CHANGED
@@ -1,57 +1,57 @@
-"""
-Database models for rclone_api.
-"""
-
-from abc import ABC, abstractmethod
-from typing import Optional, Type
-
-from sqlalchemy import BigInteger, Column
-from sqlmodel import Field, SQLModel
-
-
-# Meta table that indexes all repositories
-class RepositoryMeta(SQLModel, table=True):
-    """Repository metadata table."""
-
-    id: Optional[int] = Field(default=None, primary_key=True)
-    repo_name: str
-    file_table_name: str  # The dedicated table name for file entries
-
-
-# Base FileEntry model that will be extended
-class FileEntry(SQLModel, ABC):
-    """Base file entry model with common fields."""
-
-    id: Optional[int] = Field(default=None, primary_key=True)
-    path: str = Field(index=True, unique=True)
-    suffix: str = Field(index=True)
-    name: str
-    size: int = Field(sa_column=Column(BigInteger))
-    mime_type: str
-    mod_time: str
-    hash: Optional[str] = Field(default=None)
-
-    @abstractmethod
-    def table_name(self) -> str:
-        """Return the table name for this file entry model."""
-        pass
-
-
-# Factory to dynamically create a FileEntry model with a given table name
-def create_file_entry_model(_table_name: str) -> Type[FileEntry]:
-    """Create a file entry model with a given table name.
-
-    Args:
-        table_name: Table name
-
-    Returns:
-        Type[FileEntryBase]: File entry model class with specified table name
-    """
-
-    class FileEntryConcrete(FileEntry, table=True):
-        __tablename__ = _table_name  # type: ignore # dynamically set table name
-
-        def table_name(self) -> str:
-            return _table_name
-
-    return FileEntryConcrete
+"""
+Database models for rclone_api.
+"""
+
+from abc import ABC, abstractmethod
+from typing import Optional, Type
+
+from sqlalchemy import BigInteger, Column
+from sqlmodel import Field, SQLModel
+
+
+# Meta table that indexes all repositories
+class RepositoryMeta(SQLModel, table=True):
+    """Repository metadata table."""
+
+    id: Optional[int] = Field(default=None, primary_key=True)
+    repo_name: str
+    file_table_name: str  # The dedicated table name for file entries
+
+
+# Base FileEntry model that will be extended
+class FileEntry(SQLModel, ABC):
+    """Base file entry model with common fields."""
+
+    id: Optional[int] = Field(default=None, primary_key=True)
+    path: str = Field(index=True, unique=True)
+    suffix: str = Field(index=True)
+    name: str
+    size: int = Field(sa_column=Column(BigInteger))
+    mime_type: str
+    mod_time: str
+    hash: Optional[str] = Field(default=None)
+
+    @abstractmethod
+    def table_name(self) -> str:
+        """Return the table name for this file entry model."""
+        pass
+
+
+# Factory to dynamically create a FileEntry model with a given table name
+def create_file_entry_model(_table_name: str) -> Type[FileEntry]:
+    """Create a file entry model with a given table name.
+
+    Args:
+        table_name: Table name
+
+    Returns:
+        Type[FileEntryBase]: File entry model class with specified table name
+    """
+
+    class FileEntryConcrete(FileEntry, table=True):
+        __tablename__ = _table_name  # type: ignore # dynamically set table name
+
+        def table_name(self) -> str:
+            return _table_name
+
+    return FileEntryConcrete
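
A short sketch of what the factory above enables: each remote gets its own concrete model bound to a distinct table name. The engine URL and table names here are illustrative assumptions; note that SQLModel registers every generated table in the shared metadata, so the same name should not be created twice in one process.

# Sketch: one dynamically named file-entry table per remote (assumed URL).
from sqlmodel import SQLModel, create_engine
from rclone_api.db.models import create_file_entry_model

engine = create_engine("sqlite:///files.db")
FilesA = create_file_entry_model("files_remote_a")
FilesB = create_file_entry_model("files_remote_b")
# Create only the two dynamic tables; each class maps to its own table.
SQLModel.metadata.create_all(engine, tables=[FilesA.__table__, FilesB.__table__])  # type: ignore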
rclone_api/file.py CHANGED
@@ -71,6 +71,7 @@ class FileItem:
     mime_type: str
     mod_time: str
     hash: str | None = None
+    id: int | None = None
 
     @property
     def path(self) -> str:
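
The new optional id presumably lets a FileItem carry its database row id after a round trip through db.py. The pattern in isolation (Item is a hypothetical stand-in, not the library's class):

# Sketch of an optional database-assigned id on a value object.
from dataclasses import dataclass

@dataclass
class Item:
    name: str
    id: int | None = None  # None until the row exists in the database

print(Item("file.txt"))        # Item(name='file.txt', id=None)
print(Item("file.txt", id=7))  # as it might look after being stored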
rclone_api/mount_read_chunker.py CHANGED
@@ -115,8 +115,9 @@ class MultiMountFileChunker:
             self._release_mount(mount)
 
         if isinstance(bytes_or_err, Exception):
+            err: Exception = bytes_or_err
             logger.warning(f"Fetch task returned exception: {bytes_or_err}")
-            return FilePart(payload=bytes_or_err, extra=extra)
+            return FilePart(payload=err, extra=extra)
         logger.debug(f"Successfully fetched {size} bytes from offset {offset}")
         out = FilePart(payload=bytes_or_err, extra=extra)
         return out
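
The only substantive change in this hunk is binding the payload to an annotated local before forwarding it, which makes the isinstance narrowing explicit at the point of use. The same idea in isolation (classify and its types are illustrative, not the library's API):

# Sketch: an annotated local documents the narrowed Exception type.
def classify(payload: bytes | Exception) -> str:
    if isinstance(payload, Exception):
        err: Exception = payload  # explicit, reader-visible narrowing
        return f"error: {err}"
    return f"ok: {len(payload)} bytes"

print(classify(b"abc"))
print(classify(ValueError("boom")))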
rclone_api/s3/chunk_task.py CHANGED
@@ -115,7 +115,9 @@ class _OnCompleteHandler:
 
         # done_part_numbers.add(part_number)
         # queue_upload.put(fp)
-        self.part_number_tracker.add_finished_part_number(part_number)
+        self.part_number_tracker.add_finished_part_number(
+            part_number
+        )  # in memory database, not persistant to resume.json
         self.queue_upload.put(fp)
 
 
rclone_api/s3/upload_file_multipart.py CHANGED
@@ -61,25 +61,29 @@ def handle_upload(
     upload_info: UploadInfo, fp: FilePart | EndOfStream
 ) -> FinishedPiece | Exception | EndOfStream:
     if isinstance(fp, EndOfStream):
-        return fp
-    assert isinstance(fp.extra, S3FileInfo)
-    extra: S3FileInfo = fp.extra
-    part_number = extra.part_number
-    print(f"Handling upload for {part_number}, size {fp.size}")
+        eos: EndOfStream = fp
+        return eos
+    part_number: int | None = None
     try:
+        assert isinstance(fp.extra, S3FileInfo)
+        extra: S3FileInfo = fp.extra
+        part_number = extra.part_number
+        print(f"Handling upload for {part_number}, size {fp.size}")
+
         part: FinishedPiece = upload_task(
             info=upload_info,
             chunk=fp.load(),
             part_number=part_number,
             retries=upload_info.retries,
         )
-        fp.close()
         return part
     except Exception as e:
         stacktrace = traceback.format_exc()
         msg = f"Error uploading part {part_number}: {e}\n{stacktrace}"
         warnings.warn(msg)
         return e
+    finally:
+        fp.close()
 
 
 def prepare_upload_file_multipart(
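
The handle_upload change above is a classic try/finally resource fix: fp.close() previously ran only on the success path, so the on-disk chunk leaked whenever upload_task raised. A stripped-down sketch of the pattern (Chunk and handle are illustrative stand-ins, not the library's API):

# Sketch: cleanup in finally runs on both the success and error paths.
class Chunk:
    def load(self) -> bytes:
        return b"data"

    def close(self) -> None:
        print("chunk closed")

def handle(chunk: Chunk) -> bytes | Exception:
    try:
        return chunk.load()  # may raise
    except Exception as e:
        return e  # report the failure to the caller instead of raising
    finally:
        chunk.close()  # runs even when load() raises

print(handle(Chunk()))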
rclone_api/types.py CHANGED
@@ -1,3 +1,4 @@
+import atexit
 import os
 import re
 import time
@@ -267,6 +268,26 @@ class EndOfStream:
     pass
 
 
+_CLEANUP_LIST: list[Path] = []
+
+
+def _add_for_cleanup(path: Path) -> None:
+    _CLEANUP_LIST.append(path)
+
+
+def _on_exit_cleanup() -> None:
+    paths = list(_CLEANUP_LIST)
+    for path in paths:
+        try:
+            if path.exists():
+                path.unlink()
+        except Exception as e:
+            warnings.warn(f"Cannot cleanup {path}: {e}")
+
+
+atexit.register(_on_exit_cleanup)
+
+
 class FilePart:
     def __init__(self, payload: bytes | Exception, extra: Any) -> None:
         from rclone_api.util import random_str
@@ -280,8 +301,9 @@ class FilePart:
             self.payload = get_chunk_tmpdir() / f"{random_str(12)}.chunk"
             with _TMP_DIR_ACCESS_LOCK:
                 if not self.payload.parent.exists():
-                    self.payload.parent.mkdir(parents=True)
+                    self.payload.parent.mkdir(parents=True, exist_ok=True)
                 self.payload.write_bytes(payload)
+            _add_for_cleanup(self.payload)
 
     @property
     def size(self) -> int:
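
The types.py additions above register every written chunk file for deletion when the interpreter exits. The idiom in isolation, as a runnable sketch (the temp-file handling is illustrative):

# Sketch of the atexit cleanup idiom added to types.py.
import atexit
import os
import tempfile
from pathlib import Path

_cleanup_list: list[Path] = []

def _on_exit_cleanup() -> None:
    for path in _cleanup_list:
        try:
            if path.exists():
                path.unlink()
        except OSError:
            pass  # best effort; nothing useful to do at exit time

atexit.register(_on_exit_cleanup)

fd, name = tempfile.mkstemp(suffix=".chunk")
os.close(fd)  # mkstemp returns an open descriptor; close it
_cleanup_list.append(Path(name))  # unlinked automatically at exit

As in the library code, this is best effort only: atexit handlers do not run if the process is killed hard (for example with SIGKILL).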
rclone_api-1.3.18.dist-info/METADATA → rclone_api-1.3.19.dist-info/METADATA CHANGED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.2
 Name: rclone_api
-Version: 1.3.18
+Version: 1.3.19
 Summary: rclone api in python
 Home-page: https://github.com/zackees/rclone-api
 License: BSD 3-Clause License
rclone_api-1.3.18.dist-info/RECORD → rclone_api-1.3.19.dist-info/RECORD CHANGED
@@ -8,19 +8,19 @@ rclone_api/diff.py,sha256=tMoJMAGmLSE6Q_7QhPf6PnCzb840djxMZtDmhc2GlGQ,5227
 rclone_api/dir.py,sha256=i4h7LX5hB_WmVixxDRWL_l1nifvscrdWct_8Wx7wHZc,3540
 rclone_api/dir_listing.py,sha256=GoziW8Sne6FY90MLNcb2aO3aaa3jphB6H8ExYrV0Ryo,1882
 rclone_api/exec.py,sha256=Bq0gkyZ10mEY0FRyzNZgdN4FaWP9vpeCk1kjpg-gN_8,1083
-rclone_api/file.py,sha256=aBHzC-sqwPJ5fSI8bj9XOXJY_I6aCevnxXqiLscl4oo,5393
+rclone_api/file.py,sha256=cz-7_nJArkVdJ9z2QaC_XZYpihXe3IPBC90Z5_3g2aw,5419
 rclone_api/file_item.py,sha256=cH-AQYsxedhNPp4c8NHY1ad4Z7St4yf_VGbmiGD59no,1770
 rclone_api/filelist.py,sha256=xbiusvNgaB_b_kQOZoHMJJxn6TWGtPrWd2J042BI28o,767
 rclone_api/group_files.py,sha256=H92xPW9lQnbNw5KbtZCl00bD6iRh9yRbCuxku4j_3dg,8036
 rclone_api/log.py,sha256=VZHM7pNSXip2ZLBKMP7M1u-rp_F7zoafFDuR8CPUoKI,1271
 rclone_api/mount.py,sha256=TE_VIBMW7J1UkF_6HRCt8oi_jGdMov4S51bm2OgxFAM,10045
-rclone_api/mount_read_chunker.py,sha256=bi4N-VkyPdM4RWUYhIqJ71lQrXanck4PpF3pY8k2xnQ,4722
+rclone_api/mount_read_chunker.py,sha256=7jaF1Rsjr-kXIZW--Ol1QuG7WArBgdIcpQ0AJMYn7bI,4764
 rclone_api/process.py,sha256=BGXJTZVT__jeaDyjN8_kRycliOhkBErMPdHO1hKRvJE,5271
 rclone_api/rclone.py,sha256=ogWjSt--Ph2dpeq31mWsBRBvKhpf1EF0jJD2HxgQ8T0,50903
 rclone_api/remote.py,sha256=O9WDUFQy9f6oT1HdUbTixK2eg0xtBBm8k4Xl6aa6K00,431
 rclone_api/rpath.py,sha256=8ZA_1wxWtskwcy0I8V2VbjKDmzPkiWd8Q2JQSvh-sYE,2586
 rclone_api/scan_missing_folders.py,sha256=Kulca2Q6WZodt00ATFHkmqqInuoPvBkhTcS9703y6po,4740
-rclone_api/types.py,sha256=XiiWoGXAzNxTEfRcszw3BeRF7ZATXHIAPFg2-aJzUfo,9926
+rclone_api/types.py,sha256=OYO14H6Xf6u2nHbrzfoMbGbU7LRPhALaRA9FEClf8OI,10388
 rclone_api/util.py,sha256=F9Q3zbWRsgPF4NG6OWB63cZ7GVq82lsraP47gmmDohU,5416
 rclone_api/walk.py,sha256=-54NVE8EJcCstwDoaC_UtHm73R2HrZwVwQmsnv55xNU,3369
 rclone_api/assets/example.txt,sha256=lTBovRjiz0_TgtAtbA1C5hNi2ffbqnNPqkKg6UiKCT8,54
@@ -29,21 +29,21 @@ rclone_api/cmd/copy_large_s3.py,sha256=nOpAUAQN1mJnf4EIZCh4OVCW7Q4_EXJeLFVe6r_9r
 rclone_api/cmd/list_files.py,sha256=x8FHODEilwKqwdiU1jdkeJbLwOqUkUQuDWPo2u_zpf0,741
 rclone_api/cmd/save_to_db.py,sha256=ylvnhg_yzexM-m6Zr7XDiswvoDVSl56ELuFAdb9gqBY,1957
 rclone_api/db/__init__.py,sha256=OSRUdnSWUlDTOHmjdjVmxYTUNpTbtaJ5Ll9sl-PfZg0,40
-rclone_api/db/db.py,sha256=1yoom28TGwCKVkb9WFbgDTTYE4SxmYfALDEUTW8_Pms,10351
-rclone_api/db/models.py,sha256=unKEiu8l4R4UAEoncEbOHEda227DpXm-cWwRV6vwJXE,1657
+rclone_api/db/db.py,sha256=ZpYfeCUe8MKg_fdJucRSe6-fwGY_rWqUn7WkHCNFH_4,10074
+rclone_api/db/models.py,sha256=v7qaXUehvsDvU51uk69JI23fSIs9JFGcOa-Tv1c_wVs,1600
 rclone_api/experimental/flags.py,sha256=qCVD--fSTmzlk9hloRLr0q9elzAOFzPsvVpKM3aB1Mk,2739
 rclone_api/experimental/flags_base.py,sha256=ajU_czkTcAxXYU-SlmiCfHY7aCQGHvpCLqJ-Z8uZLk0,2102
 rclone_api/profile/mount_copy_bytes.py,sha256=nZtqMukLhSzHq64Pn1I8pXwjoraqWjCKey3WLAeubx0,9069
 rclone_api/s3/api.py,sha256=PafsIEyWDpLWAXsZAjFm9CY14vJpsDr9lOsn0kGRLZ0,4009
 rclone_api/s3/basic_ops.py,sha256=hK3366xhVEzEcjz9Gk_8lFx6MRceAk72cax6mUrr6ko,2104
-rclone_api/s3/chunk_task.py,sha256=plrQyjuHFdJfV55kKH4wqo6QGll9n4_7BO4B1A7j6kY,7121
+rclone_api/s3/chunk_task.py,sha256=kA6_5fLNdtT3QdTFrfBY6y8sH9Og8nM2mrjgAz_g1Rc,7196
 rclone_api/s3/chunk_types.py,sha256=oSWv8No9V3BeM7IcGnowyR2a7YrszdAXzEJlxaeZcp0,8852
 rclone_api/s3/create.py,sha256=wgfkapv_j904CfKuWyiBIWJVxfAx_ftemFSUV14aT68,3149
 rclone_api/s3/types.py,sha256=Elmh__gvZJyJyElYwMmvYZIBIunDJiTRAbEg21GmsRU,1604
-rclone_api/s3/upload_file_multipart.py,sha256=d8ZWqO8n9wsqRF6JjmvAFmG1aCkFqdSB1L8yxe_5qiY,11669
-rclone_api-1.3.18.dist-info/LICENSE,sha256=b6pOoifSXiUaz_lDS84vWlG3fr4yUKwB8fzkrH9R8bQ,1064
-rclone_api-1.3.18.dist-info/METADATA,sha256=ow95lJm4jG8h05LuHtbeagZgL8R_qb7dqtwYU9s0cV4,4598
-rclone_api-1.3.18.dist-info/WHEEL,sha256=rF4EZyR2XVS6irmOHQIJx2SUqXLZKRMUrjsg8UwN-XQ,109
-rclone_api-1.3.18.dist-info/entry_points.txt,sha256=fJteOlYVwgX3UbNuL9jJ0zUTuX2O79JFAeNgK7Sw7EQ,255
-rclone_api-1.3.18.dist-info/top_level.txt,sha256=EvZ7uuruUpe9RiUyEp25d1Keq7PWYNT0O_-mr8FCG5g,11
-rclone_api-1.3.18.dist-info/RECORD,,
+rclone_api/s3/upload_file_multipart.py,sha256=eVjaRoE0xfoOFmiYRuwz3tVfT2TvJl2lSYSAut12fMg,11765
+rclone_api-1.3.19.dist-info/LICENSE,sha256=b6pOoifSXiUaz_lDS84vWlG3fr4yUKwB8fzkrH9R8bQ,1064
+rclone_api-1.3.19.dist-info/METADATA,sha256=fN9Od_VwYlO6TkvAdThtH-GdpyFUlkhXTpOGU0n-Dpk,4598
+rclone_api-1.3.19.dist-info/WHEEL,sha256=rF4EZyR2XVS6irmOHQIJx2SUqXLZKRMUrjsg8UwN-XQ,109
+rclone_api-1.3.19.dist-info/entry_points.txt,sha256=fJteOlYVwgX3UbNuL9jJ0zUTuX2O79JFAeNgK7Sw7EQ,255
+rclone_api-1.3.19.dist-info/top_level.txt,sha256=EvZ7uuruUpe9RiUyEp25d1Keq7PWYNT0O_-mr8FCG5g,11
+rclone_api-1.3.19.dist-info/RECORD,,