digitalkin 0.2.14__py3-none-any.whl → 0.2.16__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- digitalkin/__version__.py +1 -1
- digitalkin/services/filesystem/default_filesystem.py +337 -130
- digitalkin/services/filesystem/filesystem_strategy.py +202 -34
- digitalkin/services/filesystem/grpc_filesystem.py +235 -118
- digitalkin/services/setup/grpc_setup.py +1 -0
- {digitalkin-0.2.14.dist-info → digitalkin-0.2.16.dist-info}/METADATA +1 -1
- {digitalkin-0.2.14.dist-info → digitalkin-0.2.16.dist-info}/RECORD +14 -13
- {digitalkin-0.2.14.dist-info → digitalkin-0.2.16.dist-info}/top_level.txt +1 -0
- modules/cpu_intensive_module.py +0 -1
- modules/text_transform_module.py +0 -1
- services/filesystem_module.py +198 -0
- {modules → services}/storage_module.py +20 -7
- {digitalkin-0.2.14.dist-info → digitalkin-0.2.16.dist-info}/WHEEL +0 -0
- {digitalkin-0.2.14.dist-info → digitalkin-0.2.16.dist-info}/licenses/LICENSE +0 -0
digitalkin/__version__.py
CHANGED
|
@@ -1,208 +1,415 @@
|
|
|
1
|
-
"""Default filesystem."""
|
|
1
|
+
"""Default filesystem implementation."""
|
|
2
2
|
|
|
3
|
+
import hashlib
|
|
3
4
|
import os
|
|
4
5
|
import tempfile
|
|
6
|
+
import uuid
|
|
5
7
|
from pathlib import Path
|
|
8
|
+
from typing import Any, Literal
|
|
6
9
|
|
|
7
10
|
from digitalkin.logger import logger
|
|
8
11
|
from digitalkin.services.filesystem.filesystem_strategy import (
|
|
9
|
-
|
|
12
|
+
FileFilter,
|
|
13
|
+
FilesystemRecord,
|
|
10
14
|
FilesystemServiceError,
|
|
11
15
|
FilesystemStrategy,
|
|
12
|
-
|
|
16
|
+
UploadFileData,
|
|
13
17
|
)
|
|
14
18
|
|
|
15
19
|
|
|
16
20
|
class DefaultFilesystem(FilesystemStrategy):
|
|
17
|
-
"""Default
|
|
21
|
+
"""Default filesystem implementation.
|
|
18
22
|
|
|
19
|
-
|
|
23
|
+
This implementation provides a local filesystem-based storage solution
|
|
24
|
+
with support for all filesystem operations defined in the strategy.
|
|
25
|
+
Files are stored in a temporary directory with proper metadata tracking.
|
|
26
|
+
"""
|
|
27
|
+
|
|
28
|
+
def __init__(self, mission_id: str, setup_version_id: str) -> None:
|
|
20
29
|
"""Initialize the default filesystem strategy.
|
|
21
30
|
|
|
22
31
|
Args:
|
|
23
32
|
mission_id: The ID of the mission this strategy is associated with
|
|
24
33
|
setup_version_id: The ID of the setup version this strategy is associated with
|
|
25
|
-
config: A dictionary mapping names to Pydantic model classes
|
|
26
34
|
"""
|
|
27
|
-
super().__init__(mission_id, setup_version_id
|
|
28
|
-
self.temp_root: str =
|
|
35
|
+
super().__init__(mission_id, setup_version_id)
|
|
36
|
+
self.temp_root: str = tempfile.mkdtemp()
|
|
29
37
|
os.makedirs(self.temp_root, exist_ok=True)
|
|
30
|
-
self.db: dict[str,
|
|
38
|
+
self.db: dict[str, FilesystemRecord] = {}
|
|
39
|
+
logger.debug("DefaultFilesystem initialized with temp_root: %s", self.temp_root)
|
|
31
40
|
|
|
32
|
-
def
|
|
33
|
-
"""Get the temporary directory path for a specific
|
|
41
|
+
def _get_context_temp_dir(self, context: str) -> str:
|
|
42
|
+
"""Get the temporary directory path for a specific context.
|
|
34
43
|
|
|
35
44
|
Args:
|
|
36
|
-
|
|
45
|
+
context: The mission ID or setup ID.
|
|
37
46
|
|
|
38
47
|
Returns:
|
|
39
|
-
str: Path to the
|
|
48
|
+
str: Path to the context's temporary directory
|
|
40
49
|
"""
|
|
41
|
-
# Create a
|
|
42
|
-
|
|
43
|
-
os.makedirs(
|
|
44
|
-
return
|
|
50
|
+
# Create a context-specific directory to organize files
|
|
51
|
+
context_dir = os.path.join(self.temp_root, context.replace(":", "_"))
|
|
52
|
+
os.makedirs(context_dir, exist_ok=True)
|
|
53
|
+
return context_dir
|
|
45
54
|
|
|
46
|
-
|
|
47
|
-
|
|
55
|
+
@staticmethod
|
|
56
|
+
def _calculate_checksum(content: bytes) -> str:
|
|
57
|
+
"""Calculate SHA-256 checksum of content.
|
|
48
58
|
|
|
49
59
|
Args:
|
|
50
|
-
content: The content
|
|
51
|
-
name: The name of the file to be created
|
|
52
|
-
file_type: The type of data being uploaded
|
|
60
|
+
content: The content to calculate checksum for
|
|
53
61
|
|
|
54
62
|
Returns:
|
|
55
|
-
|
|
63
|
+
str: The SHA-256 checksum
|
|
64
|
+
"""
|
|
65
|
+
return hashlib.sha256(content).hexdigest()
|
|
56
66
|
|
|
57
|
-
|
|
58
|
-
|
|
59
|
-
|
|
67
|
+
def _filter_db(
|
|
68
|
+
self,
|
|
69
|
+
filters: FileFilter,
|
|
70
|
+
) -> list[FilesystemRecord]:
|
|
71
|
+
"""Filter the in-memory database based on provided filters.
|
|
72
|
+
|
|
73
|
+
Args:
|
|
74
|
+
filters: Filter criteria for the files
|
|
75
|
+
|
|
76
|
+
Returns:
|
|
77
|
+
list[FilesystemRecord]: List of files matching the filters
|
|
60
78
|
"""
|
|
61
|
-
|
|
62
|
-
|
|
63
|
-
|
|
64
|
-
|
|
65
|
-
|
|
66
|
-
|
|
67
|
-
|
|
68
|
-
|
|
69
|
-
|
|
70
|
-
|
|
71
|
-
|
|
72
|
-
|
|
73
|
-
|
|
74
|
-
|
|
75
|
-
|
|
76
|
-
|
|
77
|
-
|
|
78
|
-
|
|
79
|
-
|
|
79
|
+
logger.debug("Filtering db with filters: %s", filters)
|
|
80
|
+
return [
|
|
81
|
+
f
|
|
82
|
+
for f in self.db.values()
|
|
83
|
+
if (not filters.names or f.name in filters.names)
|
|
84
|
+
and (not filters.file_ids or f.id in filters.file_ids)
|
|
85
|
+
and (not filters.file_types or f.file_type in filters.file_types)
|
|
86
|
+
and f.context == self.mission_id
|
|
87
|
+
and (not filters.status or f.status == filters.status)
|
|
88
|
+
and (not filters.content_type_prefix or f.content_type.startswith(filters.content_type_prefix))
|
|
89
|
+
and (not filters.min_size_bytes or f.size_bytes >= filters.min_size_bytes)
|
|
90
|
+
and (not filters.max_size_bytes or f.size_bytes <= filters.max_size_bytes)
|
|
91
|
+
and (not filters.prefix or f.name.startswith(filters.prefix))
|
|
92
|
+
and (not filters.content_type or f.content_type == filters.content_type)
|
|
93
|
+
]
|
|
94
|
+
|
|
95
|
+
def upload_files(
|
|
96
|
+
self,
|
|
97
|
+
files: list[UploadFileData],
|
|
98
|
+
) -> tuple[list[FilesystemRecord], int, int]:
|
|
99
|
+
"""Upload multiple files to the system.
|
|
100
|
+
|
|
101
|
+
This method allows batch uploading of files with validation and
|
|
102
|
+
error handling for each individual file. Files are processed
|
|
103
|
+
atomically - if one fails, others may still succeed.
|
|
104
|
+
|
|
105
|
+
Args:
|
|
106
|
+
files: List of files to upload
|
|
107
|
+
|
|
108
|
+
Returns:
|
|
109
|
+
tuple[list[FilesystemRecord], int, int]: List of uploaded files, total uploaded count, total failed count
|
|
80
110
|
|
|
81
|
-
|
|
82
|
-
|
|
111
|
+
Raises:
|
|
112
|
+
FilesystemServiceError: If there is an error uploading the files
|
|
113
|
+
"""
|
|
114
|
+
uploaded_files: list[FilesystemRecord] = []
|
|
115
|
+
total_uploaded = 0
|
|
116
|
+
total_failed = 0
|
|
117
|
+
|
|
118
|
+
for file in files:
|
|
119
|
+
try:
|
|
120
|
+
# Check if file with same name exists in the context
|
|
121
|
+
context_dir = self._get_context_temp_dir(self.mission_id)
|
|
122
|
+
file_path = os.path.join(context_dir, file.name)
|
|
123
|
+
if os.path.exists(file_path) and not file.replace_if_exists:
|
|
124
|
+
msg = f"File with name {file.name} already exists."
|
|
125
|
+
logger.error(msg)
|
|
126
|
+
raise FilesystemServiceError(msg) # noqa: TRY301
|
|
127
|
+
|
|
128
|
+
Path(file_path).write_bytes(file.content)
|
|
129
|
+
storage_url = str(Path(file_path).resolve())
|
|
130
|
+
file_data = FilesystemRecord(
|
|
131
|
+
id=str(uuid.uuid4()),
|
|
132
|
+
context=self.mission_id,
|
|
133
|
+
name=file.name,
|
|
134
|
+
file_type=file.file_type,
|
|
135
|
+
content_type=file.content_type or "application/octet-stream",
|
|
136
|
+
size_bytes=len(file.content),
|
|
137
|
+
checksum=self._calculate_checksum(file.content),
|
|
138
|
+
metadata=file.metadata,
|
|
139
|
+
storage_url=storage_url,
|
|
140
|
+
status=file.status if hasattr(file, "status") and file.status else "ACTIVE",
|
|
141
|
+
)
|
|
142
|
+
|
|
143
|
+
self.db[file_data.id] = file_data
|
|
144
|
+
uploaded_files.append(file_data)
|
|
145
|
+
total_uploaded += 1
|
|
146
|
+
logger.debug("Uploaded file %s", file_data)
|
|
147
|
+
|
|
148
|
+
except Exception as e: # noqa: PERF203
|
|
149
|
+
logger.exception("Error uploading file %s: %s", file.name, e)
|
|
150
|
+
total_failed += 1
|
|
151
|
+
# If only one file and it failed, propagate the error for pytest.raises
|
|
152
|
+
if len(files) == 1:
|
|
153
|
+
raise
|
|
154
|
+
|
|
155
|
+
return uploaded_files, total_uploaded, total_failed
|
|
156
|
+
|
|
157
|
+
def get_files(
|
|
158
|
+
self,
|
|
159
|
+
filters: FileFilter,
|
|
160
|
+
*,
|
|
161
|
+
list_size: int = 100,
|
|
162
|
+
offset: int = 0,
|
|
163
|
+
order: str | None = None, # noqa: ARG002
|
|
164
|
+
include_content: bool = False,
|
|
165
|
+
) -> tuple[list[FilesystemRecord], int]:
|
|
166
|
+
"""List files with filtering, sorting, and pagination.
|
|
167
|
+
|
|
168
|
+
This method provides flexible file querying capabilities with support for:
|
|
169
|
+
- Multiple filter criteria (name, type, dates, size, etc.)
|
|
170
|
+
- Pagination for large result sets
|
|
171
|
+
- Sorting by various fields
|
|
172
|
+
- Scoped access by context
|
|
83
173
|
|
|
84
174
|
Args:
|
|
85
|
-
|
|
175
|
+
filters: Filter criteria for the files
|
|
176
|
+
list_size: Number of files to return per page
|
|
177
|
+
offset: Offset to start listing files from
|
|
178
|
+
order: Fields to order results by (example: "created_at:asc,name:desc")
|
|
179
|
+
include_content: Whether to include file content in response
|
|
86
180
|
|
|
87
181
|
Returns:
|
|
88
|
-
|
|
182
|
+
tuple[list[FilesystemRecord], int]: List of files, total count
|
|
89
183
|
|
|
90
184
|
Raises:
|
|
91
|
-
|
|
92
|
-
FilesystemServiceError: If the file does not exist
|
|
185
|
+
FilesystemServiceError: If there is an error listing the files
|
|
93
186
|
"""
|
|
94
187
|
try:
|
|
95
|
-
|
|
96
|
-
|
|
97
|
-
|
|
98
|
-
|
|
99
|
-
|
|
100
|
-
|
|
101
|
-
|
|
102
|
-
|
|
188
|
+
logger.debug("Listing files with filters: %s", filters)
|
|
189
|
+
# Filter files based on provided criteria
|
|
190
|
+
filtered_files = self._filter_db(filters)
|
|
191
|
+
if not filtered_files:
|
|
192
|
+
return [], 0
|
|
193
|
+
# Sort if order is specified
|
|
194
|
+
# TODO
|
|
195
|
+
|
|
196
|
+
# Apply pagination
|
|
197
|
+
start_idx = offset
|
|
198
|
+
end_idx = start_idx + list_size
|
|
199
|
+
paginated_files = filtered_files[start_idx:end_idx]
|
|
200
|
+
|
|
201
|
+
if include_content:
|
|
202
|
+
for file in paginated_files:
|
|
203
|
+
file.content = Path(file.storage_url).read_bytes()
|
|
204
|
+
|
|
205
|
+
except Exception as e:
|
|
206
|
+
msg = f"Error listing files: {e!s}"
|
|
103
207
|
logger.exception(msg)
|
|
104
208
|
raise FilesystemServiceError(msg)
|
|
209
|
+
else:
|
|
210
|
+
return paginated_files, len(filtered_files)
|
|
211
|
+
|
|
212
|
+
def get_file(
|
|
213
|
+
self,
|
|
214
|
+
file_id: str,
|
|
215
|
+
*,
|
|
216
|
+
include_content: bool = False,
|
|
217
|
+
) -> FilesystemRecord:
|
|
218
|
+
"""Get a specific file by ID or name.
|
|
105
219
|
|
|
106
|
-
|
|
107
|
-
|
|
220
|
+
This method fetches detailed information about a single file,
|
|
221
|
+
with optional content inclusion. Supports lookup by either
|
|
222
|
+
unique ID or name within a context.
|
|
108
223
|
|
|
109
224
|
Args:
|
|
110
|
-
|
|
111
|
-
|
|
112
|
-
file_type: The type of data being updated
|
|
225
|
+
file_id: The ID of the file to be retrieved
|
|
226
|
+
include_content: Whether to include file content in response
|
|
113
227
|
|
|
114
228
|
Returns:
|
|
115
|
-
|
|
229
|
+
FilesystemRecord: Metadata about the retrieved file
|
|
116
230
|
|
|
117
231
|
Raises:
|
|
118
|
-
|
|
119
|
-
FilesystemServiceError: If there is an error during update
|
|
232
|
+
FilesystemServiceError: If there is an error retrieving the file
|
|
120
233
|
"""
|
|
121
|
-
if name not in self.db:
|
|
122
|
-
msg = f"File with name {name} does not exist."
|
|
123
|
-
logger.error(msg)
|
|
124
|
-
raise FileNotFoundError(msg)
|
|
125
234
|
try:
|
|
126
|
-
|
|
127
|
-
|
|
128
|
-
|
|
129
|
-
|
|
130
|
-
|
|
131
|
-
|
|
132
|
-
|
|
133
|
-
|
|
134
|
-
|
|
135
|
-
|
|
136
|
-
|
|
137
|
-
|
|
138
|
-
|
|
235
|
+
logger.debug("Getting file with id: %s", file_id)
|
|
236
|
+
file_data: FilesystemRecord | None = None
|
|
237
|
+
if file_id:
|
|
238
|
+
file_data = self.db.get(file_id)
|
|
239
|
+
|
|
240
|
+
if not file_data:
|
|
241
|
+
msg = f"File not found with id {file_id}"
|
|
242
|
+
logger.error(msg)
|
|
243
|
+
raise FilesystemServiceError(msg) # noqa: TRY301
|
|
244
|
+
|
|
245
|
+
if include_content:
|
|
246
|
+
file_path = file_data.storage_url
|
|
247
|
+
if os.path.exists(file_path):
|
|
248
|
+
content = Path(file_path).read_bytes()
|
|
249
|
+
file_data.content = content
|
|
250
|
+
|
|
251
|
+
except Exception as e:
|
|
252
|
+
msg = f"Error getting file: {e!s}"
|
|
139
253
|
logger.exception(msg)
|
|
140
254
|
raise FilesystemServiceError(msg)
|
|
141
255
|
else:
|
|
142
|
-
return
|
|
143
|
-
|
|
144
|
-
def
|
|
145
|
-
|
|
256
|
+
return file_data
|
|
257
|
+
|
|
258
|
+
def update_file(
|
|
259
|
+
self,
|
|
260
|
+
file_id: str,
|
|
261
|
+
content: bytes | None = None,
|
|
262
|
+
file_type: Literal[
|
|
263
|
+
"UNSPECIFIED",
|
|
264
|
+
"DOCUMENT",
|
|
265
|
+
"IMAGE",
|
|
266
|
+
"VIDEO",
|
|
267
|
+
"AUDIO",
|
|
268
|
+
"ARCHIVE",
|
|
269
|
+
"CODE",
|
|
270
|
+
"OTHER",
|
|
271
|
+
]
|
|
272
|
+
| None = None,
|
|
273
|
+
content_type: str | None = None,
|
|
274
|
+
metadata: dict[str, Any] | None = None,
|
|
275
|
+
new_name: str | None = None,
|
|
276
|
+
status: str | None = None,
|
|
277
|
+
) -> FilesystemRecord:
|
|
278
|
+
"""Update file metadata, content, or both.
|
|
279
|
+
|
|
280
|
+
This method allows updating various aspects of a file:
|
|
281
|
+
- Rename files
|
|
282
|
+
- Update content and content type
|
|
283
|
+
- Modify metadata
|
|
284
|
+
- Create new versions
|
|
146
285
|
|
|
147
286
|
Args:
|
|
148
|
-
|
|
287
|
+
file_id: The id of the file to be updated
|
|
288
|
+
content: Optional new content of the file
|
|
289
|
+
file_type: Optional new type of data
|
|
290
|
+
content_type: Optional new MIME type
|
|
291
|
+
metadata: Optional new metadata (will merge with existing)
|
|
292
|
+
new_name: Optional new name for the file
|
|
293
|
+
status: Optional new status for the file
|
|
149
294
|
|
|
150
295
|
Returns:
|
|
151
|
-
|
|
296
|
+
FilesystemRecord: Metadata about the updated file
|
|
152
297
|
|
|
153
298
|
Raises:
|
|
154
|
-
|
|
155
|
-
FilesystemServiceError: If there is an error during deletion
|
|
299
|
+
FilesystemServiceError: If there is an error during update
|
|
156
300
|
"""
|
|
157
|
-
|
|
158
|
-
if
|
|
159
|
-
msg = f"File with
|
|
301
|
+
logger.debug("Updating file with id: %s", file_id)
|
|
302
|
+
if file_id not in self.db:
|
|
303
|
+
msg = f"File with id {file_id} does not exist."
|
|
160
304
|
logger.error(msg)
|
|
161
|
-
raise FileNotFoundError(msg)
|
|
162
|
-
|
|
163
|
-
# Get the file path
|
|
164
|
-
kin_context_dir = self._get_kin_context_temp_dir(self.mission_id)
|
|
165
|
-
file_path = os.path.join(kin_context_dir, name)
|
|
166
|
-
|
|
167
|
-
# Check if the file exists in the filesystem
|
|
168
|
-
if not os.path.exists(file_path):
|
|
169
|
-
msg = f"File {name} exists in database but not in filesystem at {file_path}."
|
|
170
|
-
logger.error(msg)
|
|
171
|
-
# We could decide to just remove from DB here, but that might hide a larger issue
|
|
172
|
-
# So we're raising a custom error to alert about the inconsistency
|
|
173
305
|
raise FilesystemServiceError(msg)
|
|
174
306
|
|
|
175
307
|
try:
|
|
176
|
-
|
|
177
|
-
|
|
178
|
-
|
|
308
|
+
context_dir = self._get_context_temp_dir(self.mission_id)
|
|
309
|
+
file_path = os.path.join(context_dir, file_id)
|
|
310
|
+
existing_file = self.db[file_id]
|
|
179
311
|
|
|
180
|
-
|
|
181
|
-
|
|
182
|
-
|
|
183
|
-
|
|
184
|
-
|
|
185
|
-
|
|
312
|
+
if content is not None:
|
|
313
|
+
Path(file_path).write_bytes(content)
|
|
314
|
+
existing_file.size_bytes = len(content)
|
|
315
|
+
existing_file.checksum = self._calculate_checksum(content)
|
|
316
|
+
|
|
317
|
+
if file_type is not None:
|
|
318
|
+
existing_file.file_type = file_type
|
|
319
|
+
|
|
320
|
+
if content_type is not None:
|
|
321
|
+
existing_file.content_type = content_type
|
|
322
|
+
|
|
323
|
+
if metadata is not None:
|
|
324
|
+
existing_file.metadata = metadata
|
|
325
|
+
|
|
326
|
+
if status is not None:
|
|
327
|
+
existing_file.status = status
|
|
328
|
+
|
|
329
|
+
if new_name is not None:
|
|
330
|
+
new_path = os.path.join(context_dir, new_name)
|
|
331
|
+
os.rename(file_path, new_path)
|
|
332
|
+
existing_file.name = new_name
|
|
333
|
+
existing_file.storage_url = str(Path(new_path).resolve())
|
|
334
|
+
|
|
335
|
+
self.db[file_id] = existing_file
|
|
336
|
+
|
|
337
|
+
except Exception as e:
|
|
338
|
+
msg = f"Error updating file {file_id}: {e!s}"
|
|
186
339
|
logger.exception(msg)
|
|
187
340
|
raise FilesystemServiceError(msg)
|
|
188
341
|
else:
|
|
189
|
-
return
|
|
342
|
+
return existing_file
|
|
343
|
+
|
|
344
|
+
def delete_files(
|
|
345
|
+
self,
|
|
346
|
+
filters: FileFilter,
|
|
347
|
+
*,
|
|
348
|
+
permanent: bool = False,
|
|
349
|
+
force: bool = False, # noqa: ARG002
|
|
350
|
+
) -> tuple[dict[str, bool], int, int]:
|
|
351
|
+
"""Delete multiple files.
|
|
352
|
+
|
|
353
|
+
This method supports batch deletion of files with options for:
|
|
354
|
+
- Soft deletion (marking as deleted)
|
|
355
|
+
- Permanent deletion
|
|
356
|
+
- Force deletion of files in use
|
|
357
|
+
- Individual error reporting per file
|
|
190
358
|
|
|
191
|
-
|
|
192
|
-
|
|
359
|
+
Args:
|
|
360
|
+
filters: Filter criteria for the files to delete
|
|
361
|
+
permanent: Whether to permanently delete the files
|
|
362
|
+
force: Whether to force delete even if files are in use
|
|
193
363
|
|
|
194
364
|
Returns:
|
|
195
|
-
|
|
196
|
-
"""
|
|
197
|
-
return list(self.db.values())
|
|
365
|
+
tuple[dict[str, bool], int, int]: Results per file, total deleted count, total failed count
|
|
198
366
|
|
|
199
|
-
|
|
200
|
-
|
|
367
|
+
Raises:
|
|
368
|
+
FilesystemServiceError: If there is an error deleting the files
|
|
369
|
+
"""
|
|
370
|
+
logger.debug("Deleting files with filters: %s", filters)
|
|
371
|
+
results: dict[str, bool] = {} # id -> success
|
|
372
|
+
total_deleted = 0
|
|
373
|
+
total_failed = 0
|
|
201
374
|
|
|
202
|
-
|
|
203
|
-
|
|
375
|
+
try:
|
|
376
|
+
# Determine which files to delete
|
|
377
|
+
files_to_delete = [f.id for f in self._filter_db(filters)]
|
|
378
|
+
|
|
379
|
+
if not files_to_delete:
|
|
380
|
+
logger.info("No files match the deletion criteria.")
|
|
381
|
+
return results, total_deleted, total_failed
|
|
382
|
+
|
|
383
|
+
for file_id in files_to_delete:
|
|
384
|
+
file_data = self.db[file_id]
|
|
385
|
+
if not file_data:
|
|
386
|
+
results[file_id] = False
|
|
387
|
+
total_failed += 1
|
|
388
|
+
continue
|
|
389
|
+
|
|
390
|
+
try:
|
|
391
|
+
file_path = file_data.storage_url
|
|
392
|
+
if os.path.exists(file_path):
|
|
393
|
+
if permanent:
|
|
394
|
+
os.remove(file_path)
|
|
395
|
+
del self.db[file_id]
|
|
396
|
+
else:
|
|
397
|
+
file_data.status = "DELETED"
|
|
398
|
+
self.db[file_id] = file_data
|
|
399
|
+
results[file_id] = True
|
|
400
|
+
total_deleted += 1
|
|
401
|
+
else:
|
|
402
|
+
results[file_id] = False
|
|
403
|
+
total_failed += 1
|
|
404
|
+
except Exception as e:
|
|
405
|
+
logger.exception("Error deleting file %s: %s", file_id, e)
|
|
406
|
+
results[file_id] = False
|
|
407
|
+
total_failed += 1
|
|
408
|
+
|
|
409
|
+
except Exception as e:
|
|
410
|
+
msg = f"Error in delete_files: {e!s}"
|
|
411
|
+
logger.exception(msg)
|
|
412
|
+
raise FilesystemServiceError(msg)
|
|
204
413
|
|
|
205
|
-
|
|
206
|
-
|
|
207
|
-
"""
|
|
208
|
-
return {name: self.db.get(name, None) for name in names}
|
|
414
|
+
else:
|
|
415
|
+
return results, total_deleted, total_failed
|