saviialib-1.6.1-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (91)
  1. saviialib/__init__.py +79 -0
  2. saviialib/general_types/__init__.py +0 -0
  3. saviialib/general_types/api/__init__.py +0 -0
  4. saviialib/general_types/api/saviia_api_types.py +48 -0
  5. saviialib/general_types/api/saviia_backup_api_types.py +24 -0
  6. saviialib/general_types/api/saviia_netcamera_api_types.py +11 -0
  7. saviialib/general_types/api/saviia_shakes_api_types.py +21 -0
  8. saviialib/general_types/api/saviia_thies_api_types.py +31 -0
  9. saviialib/general_types/error_types/__init__.py +0 -0
  10. saviialib/general_types/error_types/api/__init__.py +0 -0
  11. saviialib/general_types/error_types/api/saviia_api_error_types.py +113 -0
  12. saviialib/general_types/error_types/api/saviia_netcamera_error_types.py +7 -0
  13. saviialib/general_types/error_types/common/__init__.py +7 -0
  14. saviialib/general_types/error_types/common/common_types.py +26 -0
  15. saviialib/libs/directory_client/__init__.py +4 -0
  16. saviialib/libs/directory_client/client/os_client.py +55 -0
  17. saviialib/libs/directory_client/directory_client.py +44 -0
  18. saviialib/libs/directory_client/directory_client_contract.py +40 -0
  19. saviialib/libs/directory_client/types/directory_client_types.py +6 -0
  20. saviialib/libs/files_client/__init__.py +4 -0
  21. saviialib/libs/files_client/clients/aiofiles_client.py +44 -0
  22. saviialib/libs/files_client/clients/csv_client.py +42 -0
  23. saviialib/libs/files_client/files_client.py +26 -0
  24. saviialib/libs/files_client/files_client_contract.py +13 -0
  25. saviialib/libs/files_client/types/files_client_types.py +32 -0
  26. saviialib/libs/ftp_client/__init__.py +4 -0
  27. saviialib/libs/ftp_client/clients/__init__.py +0 -0
  28. saviialib/libs/ftp_client/clients/aioftp_client.py +52 -0
  29. saviialib/libs/ftp_client/clients/ftplib_client.py +58 -0
  30. saviialib/libs/ftp_client/ftp_client.py +25 -0
  31. saviialib/libs/ftp_client/ftp_client_contract.py +13 -0
  32. saviialib/libs/ftp_client/types/__init__.py +3 -0
  33. saviialib/libs/ftp_client/types/ftp_client_types.py +18 -0
  34. saviialib/libs/log_client/__init__.py +19 -0
  35. saviialib/libs/log_client/log_client.py +46 -0
  36. saviialib/libs/log_client/log_client_contract.py +28 -0
  37. saviialib/libs/log_client/logging_client/logging_client.py +58 -0
  38. saviialib/libs/log_client/types/__init__.py +0 -0
  39. saviialib/libs/log_client/types/log_client_types.py +47 -0
  40. saviialib/libs/log_client/utils/log_client_utils.py +6 -0
  41. saviialib/libs/sftp_client/__init__.py +8 -0
  42. saviialib/libs/sftp_client/clients/asyncssh_sftp_client.py +83 -0
  43. saviialib/libs/sftp_client/sftp_client.py +26 -0
  44. saviialib/libs/sftp_client/sftp_client_contract.py +13 -0
  45. saviialib/libs/sftp_client/types/sftp_client_types.py +24 -0
  46. saviialib/libs/sharepoint_client/__init__.py +17 -0
  47. saviialib/libs/sharepoint_client/clients/sharepoint_rest_api.py +160 -0
  48. saviialib/libs/sharepoint_client/sharepoint_client.py +58 -0
  49. saviialib/libs/sharepoint_client/sharepoint_client_contract.py +26 -0
  50. saviialib/libs/sharepoint_client/types/sharepoint_client_types.py +30 -0
  51. saviialib/libs/zero_dependency/utils/booleans_utils.py +2 -0
  52. saviialib/libs/zero_dependency/utils/datetime_utils.py +25 -0
  53. saviialib/libs/zero_dependency/utils/strings_utils.py +5 -0
  54. saviialib/services/backup/__init__.py +0 -0
  55. saviialib/services/backup/api.py +36 -0
  56. saviialib/services/backup/controllers/__init__.py +0 -0
  57. saviialib/services/backup/controllers/types/__init__.py +6 -0
  58. saviialib/services/backup/controllers/types/upload_backup_to_sharepoint_types.py +18 -0
  59. saviialib/services/backup/controllers/upload_backup_to_sharepoint.py +87 -0
  60. saviialib/services/backup/use_cases/constants/upload_backup_to_sharepoint_constants.py +5 -0
  61. saviialib/services/backup/use_cases/types/__init__.py +7 -0
  62. saviialib/services/backup/use_cases/types/upload_backup_to_sharepoint_types.py +11 -0
  63. saviialib/services/backup/use_cases/upload_backup_to_sharepoint.py +474 -0
  64. saviialib/services/backup/utils/__init__.py +3 -0
  65. saviialib/services/backup/utils/upload_backup_to_sharepoint_utils.py +100 -0
  66. saviialib/services/netcamera/api.py +30 -0
  67. saviialib/services/netcamera/controllers/get_media_files.py +40 -0
  68. saviialib/services/netcamera/controllers/types/get_media_files_types.py +16 -0
  69. saviialib/services/netcamera/use_cases/get_media_files.py +76 -0
  70. saviialib/services/netcamera/use_cases/types/get_media_files_types.py +18 -0
  71. saviialib/services/shakes/__init__.py +0 -0
  72. saviialib/services/shakes/api.py +31 -0
  73. saviialib/services/shakes/controllers/get_miniseed_files.py +48 -0
  74. saviialib/services/shakes/controllers/types/get_miniseed_files_types.py +16 -0
  75. saviialib/services/shakes/use_cases/get_miniseed_files.py +79 -0
  76. saviialib/services/shakes/use_cases/types/get_miniseed_files_types.py +18 -0
  77. saviialib/services/shakes/use_cases/utils/get_miniseed_files_utils.py +11 -0
  78. saviialib/services/thies/__init__.py +0 -0
  79. saviialib/services/thies/api.py +42 -0
  80. saviialib/services/thies/constants/update_thies_data_constants.py +67 -0
  81. saviialib/services/thies/controllers/types/update_thies_data_types.py +18 -0
  82. saviialib/services/thies/controllers/update_thies_data.py +119 -0
  83. saviialib/services/thies/use_cases/components/create_thies_statistics_file.py +115 -0
  84. saviialib/services/thies/use_cases/components/thies_bp.py +442 -0
  85. saviialib/services/thies/use_cases/types/update_thies_data_types.py +24 -0
  86. saviialib/services/thies/use_cases/update_thies_data.py +391 -0
  87. saviialib/services/thies/utils/update_thies_data_utils.py +21 -0
  88. saviialib-1.6.1.dist-info/METADATA +126 -0
  89. saviialib-1.6.1.dist-info/RECORD +91 -0
  90. saviialib-1.6.1.dist-info/WHEEL +4 -0
  91. saviialib-1.6.1.dist-info/licenses/LICENSE +22 -0
saviialib/services/backup/use_cases/upload_backup_to_sharepoint.py
@@ -0,0 +1,474 @@
+ import asyncio
+ from time import time
+ from logging import Logger
+ from saviialib.general_types.error_types.api.saviia_api_error_types import (
+     BackupEmptyError,
+     BackupSourcePathError,
+     BackupUploadError,
+ )
+ from saviialib.general_types.error_types.common import (
+     SharepointClientError,
+ )
+ from saviialib.libs.directory_client import DirectoryClient, DirectoryClientArgs
+ from saviialib.libs.files_client import (
+     FilesClient,
+     FilesClientInitArgs,
+     ReadArgs,
+ )
+ from saviialib.libs.sharepoint_client import (
+     SharepointClient,
+     SharepointClientInitArgs,
+     SpUploadFileArgs,
+     SpCreateFolderArgs,
+ )
+ from saviialib.libs.log_client import (
+     LogClient,
+     LogClientArgs,
+     InfoArgs,
+     DebugArgs,
+     WarningArgs,
+     ErrorArgs,
+     LogStatus,
+ )
+
+ from saviialib.services.backup.utils.upload_backup_to_sharepoint_utils import (
+     calculate_percentage_uploaded,
+     count_files_in_directory,
+     parse_execute_response,
+     show_upload_result,
+     get_pending_files_for_folder,
+     save_file,
+ )
+ from saviialib.libs.zero_dependency.utils.booleans_utils import boolean_to_emoji
+
+ from .types.upload_backup_to_sharepoint_types import (
+     UploadBackupToSharepointUseCaseInput,
+ )
+ from typing import Dict, List
+
+
+ class UploadBackupToSharepointUsecase:
+     LOCAL_BACKUP_NAME = "saviia-local-backup"
+
+     def __init__(self, input: UploadBackupToSharepointUseCaseInput):
+         self.sharepoint_config = input.sharepoint_config
+         self.local_backup_source_path = input.local_backup_source_path
+         self.sharepoint_destination_path = input.sharepoint_destination_path
+         self.files_client = self._initialize_files_client()
+         self.dir_client = self._initialize_directory_client()
+         self.sharepoint_client = self._initalize_sharepoint_client()
+
+         self.grouped_files_by_folder = {}
+         self.logger: Logger = input.logger
+         self.infofile: Dict[str, str | List[str]] = {}
+         self.log_client = LogClient(
+             LogClientArgs(
+                 client_name="logging",
+                 class_name="backup_to_sharepoint",
+                 service_name="backup",
+                 active_record=True,
+             )
+         )
+
+     def _initalize_sharepoint_client(self):
+         return SharepointClient(
+             SharepointClientInitArgs(
+                 self.sharepoint_config, client_name="sharepoint_rest_api"
+             )
+         )
+
+     def _initialize_directory_client(self):
+         return DirectoryClient(DirectoryClientArgs(client_name="os_client"))
+
+     def _initialize_files_client(self):
+         return FilesClient(FilesClientInitArgs(client_name="aiofiles_client"))
+
+     async def _group_files_by_folder(self) -> dict[str, list[str]]:
+         """Groups files to upload by their parent folder."""
+         folder_names = await self.dir_client.listdir(self.local_backup_source_path)
+         grouped = {}
+         if len(folder_names) == 0:
+             raise BackupEmptyError
+
+         async def walk_directory(base_folder: str) -> List[str]:
+             """Recursively collect file paths relative to the base folder."""
+             all_files = []
+             entries = await self.dir_client.listdir(base_folder)
+             if ".PASS.txt" in entries:
+                 return []
+             for entry in entries:
+                 full_path = self.dir_client.join_paths(base_folder, entry)
+                 if await self.dir_client.isdir(full_path):
+                     sub_entries = await self.dir_client.listdir(full_path)
+                     if ".PASS.txt" in sub_entries:
+                         continue
+                     sub_files = await walk_directory(full_path)
+                     all_files.extend(sub_files)
+                 else:
+                     rel_path = self.dir_client.relative_path(
+                         full_path, self.local_backup_source_path
+                     )
+                     all_files.append(rel_path)
+             return all_files
+
+         for folder_name in folder_names:
+             folder_path = self.dir_client.join_paths(
+                 self.local_backup_source_path, folder_name
+             )
+             if not await self.dir_client.isdir(folder_path):
+                 continue
+             entries = await self.dir_client.listdir(folder_path)
+             if ".PASS.txt" in entries:
+                 grouped[folder_name] = set()
+                 continue
+
+             all_files = await walk_directory(folder_path)
+             grouped[folder_name] = set(all_files)
+         return grouped
+
+     async def _create_or_update_infofile(self):
+         """Creates or updates .infofile.json with pass/reset flags for each directory."""
+         self.log_client.method_name = "_create_or_update_infofile"
+         self.log_client.info(
+             InfoArgs(LogStatus.STARTED, metadata={"msg": "Creating .infofile.json"})
+         )
+         infofile_path = self.dir_client.join_paths(
+             self.local_backup_source_path, ".infofile.json"
+         )
+         infofile_exists = await self.dir_client.path_exists(infofile_path)
+
+         if infofile_exists:
+             self.log_client.debug(
+                 DebugArgs(LogStatus.ALERT, metadata={"msg": "Updating .infofile.json"})
+             )
+             self.infofile = await self.files_client.read(
+                 ReadArgs(file_path=infofile_path, mode="json")
+             )  # type: ignore
+             return
+
+         for folder, files in self.grouped_files_by_folder.items():
+             should_pass = len(files) == 0
+             should_reset = any(
+                 f.endswith(".RESET.txt") or "/.RESET.txt" in f for f in files
+             )
+             self.infofile[folder] = {  # type: ignore
+                 "pass": should_pass,
+                 "reset": should_reset,
+                 "failed": [],
+             }
+         self.log_client.info(
+             InfoArgs(
+                 LogStatus.SUCCESSFUL, metadata={"msg": "Infofile created successfully"}
+             )
+         )
+         for dir in self.infofile:
+             self.log_client.debug(
+                 DebugArgs(
+                     LogStatus.ALERT,
+                     metadata={
+                         "msg": (
+                             f"{dir}: Pass {boolean_to_emoji(self.infofile[dir]['pass'])} "  # type: ignore
+                             f"Reset {boolean_to_emoji(self.infofile[dir]['reset'])}"  # type: ignore
+                         )
+                     },
+                 )
+             )
+
+     async def prepare_backup(self):
+         self.log_client.method_name = "prepare_backup"
+         self.log_client.debug(
+             DebugArgs(
+                 LogStatus.STARTED,
+                 metadata={"msg": "* Extracting folders from local backup directory *"},
+             )
+         )
+         # Check if the local backup directory exists. If it doesn't exist, raise an error.
+         local_backup_path_exists = await self.dir_client.path_exists(
+             self.local_backup_source_path
+         )
+         if not local_backup_path_exists:
+             raise BackupSourcePathError(
+                 reason=f"'{self.local_backup_source_path}' doesn't exist."
+             )
+         # Create the destination directory if it doesn't exist
+         complete_destination_path = (
+             self.sharepoint_destination_path
+             + "/"
+             + UploadBackupToSharepointUsecase.LOCAL_BACKUP_NAME
+         )
+         async with self.sharepoint_client:
+             await self.sharepoint_client.create_folder(
+                 SpCreateFolderArgs(folder_relative_url=complete_destination_path)
+             )
+         # Check out the directories and the files inside each one
+         self.grouped_files_by_folder = await self._group_files_by_folder()
+         # Create or read the info file
+         await self._create_or_update_infofile()
+         # Replicate local directories in the sharepoint directory.
+         for folder in self.grouped_files_by_folder.keys():
+             should_pass = self.infofile[folder]["pass"]  # type: ignore
+             should_reset = self.infofile[folder]["reset"]  # type: ignore
+             if should_reset:
+                 self.log_client.warning(
+                     WarningArgs(
+                         LogStatus.ALERT,
+                         metadata={
+                             "msg": f"The '{folder}' folder will be reset before the synchronization as it contains the .RESET.txt file ❗️"
+                         },
+                     )
+                 )
+             if should_pass:
+                 self.log_client.debug(
+                     DebugArgs(
+                         LogStatus.ALERT,
+                         metadata={
+                             "msg": f"The '{folder}' folder will not be synchronised as it contains the .PASS.txt file 👻"
+                         },
+                     )
+                 )
+                 continue
+             async with self.sharepoint_client:
+                 await self.sharepoint_client.create_folder(
+                     SpCreateFolderArgs(
+                         folder_relative_url=complete_destination_path + "/" + folder
+                     )
+                 )
+             self.grouped_files_by_folder[folder] = {  # type: ignore
+                 f
+                 for f in self.grouped_files_by_folder[folder]
+                 if not f.endswith(".PASS.txt") and not f.endswith(".RESET.txt")
+             }
+         self.sharepoint_destination_path = complete_destination_path
+
+         self.log_client.debug(
+             DebugArgs(LogStatus.SUCCESSFUL, metadata={"msg": "Ready to migrate"})
+         )
+
+     async def migrate_files(self) -> list:
+         self.log_client.method_name = "migrate_files"
+         self.log_client.debug(
+             DebugArgs(
+                 LogStatus.STARTED, metadata={"msg": "* Counting files to migrate *"}
+             )
+         )
+         tasks = []
+         local_files: set
+         for folder_name, local_files in self.grouped_files_by_folder.items():  # type: ignore
+             should_pass = self.infofile[folder_name]["pass"]  # type: ignore
+             if should_pass:
+                 continue
+
+             count_files_in_dir = await count_files_in_directory(
+                 self.local_backup_source_path, folder_name
+             )
+             if count_files_in_dir == 0:
+                 self.log_client.debug(
+                     DebugArgs(
+                         LogStatus.ALERT,
+                         metadata={"msg": f"The folder '{folder_name}' is empty ⚠️"},
+                     )
+                 )
+                 continue
+             # Check out the pending files and add the failed files
+             self.log_client.debug(
+                 DebugArgs(
+                     LogStatus.ALERT,
+                     metadata={
+                         "msg": f"Checking files to upload in '{folder_name}' directory 👀"
+                     },
+                 )
+             )
+             async with self.sharepoint_client:
+                 pending_files, summary_msg = await get_pending_files_for_folder(
+                     self.sharepoint_client,
+                     self.dir_client,
+                     self.sharepoint_destination_path,
+                     local_files,
+                     set(self.infofile[folder_name]["failed"]),  # type: ignore
+                 )
+             self.log_client.debug(
+                 DebugArgs(
+                     LogStatus.ALERT,
+                     metadata={"msg": summary_msg},
+                 )
+             )
+             # Update the files to upload
+             self.grouped_files_by_folder[folder_name] = [
+                 f
+                 for f in local_files
+                 if self.dir_client.get_basename(f) in pending_files
+             ]
+
+             if len(pending_files) == 0:
+                 self.log_client.debug(
+                     DebugArgs(
+                         LogStatus.ALERT,
+                         metadata={
+                             "msg": f"All the files in '{folder_name}' have already been synchronised"
+                         },
+                     )
+                 )
+                 continue
+             # Export the files to SharePoint
+             for file_name in self.grouped_files_by_folder[folder_name]:  # type: ignore
+                 if file_name in [".RESET.txt", ".PASS.txt"]:
+                     continue
+                 tasks.append(self._upload_file_to_sharepoint(folder_name, file_name))
+         self.log_client.debug(
+             DebugArgs(
+                 LogStatus.SUCCESSFUL,
+                 metadata={"msg": "* The reviewing process has finished *"},
+             )
+         )
+         return tasks
+
+     async def _upload_file_to_sharepoint(self, folder_name, file_name) -> dict:
+         """Uploads a single file to SharePoint and logs progress."""
+         self.log_client.method_name = "_upload_file_to_sharepoint"
+         self.log_client.debug(
+             DebugArgs(
+                 LogStatus.STARTED,
+                 metadata={"msg": f"Uploading file '{file_name}' from '{folder_name}' "},
+             )
+         )
+         # Retrieve the content from the file in the local directory
+         file_path = self.dir_client.join_paths(self.local_backup_source_path, file_name)
+         file_content = await self.files_client.read(ReadArgs(file_path, mode="rb"))
+         # Upload the local file to the SharePoint directory
+         uploaded, error_message = None, ""
+         try:
+             sharepoint_client = SharepointClient(
+                 SharepointClientInitArgs(
+                     self.sharepoint_config, client_name="sharepoint_rest_api"
+                 )
+             )
+         except ConnectionError as error:
+             self.log_client.error(
+                 ErrorArgs(
+                     LogStatus.ERROR,
+                     metadata={"msg": error.__str__()},
+                 )
+             )
+             raise SharepointClientError(error)
+         async with sharepoint_client:
+             try:
+                 relative_folder = "/".join(file_name.split("/")[:-1])
+                 folder_url = (
+                     f"{self.sharepoint_destination_path}/{relative_folder}"
+                     if relative_folder
+                     else self.sharepoint_destination_path
+                 )
+                 await sharepoint_client.create_folder(
+                     SpCreateFolderArgs(folder_relative_url=folder_url)
+                 )
+
+                 await sharepoint_client.upload_file(
+                     SpUploadFileArgs(
+                         folder_relative_url=folder_url,
+                         file_content=file_content,  # type: ignore
+                         file_name=self.dir_client.get_basename(file_name),
+                     )
+                 )
+                 uploaded = True
+                 # Remove the file from the source directory if RESET is true
+                 should_reset = self.infofile[folder_name]["reset"]  # type: ignore
+                 if should_reset:
+                     source_file_path = self.dir_client.join_paths(
+                         self.local_backup_source_path, file_name
+                     )
+                     await self.dir_client.remove_file(source_file_path)
+                     self.log_client.debug(
+                         DebugArgs(
+                             LogStatus.ALERT,
+                             metadata={
+                                 "msg": f"File {file_name} has been deleted from '{folder_name}'"
+                             },
+                         )
+                     )
+             except ConnectionError as error:
+                 error_message = str(error)
+                 uploaded = False
+                 # Add the file to the failed files list
+                 self.log_client.debug(
+                     DebugArgs(
+                         LogStatus.ALERT,
+                         metadata={"msg": f"{file_name} could not be uploaded 🚨"},
+                     )
+                 )
+                 self.infofile[folder_name]["failed"].append(file_name)  # type: ignore
+         self.log_client.debug(
+             DebugArgs(
+                 LogStatus.SUCCESSFUL if uploaded else LogStatus.ALERT,
+                 metadata={
+                     "msg": show_upload_result(uploaded, file_name, error_message)
+                 },
+             )
+         )
+         return {
+             "parent_folder": folder_name,
+             "file_name": file_name,
+             "uploaded": uploaded,
+             "error_message": error_message,
+         }
+
+     async def execute(self):
+         """Exports all files from the local backup folder to SharePoint cloud."""
+         self.log_client.method_name = "execute"
+         self.log_client.info(
+             InfoArgs(LogStatus.STARTED, metadata={"msg": "Starting backup process"})
+         )
+         start_time = time()
+         await self.prepare_backup()
+         tasks = await self.migrate_files()
+         already_migrated = (
+             len(tasks) == 0
+         )  # If there are no pending tasks, everything has already been migrated.
+         results = await asyncio.gather(*tasks, return_exceptions=True)
+         success = calculate_percentage_uploaded(results, len(tasks))  # type: ignore
+         if success < 100.0 and not already_migrated:
+             self.log_client.method_name = "execute"
+             self.log_client.warning(
+                 WarningArgs(
+                     LogStatus.FAILED,
+                     metadata={
+                         "msg": (
+                             "Not all the files have been uploaded ⚠️. "
+                             f"Files failed to upload: {(1 - success / 100):.2%}"
+                         )
+                     },
+                 )
+             )
+             # Save any failed files for exporting in the next migration
+             await save_file(self.files_client, ".infofile.json", self.infofile, "json")
+             await save_file(
+                 self.files_client,
+                 "BACKUP_LOG_HISTORY.log",
+                 "\n".join(self.log_client.log_history),
+                 "w",
+             )
+             raise BackupUploadError(
+                 reason="Not all the files have been uploaded successfully."
+             )
+         else:
+             end_time = time()
+             backup_time = end_time - start_time
+             self.log_client.method_name = "execute"
+             self.log_client.debug(
+                 DebugArgs(
+                     LogStatus.SUCCESSFUL,
+                     metadata={
+                         "msg": (
+                             f"Migration time: {backup_time:.2f} seconds ✨. "
+                             "All the files were uploaded successfully 🎉"
+                         )
+                     },
+                 )
+             )
+             await save_file(self.files_client, ".infofile.json", self.infofile, "json")
+             await save_file(
+                 self.files_client,
+                 "BACKUP_LOG_HISTORY.log",
+                 "\n".join(self.log_client.log_history),
+                 "w",
+             )
+             return parse_execute_response(results)  # type: ignore
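
For orientation, here is a minimal sketch of how this use case might be driven from application code. It assumes the UploadBackupToSharepointUseCaseInput dataclass exposes the four fields read in __init__ above (sharepoint_config, local_backup_source_path, sharepoint_destination_path, logger); the exact shape of the SharePoint credentials object lives in saviia_backup_api_types.py and is not shown here, and the paths are placeholder values.

# Hypothetical driver -- a sketch, not part of the package.
import logging

from saviialib.services.backup.use_cases.upload_backup_to_sharepoint import (
    UploadBackupToSharepointUsecase,
)
from saviialib.services.backup.use_cases.types.upload_backup_to_sharepoint_types import (
    UploadBackupToSharepointUseCaseInput,
)


async def run_backup(sharepoint_config) -> dict:
    # sharepoint_config: the credentials object later handed to SharepointClientInitArgs.
    use_case = UploadBackupToSharepointUsecase(
        UploadBackupToSharepointUseCaseInput(
            sharepoint_config=sharepoint_config,
            local_backup_source_path="/data/saviia/backup",          # placeholder
            sharepoint_destination_path="Shared Documents/backups",  # placeholder
            logger=logging.getLogger("saviia.backup"),
        )
    )
    # Raises BackupSourcePathError, BackupEmptyError or BackupUploadError on failure.
    return await use_case.execute()

# Run with: asyncio.run(run_backup(my_sharepoint_config))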
saviialib/services/backup/utils/__init__.py
@@ -0,0 +1,3 @@
+ from ...thies.utils.update_thies_data_utils import parse_execute_response
+
+ __all__ = ["parse_execute_response"]
saviialib/services/backup/utils/upload_backup_to_sharepoint_utils.py
@@ -0,0 +1,100 @@
+ from typing import List, Dict
+ from saviialib.general_types.error_types.api.saviia_api_error_types import (
+     BackupSourcePathError,
+ )
+ from saviialib.libs.zero_dependency.utils.booleans_utils import boolean_to_emoji
+ from saviialib.libs.directory_client import DirectoryClient, DirectoryClientArgs
+ from saviialib.libs.sharepoint_client import (
+     SpListFilesArgs,
+ )
+ from saviialib.libs.files_client import (
+     WriteArgs,
+ )
+
+ dir_client = DirectoryClient(DirectoryClientArgs(client_name="os_client"))
+
+
+ async def get_pending_files_for_folder(
+     sharepoint_client,
+     dir_client,
+     sharepoint_path: str,
+     local_files: set[str],
+     failed_files: set[str],
+ ) -> tuple[set[str], str]:
+     folders = extract_folders_from_files(local_files)
+     sharepoint_files = set()
+
+     async with sharepoint_client:
+         for folder in folders:
+             files = await sharepoint_client.list_files(
+                 SpListFilesArgs(f"{sharepoint_path}/{folder}")
+             )
+             sharepoint_files.update(x["Name"] for x in files["value"])  # type: ignore
+
+     local_basenames = {dir_client.get_basename(f) for f in local_files}
+     pending_files = local_basenames.difference(sharepoint_files).union(failed_files)
+     summary_msg = (
+         f"SharePoint Files: {len(sharepoint_files)}. "
+         f"Local Files: {len(local_files)}. "
+         f"Failed files: {len(failed_files)}. "
+         f"Pending Files: {len(pending_files)}. "
+     )
+     return pending_files, summary_msg
+
+
+ def parse_execute_response(results: List[Dict]) -> Dict[str, int]:
+     try:
+         return {
+             "new_files": len(
+                 [item["file_name"] for item in results if item.get("uploaded")]  # type: ignore
+             ),
+         }
+     except (IsADirectoryError, AttributeError, ConnectionError) as error:
+         raise BackupSourcePathError(reason=error)
+
+
+ def show_upload_result(uploaded: bool, file_name: str, error_message: str = "") -> str:
+     status = boolean_to_emoji(uploaded)
+     message = (
+         "was uploaded successfully"
+         if uploaded
+         else f"failed to upload. Error: {error_message}"
+     )
+     result = f"File {file_name} {message} {status}"
+     return result
+
+
+ def calculate_percentage_uploaded(results: List[Dict], total_files: int) -> float:
+     uploaded_count = sum(
+         1 for result in results if isinstance(result, dict) and result.get("uploaded")
+     )
+     return (uploaded_count / total_files) * 100 if total_files > 0 else 0
+
+
+ async def count_files_in_directory(base_path: str, folder_name: str) -> int:
+     full_path = dir_client.join_paths(base_path, folder_name)
+     count = 0
+     tree = await dir_client.walk(full_path)
+     for root, _, files in tree:
+         count += len(files)
+     return count
+
+
+ def extract_folders_from_files(files: set[str]) -> set[str]:
+     folders = set()
+     for f in files:
+         parts = f.split("/")
+         if len(parts) > 1:
+             for i in range(1, len(parts)):
+                 folders.add("/".join(parts[:i]))
+     return folders
+
+
+ async def save_file(files_client, file_name, file_content, mode):
+     await files_client.write(
+         WriteArgs(
+             file_name=file_name,
+             file_content=file_content,  # type: ignore
+             mode=mode,
+         )
+     )
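
To make the helper semantics concrete, a small illustrative snippet with made-up values: extract_folders_from_files returns every intermediate folder of the relative paths it is given, and calculate_percentage_uploaded only counts dict results whose "uploaded" flag is truthy, so exceptions returned by asyncio.gather do not count toward the success rate.

# Illustrative only -- exercising the helpers above with made-up values.
from saviialib.services.backup.utils.upload_backup_to_sharepoint_utils import (
    calculate_percentage_uploaded,
    extract_folders_from_files,
)

paths = {"thies/AVG/2024-01-01.bin", "thies/EXT/2024-01-01.bin", "README.txt"}
print(extract_folders_from_files(paths))
# {'thies', 'thies/AVG', 'thies/EXT'} -- "README.txt" has no parent folder, so it adds nothing

results = [
    {"file_name": "a.bin", "uploaded": True},
    {"file_name": "b.bin", "uploaded": False, "error_message": "timeout"},
    ConnectionError("gather can return exceptions"),  # ignored by the counter
]
print(calculate_percentage_uploaded(results, len(results)))  # 33.33...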
saviialib/services/netcamera/api.py
@@ -0,0 +1,30 @@
+ from .controllers.get_media_files import (
+     GetMediaFilesController,
+     GetMediaFilesControllerInput,
+ )
+ from typing import Dict, Tuple, Any
+ from saviialib.general_types.api.saviia_netcamera_api_types import SaviiaNetcameraConfig
+
+
+ class NetcameraAPI:
+     """This class provides methods for interacting with network cameras and retrieving
+     files using streaming services."""
+
+     def __init__(self, config: SaviiaNetcameraConfig):
+         self.config = config
+
+     async def get_media_files(
+         self, cameras: Dict[str, Tuple[str, int]]
+     ) -> Dict[str, Any]:
+         """Retrieve media files from network cameras.
+
+         :param cameras: Dictionary where the key is the identifier of the camera, and the
+             value is a tuple which contains the service IP address and the connection port.
+             Example: {'cam_01': ('192.168.1.10', 8080), ...}
+         :type cameras: dict
+         :return response: A dictionary containing information about the extraction operation.
+         :rtype: dict
+         """
+         controller = GetMediaFilesController(GetMediaFilesControllerInput(config=self.config, cameras=cameras))
+         response = await controller.execute()
+         return response.__dict__
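
A hedged usage sketch of this API follows. The cameras mapping mirrors the docstring example above; the constructor arguments of SaviiaNetcameraConfig are defined in saviia_netcamera_api_types.py and are only assumed here, so the config object is passed in opaquely.

# Sketch only -- how NetcameraAPI might be called from application code.
from saviialib.services.netcamera.api import NetcameraAPI


async def pull_camera_media(config) -> dict:
    # config: a SaviiaNetcameraConfig instance built elsewhere (fields not shown in this diff).
    api = NetcameraAPI(config)
    cameras = {
        "cam_01": ("192.168.1.10", 8080),  # identifier -> (service IP, port), as in the docstring
        "cam_02": ("192.168.1.11", 8080),
    }
    response = await api.get_media_files(cameras)
    # response carries 'status', 'message' and 'metadata' from the controller output
    return response

# Run with: asyncio.run(pull_camera_media(my_netcamera_config))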
saviialib/services/netcamera/controllers/get_media_files.py
@@ -0,0 +1,40 @@
+ from .types.get_media_files_types import (
+     GetMediaFilesControllerInput,
+     GetMediaFilesControllerOutput,
+ )
+ from saviialib.services.netcamera.use_cases.get_media_files import (
+     GetMediaFilesUseCase,
+     GetMediaFilesUseCaseInput,
+ )
+ from http import HTTPStatus
+ from saviialib.general_types.error_types.api.saviia_netcamera_error_types import (
+     NetcameraConnectionError,
+ )
+
+
+ class GetMediaFilesController:
+     def __init__(self, input: GetMediaFilesControllerInput) -> None:
+         self.use_case = GetMediaFilesUseCase(
+             GetMediaFilesUseCaseInput(
+                 cameras=input.cameras,
+                 username=input.config.username,
+                 password=input.config.password,
+                 protocol=input.config.protocol,
+                 logger=input.config.logger,
+                 destination_path=input.config.destination_path,
+             )
+         )
+
+     async def execute(self) -> GetMediaFilesControllerOutput:
+         try:
+             _ = await self.use_case.execute()
+             return GetMediaFilesControllerOutput(
+                 message="The extraction of media files was successful!",
+                 status=HTTPStatus.OK.value,
+             )
+         except NetcameraConnectionError as error:
+             return GetMediaFilesControllerOutput(
+                 message="An unexpected error occurred while extracting photos and videos.",
+                 status=HTTPStatus.GATEWAY_TIMEOUT.value,
+                 metadata={"error": error.__str__()},
+             )
saviialib/services/netcamera/controllers/types/get_media_files_types.py
@@ -0,0 +1,16 @@
+ from dataclasses import dataclass, field
+ from typing import Dict, Tuple
+ from saviialib.general_types.api.saviia_netcamera_api_types import SaviiaNetcameraConfig
+
+
+ @dataclass
+ class GetMediaFilesControllerInput:
+     config: SaviiaNetcameraConfig
+     cameras: Dict[str, Tuple[str, int]]
+
+
+ @dataclass
+ class GetMediaFilesControllerOutput:
+     status: int
+     message: str
+     metadata: Dict = field(default_factory=dict)