saviialib 0.9.1__py3-none-any.whl → 1.6.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of saviialib might be problematic.
- saviialib/__init__.py +73 -3
- saviialib/general_types/api/__init__.py +0 -3
- saviialib/general_types/api/{epii_api_types.py → saviia_api_types.py} +4 -38
- saviialib/general_types/api/saviia_backup_api_types.py +24 -0
- saviialib/general_types/api/saviia_netcamera_api_types.py +11 -0
- saviialib/general_types/api/saviia_shakes_api_types.py +21 -0
- saviialib/general_types/api/saviia_thies_api_types.py +31 -0
- saviialib/general_types/error_types/api/{epii_api_error_types.py → saviia_api_error_types.py} +20 -0
- saviialib/general_types/error_types/api/saviia_netcamera_error_types.py +7 -0
- saviialib/general_types/error_types/common/common_types.py +9 -0
- saviialib/libs/directory_client/__init__.py +4 -0
- saviialib/libs/directory_client/client/os_client.py +55 -0
- saviialib/libs/directory_client/directory_client.py +44 -0
- saviialib/libs/directory_client/directory_client_contract.py +40 -0
- saviialib/libs/directory_client/types/directory_client_types.py +6 -0
- saviialib/libs/ffmpeg_client/__init__.py +8 -0
- saviialib/libs/ffmpeg_client/clients/ffmpeg_asyncio_client.py +101 -0
- saviialib/libs/ffmpeg_client/ffmpeg_client.py +25 -0
- saviialib/libs/ffmpeg_client/ffmpeg_client_contract.py +12 -0
- saviialib/libs/ffmpeg_client/types/ffmpeg_client_types.py +28 -0
- saviialib/libs/files_client/__init__.py +2 -2
- saviialib/libs/files_client/clients/aiofiles_client.py +26 -3
- saviialib/libs/files_client/clients/csv_client.py +42 -0
- saviialib/libs/files_client/files_client.py +5 -7
- saviialib/libs/files_client/types/files_client_types.py +5 -4
- saviialib/libs/ftp_client/clients/aioftp_client.py +13 -6
- saviialib/libs/ftp_client/clients/ftplib_client.py +58 -0
- saviialib/libs/ftp_client/ftp_client.py +8 -5
- saviialib/libs/ftp_client/ftp_client_contract.py +2 -2
- saviialib/libs/log_client/__init__.py +19 -0
- saviialib/libs/log_client/log_client.py +46 -0
- saviialib/libs/log_client/log_client_contract.py +28 -0
- saviialib/libs/log_client/logging_client/logging_client.py +58 -0
- saviialib/libs/log_client/types/log_client_types.py +47 -0
- saviialib/libs/log_client/utils/log_client_utils.py +6 -0
- saviialib/libs/sftp_client/__init__.py +8 -0
- saviialib/libs/sftp_client/clients/asyncssh_sftp_client.py +83 -0
- saviialib/libs/sftp_client/sftp_client.py +26 -0
- saviialib/libs/sftp_client/sftp_client_contract.py +13 -0
- saviialib/libs/sftp_client/types/sftp_client_types.py +24 -0
- saviialib/libs/sharepoint_client/__init__.py +2 -0
- saviialib/libs/sharepoint_client/clients/sharepoint_rest_api.py +31 -6
- saviialib/libs/sharepoint_client/sharepoint_client.py +25 -1
- saviialib/libs/sharepoint_client/sharepoint_client_contract.py +5 -0
- saviialib/libs/sharepoint_client/types/sharepoint_client_types.py +5 -0
- saviialib/libs/zero_dependency/utils/booleans_utils.py +2 -0
- saviialib/libs/zero_dependency/utils/datetime_utils.py +1 -1
- saviialib/libs/zero_dependency/utils/strings_utils.py +5 -0
- saviialib/services/backup/api.py +36 -0
- saviialib/services/backup/controllers/__init__.py +0 -0
- saviialib/services/{epii → backup}/controllers/types/__init__.py +1 -1
- saviialib/services/{epii → backup}/controllers/types/upload_backup_to_sharepoint_types.py +4 -2
- saviialib/services/{epii → backup}/controllers/upload_backup_to_sharepoint.py +9 -8
- saviialib/services/backup/use_cases/constants/upload_backup_to_sharepoint_constants.py +5 -0
- saviialib/services/{epii → backup}/use_cases/types/__init__.py +1 -1
- saviialib/services/{epii → backup}/use_cases/types/upload_backup_to_sharepoint_types.py +4 -2
- saviialib/services/backup/use_cases/upload_backup_to_sharepoint.py +474 -0
- saviialib/services/backup/utils/__init__.py +3 -0
- saviialib/services/backup/utils/upload_backup_to_sharepoint_utils.py +100 -0
- saviialib/services/netcamera/api.py +30 -0
- saviialib/services/netcamera/controllers/get_media_files.py +40 -0
- saviialib/services/netcamera/controllers/types/get_media_files_types.py +16 -0
- saviialib/services/netcamera/use_cases/get_media_files.py +76 -0
- saviialib/services/netcamera/use_cases/types/get_media_files_types.py +18 -0
- saviialib/services/shakes/__init__.py +0 -0
- saviialib/services/shakes/api.py +31 -0
- saviialib/services/shakes/controllers/get_miniseed_files.py +48 -0
- saviialib/services/shakes/controllers/types/get_miniseed_files_types.py +16 -0
- saviialib/services/shakes/use_cases/get_miniseed_files.py +79 -0
- saviialib/services/shakes/use_cases/types/get_miniseed_files_types.py +18 -0
- saviialib/services/shakes/use_cases/utils/get_miniseed_files_utils.py +11 -0
- saviialib/services/thies/__init__.py +0 -0
- saviialib/services/thies/api.py +42 -0
- saviialib/services/thies/constants/update_thies_data_constants.py +67 -0
- saviialib/services/{epii → thies}/controllers/types/update_thies_data_types.py +5 -4
- saviialib/services/{epii → thies}/controllers/update_thies_data.py +18 -6
- saviialib/services/thies/use_cases/components/create_thies_statistics_file.py +115 -0
- saviialib/services/thies/use_cases/components/thies_bp.py +442 -0
- saviialib/services/{epii → thies}/use_cases/types/update_thies_data_types.py +10 -2
- saviialib/services/thies/use_cases/update_thies_data.py +391 -0
- saviialib-1.6.0.dist-info/METADATA +126 -0
- saviialib-1.6.0.dist-info/RECORD +96 -0
- {saviialib-0.9.1.dist-info → saviialib-1.6.0.dist-info}/WHEEL +1 -1
- saviialib/services/epii/api.py +0 -80
- saviialib/services/epii/use_cases/constants/update_thies_data_constants.py +0 -5
- saviialib/services/epii/use_cases/constants/upload_backup_to_sharepoint_constants.py +0 -5
- saviialib/services/epii/use_cases/update_thies_data.py +0 -171
- saviialib/services/epii/use_cases/upload_backup_to_sharepoint.py +0 -241
- saviialib/services/epii/utils/__init__.py +0 -3
- saviialib/services/epii/utils/upload_backup_to_sharepoint_utils.py +0 -102
- saviialib-0.9.1.dist-info/METADATA +0 -120
- saviialib-0.9.1.dist-info/RECORD +0 -49
- /saviialib/{services/epii → libs/log_client/types}/__init__.py +0 -0
- /saviialib/services/{epii/controllers → backup}/__init__.py +0 -0
- /saviialib/services/{epii → thies}/utils/update_thies_data_utils.py +0 -0
- {saviialib-0.9.1.dist-info → saviialib-1.6.0.dist-info/licenses}/LICENSE +0 -0
saviialib/services/epii/api.py
DELETED
@@ -1,80 +0,0 @@
-from typing import Any, Dict
-
-from .controllers.types.update_thies_data_types import UpdateThiesDataControllerInput
-from .controllers.types.upload_backup_to_sharepoint_types import (
-    UploadBackupToSharepointControllerInput,
-)
-from .controllers.update_thies_data import UpdateThiesDataController
-from .controllers.upload_backup_to_sharepoint import UploadBackupToSharepointController
-from saviialib.general_types.api.epii_api_types import (
-    EpiiUpdateThiesConfig,
-    EpiiSharepointBackupConfig,
-    EpiiAPIConfig,
-)
-
-
-class EpiiAPI:
-    """
-    EpiiAPI is a service class that provides methods to interact with Patagonia Center system.
-    """
-
-    def __init__(self, config: EpiiAPIConfig):
-        self.ftp_port = config.ftp_port
-        self.ftp_host = config.ftp_host
-        self.ftp_user = config.ftp_user
-        self.ftp_password = config.ftp_password
-        self.sharepoint_client_id = config.sharepoint_client_id
-        self.sharepoint_client_secret = config.sharepoint_client_secret
-        self.sharepoint_tenant_id = config.sharepoint_tenant_id
-        self.sharepoint_tenant_name = config.sharepoint_tenant_name
-        self.sharepoint_site_name = config.sharepoint_site_name
-
-    async def update_thies_data(self) -> Dict[str, Any]:
-        """
-        This method establishes a connection to an FTP server using the provided
-        credentials and updates data related to THIES Data Logger.
-
-        Returns:
-            response (dict): A dictionary representation of the API response.
-        """
-        config = EpiiUpdateThiesConfig(
-            ftp_port=self.ftp_port,
-            ftp_host=self.ftp_host,
-            ftp_user=self.ftp_user,
-            ftp_password=self.ftp_password,
-            sharepoint_client_id=self.sharepoint_client_id,
-            sharepoint_client_secret=self.sharepoint_client_secret,
-            sharepoint_site_name=self.sharepoint_site_name,
-            sharepoint_tenant_id=self.sharepoint_tenant_id,
-            sharepoint_tenant_name=self.sharepoint_tenant_name,
-        )
-        controller = UpdateThiesDataController(UpdateThiesDataControllerInput(config))
-        response = await controller.execute()
-        return response.__dict__
-
-    async def upload_backup_to_sharepoint(
-        self, local_backup_source_path: str, destination_folders: dict[str, str]
-    ) -> Dict[str, Any]:
-        """Migrate a backup folder from Home assistant to Sharepoint directory.
-        Args:
-            local_backup_source_path (str): Local path to backup.
-        Returns:
-            response (dict): A dictionary containing the response from the upload operation.
-            This dictionary will typically include information about the success or
-            failure of the upload, as well as any relevant metadata.
-        """
-        config = EpiiSharepointBackupConfig(
-            sharepoint_client_id=self.sharepoint_client_id,
-            sharepoint_client_secret=self.sharepoint_client_secret,
-            sharepoint_site_name=self.sharepoint_site_name,
-            sharepoint_tenant_id=self.sharepoint_tenant_id,
-            sharepoint_tenant_name=self.sharepoint_tenant_name,
-            local_backup_source_path=local_backup_source_path,
-            destination_folders=destination_folders
-        )
-
-        controller = UploadBackupToSharepointController(
-            UploadBackupToSharepointControllerInput(config)
-        )
-        response = await controller.execute()
-        return response.__dict__
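For reference, a minimal usage sketch of the EpiiAPI entry point removed here (its responsibilities move to the per-service api.py modules listed above). This is not taken from the package: the credential values are placeholders, and EpiiAPIConfig is assumed to accept these fields as keyword arguments, mirroring the attributes read in __init__.

import asyncio

from saviialib.general_types.api.epii_api_types import EpiiAPIConfig
from saviialib.services.epii.api import EpiiAPI  # 0.9.1 only; removed in 1.6.0

async def main() -> None:
    # Placeholder credentials; field names follow the attributes read in EpiiAPI.__init__.
    config = EpiiAPIConfig(
        ftp_host="192.0.2.10",
        ftp_port=21,
        ftp_user="thies",
        ftp_password="secret",
        sharepoint_client_id="client-id",
        sharepoint_client_secret="client-secret",
        sharepoint_tenant_id="tenant-id",
        sharepoint_tenant_name="contoso",
        sharepoint_site_name="rcer",
    )
    api = EpiiAPI(config)
    result = await api.update_thies_data()
    print(result)

asyncio.run(main())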
saviialib/services/epii/use_cases/constants/update_thies_data_constants.py
DELETED
@@ -1,5 +0,0 @@
-SHAREPOINT_BASE_URL = "/sites/uc365_CentrosyEstacionesRegionalesUC/Shared%20Documents/General/Test_Raspberry/THIES"
-SHAREPOINT_THIES_FOLDERS = ["AVG", "EXT"]
-
-FTP_SERVER_PATH_AVG_FILES = "ftp/thies/BINFILES/ARCH_AV1"
-FTP_SERVER_PATH_EXT_FILES = "ftp/thies/BINFILES/ARCH_EX1"
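A tiny illustration (not part of the package) of how the deleted update-THIES use case below combined these constants into SharePoint folder-relative URLs:

SHAREPOINT_BASE_URL = "/sites/uc365_CentrosyEstacionesRegionalesUC/Shared%20Documents/General/Test_Raspberry/THIES"
SHAREPOINT_THIES_FOLDERS = ["AVG", "EXT"]

# One folder-relative URL per THIES folder, as passed to SpListFilesArgs/SpUploadFileArgs.
for folder in SHAREPOINT_THIES_FOLDERS:
    print(f"{SHAREPOINT_BASE_URL}/{folder}")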
saviialib/services/epii/use_cases/update_thies_data.py
DELETED
@@ -1,171 +0,0 @@
-import saviialib.services.epii.use_cases.constants.update_thies_data_constants as c
-from saviialib.general_types.error_types.api.epii_api_error_types import (
-    SharePointFetchingError,
-    SharePointUploadError,
-    ThiesConnectionError,
-    ThiesFetchingError,
-)
-from saviialib.general_types.error_types.common import (
-    EmptyDataError,
-    FtpClientError,
-    SharepointClientError,
-)
-from saviialib.libs.ftp_client import (
-    FTPClient,
-    FtpClientInitArgs,
-    FtpListFilesArgs,
-    FtpReadFileArgs,
-)
-from saviialib.libs.sharepoint_client import (
-    SharepointClient,
-    SharepointClientInitArgs,
-    SpListFilesArgs,
-    SpUploadFileArgs,
-)
-from saviialib.services.epii.use_cases.types import (
-    FtpClientConfig,
-    SharepointConfig,
-    UpdateThiesDataUseCaseInput,
-)
-from saviialib.services.epii.utils import (
-    parse_execute_response,
-)
-
-
-class UpdateThiesDataUseCase:
-    def __init__(self, input: UpdateThiesDataUseCaseInput):
-        self.sharepoint_client = self._initialize_sharepoint_client(
-            input.sharepoint_config
-        )
-        self.thies_ftp_client = self._initialize_thies_ftp_client(input.ftp_config)
-        self.uploading = set()
-
-    def _initialize_sharepoint_client(
-        self, config: SharepointConfig
-    ) -> SharepointClient:
-        """Initialize the HTTP client."""
-        try:
-            return SharepointClient(
-                SharepointClientInitArgs(config, client_name="sharepoint_rest_api")
-            )
-        except ConnectionError as error:
-            raise SharepointClientError(error)
-
-    def _initialize_thies_ftp_client(self, config: FtpClientConfig) -> FTPClient:
-        """Initialize the FTP client."""
-        try:
-            return FTPClient(FtpClientInitArgs(config, client_name="aioftp_client"))
-        except RuntimeError as error:
-            raise FtpClientError(error)
-
-    async def fetch_cloud_file_names(self) -> set[str]:
-        """Fetch file names from the RCER cloud."""
-
-        try:
-            cloud_files = set()
-            async with self.sharepoint_client:
-                for folder in c.SHAREPOINT_THIES_FOLDERS:
-                    args = SpListFilesArgs(
-                        folder_relative_url=f"{c.SHAREPOINT_BASE_URL}/{folder}"
-                    )
-                    response = await self.sharepoint_client.list_files(args)
-                    cloud_files.update(
-                        {f"{folder}_{item['Name']}" for item in response["value"]}
-                    )
-            return cloud_files
-        except ConnectionError as error:
-            raise SharePointFetchingError(reason=error)
-
-    async def fetch_thies_file_names(self) -> set[str]:
-        """Fetch file names from the THIES FTP server."""
-        try:
-            avg_files = await self.thies_ftp_client.list_files(
-                FtpListFilesArgs(path=c.FTP_SERVER_PATH_AVG_FILES)
-            )
-            ext_files = await self.thies_ftp_client.list_files(
-                FtpListFilesArgs(path=c.FTP_SERVER_PATH_EXT_FILES)
-            )
-            return {f"AVG_{name}" for name in avg_files} | {
-                f"EXT_{name}" for name in ext_files
-            }
-        except ConnectionRefusedError as error:
-            raise ThiesConnectionError(reason=error)
-        except ConnectionAbortedError as error:
-            raise ThiesFetchingError(reason=error)
-
-    async def fetch_thies_file_content(self) -> dict[str, bytes]:
-        """Fetch the content of files from the THIES FTP server."""
-        try:
-            content_files = {}
-            for file in self.uploading:
-                origin, filename = file.split("_", 1)
-                file_path = (
-                    f"{c.FTP_SERVER_PATH_AVG_FILES}/{filename}"
-                    if origin == "AVG"
-                    else f"{c.FTP_SERVER_PATH_EXT_FILES}/{filename}"
-                )
-                content = await self.thies_ftp_client.read_file(
-                    FtpReadFileArgs(file_path)
-                )
-                content_files[file] = content  # Save the file with its prefix
-            return content_files
-        except ConnectionRefusedError as error:
-            raise ThiesConnectionError(reason=error)
-        except ConnectionAbortedError as error:
-            raise ThiesFetchingError(reason=error)
-
-    async def upload_thies_files_to_sharepoint(
-        self, files: dict
-    ) -> dict[str, list[str]]:
-        """Upload files to SharePoint and categorize the results."""
-        upload_results = {"failed_files": [], "new_files": []}
-
-        async with self.sharepoint_client:
-            for file, file_content in files.items():
-                try:
-                    folder, file_name = file.split("_", 1)
-                    args = SpUploadFileArgs(
-                        folder_relative_url=f"{c.SHAREPOINT_BASE_URL}/{folder}",
-                        file_content=file_content,
-                        file_name=file_name,
-                    )
-                    await self.sharepoint_client.upload_file(args)
-                    upload_results["new_files"].append(file)
-
-                except ConnectionError as error:
-                    upload_results["failed_files"].append(
-                        f"{file} (Error: {str(error)})"
-                    )
-
-        if upload_results["failed_files"]:
-            raise SharePointUploadError(
-                reason="Files failed to upload: "
-                + ", ".join(upload_results["failed_files"])
-            )
-
-        return upload_results
-
-    async def execute(self) -> dict:
-        """Synchronize data from the THIES Center to the cloud."""
-        try:
-            thies_files = await self.fetch_thies_file_names()
-        except RuntimeError as error:
-            raise FtpClientError(error)
-        try:
-            cloud_files = await self.fetch_cloud_file_names()
-        except RuntimeError as error:
-            raise SharepointClient(error)
-
-        self.uploading = thies_files - cloud_files
-        if not self.uploading:
-            raise EmptyDataError(reason="No files to upload.")
-
-        # Fetch the content of the files to be uploaded from THIES FTP Server
-        thies_fetched_files = await self.fetch_thies_file_content()
-
-        # Upload the fetched files to SharePoint and gather statistics
-        upload_statistics = await self.upload_thies_files_to_sharepoint(
-            thies_fetched_files
-        )
-
-        return parse_execute_response(thies_fetched_files, upload_statistics)
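The heart of UpdateThiesDataUseCase.execute() above is a set difference over origin-prefixed file names. A self-contained sketch of that rule, with made-up file names standing in for the FTP and SharePoint listings:

# Made-up file names; in the use case these come from the FTP and SharePoint listings.
thies_files = {"AVG_20240101.BIN", "AVG_20240102.BIN", "EXT_20240101.BIN"}
cloud_files = {"AVG_20240101.BIN", "EXT_20240101.BIN"}

uploading = thies_files - cloud_files  # only the files missing from the cloud
for name in sorted(uploading):
    origin, filename = name.split("_", 1)  # the "AVG"/"EXT" prefix selects the FTP path
    print(origin, filename)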
saviialib/services/epii/use_cases/upload_backup_to_sharepoint.py
DELETED
@@ -1,241 +0,0 @@
-import asyncio
-import os
-from time import time
-import saviialib.services.epii.use_cases.constants.upload_backup_to_sharepoint_constants as c
-from saviialib.general_types.error_types.api.epii_api_error_types import (
-    BackupEmptyError,
-    BackupSourcePathError,
-    BackupUploadError,
-)
-from saviialib.general_types.error_types.common import (
-    SharepointClientError,
-)
-from saviialib.libs.files_client import FilesClient, FilesClientInitArgs, ReadArgs
-from saviialib.libs.sharepoint_client import (
-    SharepointClient,
-    SharepointClientInitArgs,
-    SpUploadFileArgs,
-)
-from saviialib.services.epii.utils.upload_backup_to_sharepoint_utils import (
-    calculate_percentage_uploaded,
-    count_files_in_directory,
-    directory_exists,
-    extract_error_message,
-    parse_execute_response,
-    show_upload_result,
-)
-
-from .types.upload_backup_to_sharepoint_types import (
-    UploadBackupToSharepointUseCaseInput,
-)
-from .constants.upload_backup_to_sharepoint_constants import LOGGER
-
-
-class UploadBackupToSharepointUsecase:
-    def __init__(self, input: UploadBackupToSharepointUseCaseInput):
-        self.sharepoint_config = input.sharepoint_config
-        self.local_backup_source_path = input.local_backup_source_path
-        self.destination_folders = input.destination_folders
-        self.files_client = self._initialize_files_client()
-        self.log_history = []
-        self.grouped_files_by_folder = None
-        self.total_files = None
-
-    def _initialize_files_client(self):
-        return FilesClient(FilesClientInitArgs(client_name="aiofiles_client"))
-
-    async def _extract_filesnames_by_folder(self) -> dict[str, list[str]]:
-        """Groups files by their parent folder."""
-        backup_folder_exists = await asyncio.to_thread(
-            os.path.exists, self.local_backup_source_path
-        )
-        if not backup_folder_exists:
-            return {}
-        folder_names = await asyncio.to_thread(
-            os.listdir, self.local_backup_source_path
-        )
-        return {
-            folder_name: [
-                file_name
-                for file_name in await asyncio.to_thread(
-                    os.listdir, os.path.join(self.local_backup_source_path, folder_name)
-                )
-            ]
-            for folder_name in folder_names
-        }
-
-    def _save_log_history(self) -> None:
-        text_file = "\n".join(self.log_history)
-        log_history_filepath = "BACKUP_LOG_HISTORY.log"
-        with open(log_history_filepath, "w") as file:
-            file.write(text_file)
-
-    async def export_file_to_sharepoint(
-        self, folder_name: str, file_name: str, file_content: bytes
-    ) -> tuple[bool, str]:
-        """Uploads a file to the specified folder in SharePoint."""
-        uploaded = None
-        error_message = ""
-
-        try:
-            sharepoint_client = SharepointClient(
-                SharepointClientInitArgs(
-                    self.sharepoint_config, client_name="sharepoint_rest_api"
-                )
-            )
-        except ConnectionError as error:
-            raise SharepointClientError(error)
-
-        async with sharepoint_client:
-            try:
-                destination_folder = self.destination_folders.get(
-                    folder_name, folder_name
-                )
-                folder_url = f"{c.SHAREPOINT_BASE_URL}/{destination_folder}"
-                args = SpUploadFileArgs(
-                    folder_relative_url=folder_url,
-                    file_content=file_content,
-                    file_name=file_name,
-                )
-                await sharepoint_client.upload_file(args)
-                uploaded = True
-            except ConnectionError as error:
-                error_message = str(error)
-                uploaded = False
-
-        return uploaded, error_message
-
-    async def upload_and_log_progress_task(self, folder_name, file_name) -> dict:
-        """Task for uploads a file and logs progress."""
-        uploading_message = (
-            f"[BACKUP] Uploading file '{file_name}' from '{folder_name}' "
-        )
-        self.log_history.append(uploading_message)
-        LOGGER.debug(uploading_message)
-        file_path = os.path.join(self.local_backup_source_path, folder_name, file_name)
-        file_content = await self.files_client.read(ReadArgs(file_path, mode="rb"))
-        uploaded, error_message = await self.export_file_to_sharepoint(
-            folder_name, file_name, file_content
-        )
-        result_message = show_upload_result(uploaded, file_name)
-        LOGGER.debug(result_message)
-        self.log_history.append(result_message)
-        return {
-            "parent_folder": folder_name,
-            "file_name": file_name,
-            "uploaded": uploaded,
-            "error_message": error_message,
-        }
-
-    async def retry_upload_failed_files(self, results) -> None:
-        failed_files = [item for item in results if not item["uploaded"]]
-        tasks = []
-        retry_message = (
-            f"[BACKUP] Retrying upload for {len(failed_files)} failed files... 🚨"
-        )
-        self.log_history.append(retry_message)
-        LOGGER.debug(retry_message)
-        for file in failed_files:
-            tasks.append(
-                self.upload_and_log_progress_task(
-                    file["parent_folder"], file["file_name"]
-                )
-            )
-        results = await asyncio.gather(*tasks, return_exceptions=True)
-        success = calculate_percentage_uploaded(results, self.total_files)
-        if success < 100.0:
-            raise BackupUploadError(reason=extract_error_message(results, success))
-        else:
-            successful_upload_retry = (
-                "[BACKUP] All files uploaded successfully after retry."
-            )
-            self.log_history.append(successful_upload_retry)
-            LOGGER.debug(successful_upload_retry)
-        self._save_log_history()
-        return parse_execute_response(results)
-
-    async def execute(self):
-        """Exports all files from the local backup folder to SharePoint cloud."""
-        self.grouped_files_by_folder = await self._extract_filesnames_by_folder()
-        self.total_files = sum(
-            len(files) for files in self.grouped_files_by_folder.values()
-        )
-        tasks = []
-        start_time = time()
-
-        # Check if the local path exists in the main directory
-        if not await directory_exists(self.local_backup_source_path):
-            raise BackupSourcePathError(
-                reason=f"'{self.local_backup_source_path}' doesn't exist."
-            )
-
-        # Check if the current folder only have files.
-        items = [
-            item
-            for item in await asyncio.to_thread(
-                os.listdir, self.local_backup_source_path
-            )
-        ]
-        for item in items:
-            folder_included = item in self.destination_folders.keys()
-            is_file = not await asyncio.to_thread(
-                os.path.isdir, os.path.join(self.local_backup_source_path, item)
-            )
-
-            if not folder_included and not is_file:
-                raise BackupSourcePathError(
-                    reason=(
-                        f"'{item}' must be included in the destination folders dictionary",
-                    )
-                )
-            elif folder_included and is_file:
-                print(folder_included, is_file)
-                raise BackupSourcePathError(reason=(f"'{item}' must be a directory.",))
-
-        if self.total_files == 0:
-            no_files_message = (
-                f"[BACKUP] {self.local_backup_source_path} has no files ⚠️"
-            )
-            self.log_history.append(no_files_message)
-            LOGGER.debug(no_files_message)
-            raise BackupEmptyError
-        # Create task for each file stored in the the local backup folder.
-        for folder_name in self.grouped_files_by_folder:
-            if (
-                await count_files_in_directory(
-                    self.local_backup_source_path, folder_name
-                )
-                == 0
-            ):
-                empty_folder_message = f"[BACKUP] The folder '{folder_name}' is empty ⚠️"
-                LOGGER.debug(empty_folder_message)
-                self.log_history.append(empty_folder_message)
-                continue
-            extracting_files_message = (
-                "[BACKUP]" + f" Extracting files from '{folder_name} ".center(15, "*")
-            )
-            self.log_history.append(extracting_files_message)
-            LOGGER.debug(extracting_files_message)
-            for file_name in self.grouped_files_by_folder[folder_name]:
-                tasks.append(self.upload_and_log_progress_task(folder_name, file_name))
-
-        # Execution of multiple asynchronous tasks for files migration.
-        results = await asyncio.gather(*tasks, return_exceptions=True)
-        success = calculate_percentage_uploaded(results, self.total_files)
-        if success < 100.0:
-            await self.retry_upload_failed_files(results)
-        else:
-            end_time = time()
-            backup_time = end_time - start_time
-            successful_backup_message = (
-                f"[BACKUP] Migration time: {backup_time:.2f} seconds ✨"
-            )
-            self.log_history.append(successful_backup_message)
-
-            finished_backup_message = (
-                "[BACKUP] All the files were uploaded successfully 🎉"
-            )
-            self.log_history.append(finished_backup_message)
-
-        self._save_log_history()
-        return parse_execute_response(results)
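UploadBackupToSharepointUsecase.execute() above fans the per-file uploads out with asyncio.gather and retries once when the computed success rate falls below 100%. A self-contained sketch of that gather-then-retry pattern, with a made-up folder layout and a stub coroutine standing in for the real SharePoint upload task:

import asyncio

async def upload_stub(folder: str, name: str) -> dict:
    # Stand-in for upload_and_log_progress_task(); files ending in ".bad" pretend to fail.
    return {"parent_folder": folder, "file_name": name, "uploaded": not name.endswith(".bad")}

async def main() -> None:
    grouped = {"config": ["a.yaml", "b.bad"], "media": ["c.jpg"]}  # made-up backup layout
    total = sum(len(names) for names in grouped.values())
    tasks = [upload_stub(folder, name) for folder, names in grouped.items() for name in names]
    results = await asyncio.gather(*tasks, return_exceptions=True)
    uploaded = sum(1 for r in results if isinstance(r, dict) and r.get("uploaded"))
    success = (uploaded / total) * 100 if total else 0
    if success < 100.0:
        # The use case retries only the failed entries before giving up.
        failed = [r for r in results if isinstance(r, dict) and not r["uploaded"]]
        print(f"{success:.1f}% uploaded, retrying {len(failed)} file(s)")

asyncio.run(main())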
saviialib/services/epii/utils/upload_backup_to_sharepoint_utils.py
DELETED
@@ -1,102 +0,0 @@
-import re
-from typing import List, Dict, Optional
-import asyncio
-import os
-from saviialib.general_types.error_types.api.epii_api_error_types import (
-    BackupSourcePathError,
-)
-from saviialib.services.epii.use_cases.constants.upload_backup_to_sharepoint_constants import (
-    LOGGER,
-)
-
-
-def extract_error_information(error: str) -> Optional[Dict[str, str]]:
-    match = re.search(r"(\d+), message='([^']*)', url=\"([^\"]*)\"", error)
-    if match:
-        return {
-            "status_code": match.group(1),
-            "message": match.group(2),
-            "url": match.group(3),
-        }
-    return None
-
-
-def explain_status_code(status_code: int) -> str:
-    explanations = {
-        404: "Probably an error with file or folder source path.",
-        403: "Permission denied when accessing the source path.",
-        500: "Internal server error occurred during upload.",
-    }
-    return explanations.get(status_code, "Unknown error occurred.")
-
-
-def extract_error_message(results: List[Dict], success: float) -> str:
-    LOGGER.info(
-        "[BACKUP] Not all files uploaded ⚠️\n"
-        f"[BACKUP] Files failed to upload: {(1 - success):.2%}"
-    )
-
-    failed_files = [item for item in results if not item.get("uploaded")]
-
-    error_data = []
-    for item in failed_files:
-        error_info = extract_error_information(item.get("error_message", ""))
-        if error_info:
-            error_data.append(
-                {
-                    "file_name": item["file_name"],
-                    "status_code": error_info["status_code"],
-                    "message": error_info["message"],
-                    "url": error_info["url"],
-                }
-            )
-
-    # Group errors by code.
-    grouped_errors: Dict[str, List[Dict]] = {}
-    for error in error_data:
-        code = error["status_code"]
-        grouped_errors.setdefault(code, []).append(error)
-
-    # Summary
-    for code, items in grouped_errors.items():
-        LOGGER.info(f"[BACKUP] Status code {code} - {explain_status_code(int(code))}")
-        for item in items:
-            LOGGER.info(
-                f"[BACKUP] File {item['file_name']}, url: {item['url']}, message: {item['message']}"
-            )
-
-    failed_file_names = [item["file_name"] for item in failed_files]
-    return f"Failed files: {', '.join(failed_file_names)}."
-
-
-def parse_execute_response(results: List[Dict]) -> Dict[str, List[str]]:
-    try:
-        return {
-            "new_files": len(
-                [item["file_name"] for item in results if item.get("uploaded")]
-            ),
-        }
-    except (IsADirectoryError, AttributeError, ConnectionError) as error:
-        raise BackupSourcePathError(reason=error)
-
-
-def show_upload_result(uploaded: bool, file_name: str) -> str:
-    status = "✅" if uploaded else "❌"
-    message = "was uploaded successfully" if uploaded else "failed to upload"
-    result = f"[BACKUP] File {file_name} {message} {status}"
-    return result
-
-
-def calculate_percentage_uploaded(results: List[Dict], total_files: int) -> float:
-    uploaded_count = sum(
-        1 for result in results if isinstance(result, dict) and result.get("uploaded")
-    )
-    return (uploaded_count / total_files) * 100 if total_files > 0 else 0
-
-
-async def directory_exists(path: str) -> bool:
-    return await asyncio.to_thread(os.path.exists, path)
-
-
-async def count_files_in_directory(path: str, folder_name: str) -> int:
-    return len(await asyncio.to_thread(os.listdir, os.path.join(path, folder_name)))
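A standalone check of the regex used by extract_error_information() above; the error string here is a made-up example shaped like the aiohttp-style message the pattern expects:

import re

# Made-up error string matching the pattern in extract_error_information().
sample = "403, message='Forbidden', url=\"https://contoso.sharepoint.com/sites/rcer\""
match = re.search(r"(\d+), message='([^']*)', url=\"([^\"]*)\"", sample)
if match:
    print({"status_code": match.group(1), "message": match.group(2), "url": match.group(3)})
    # {'status_code': '403', 'message': 'Forbidden', 'url': 'https://contoso.sharepoint.com/sites/rcer'}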