edu-rdm-integration 3.2.8__py3-none-any.whl → 3.3.3__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- edu_rdm_integration/app_settings.py +5 -0
- edu_rdm_integration/apps.py +1 -1
- edu_rdm_integration/collect_data/collect.py +4 -54
- edu_rdm_integration/consts.py +0 -1
- edu_rdm_integration/export_data/base/functions.py +15 -108
- edu_rdm_integration/export_data/consts.py +5 -0
- edu_rdm_integration/export_data/dataclasses.py +11 -0
- edu_rdm_integration/export_data/export_manger.py +246 -0
- edu_rdm_integration/export_data/queue.py +172 -0
- edu_rdm_integration/helpers.py +19 -2
- edu_rdm_integration/management/general.py +0 -12
- edu_rdm_integration/migrations/0009_auto_20240522_1619.py +25 -0
- edu_rdm_integration/migrations/{0009_transferredentity_export_enabled.py → 0010_transferredentity_export_enabled.py} +2 -2
- edu_rdm_integration/migrations/0011_exportingdatasubstageentity.py +30 -0
- edu_rdm_integration/migrations/0012_exportingdatasubstageattachment_attachment_size.py +21 -0
- edu_rdm_integration/migrations/0013_set_attachment_size.py +48 -0
- edu_rdm_integration/models.py +35 -4
- edu_rdm_integration/redis_cache.py +52 -0
- edu_rdm_integration/tasks.py +38 -3
- edu_rdm_integration/utils.py +1 -0
- {edu_rdm_integration-3.2.8.dist-info → edu_rdm_integration-3.3.3.dist-info}/METADATA +49 -1
- {edu_rdm_integration-3.2.8.dist-info → edu_rdm_integration-3.3.3.dist-info}/RECORD +26 -18
- {edu_rdm_integration-3.2.8.dist-info → edu_rdm_integration-3.3.3.dist-info}/LICENSE +0 -0
- {edu_rdm_integration-3.2.8.dist-info → edu_rdm_integration-3.3.3.dist-info}/WHEEL +0 -0
- {edu_rdm_integration-3.2.8.dist-info → edu_rdm_integration-3.3.3.dist-info}/namespace_packages.txt +0 -0
- {edu_rdm_integration-3.2.8.dist-info → edu_rdm_integration-3.3.3.dist-info}/top_level.txt +0 -0
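The shape of the release is already visible in this file list: building export files (export_data/base/functions.py, which shrinks by roughly a hundred lines) is decoupled from sending them, which moves into a new Redis-backed pipeline (export_data/queue.py, export_data/export_manger.py, redis_cache.py), with migrations adding the ExportingDataSubStageEntity link table and an attachment_size column. The new export_data/dataclasses.py is not rendered below; judging from how export_manger.py constructs UploadFile positionally, it plausibly looks like the sketch that follows (field names inferred, not confirmed by this diff):

from dataclasses import dataclass


@dataclass
class UploadFile:
    # Inferred from the positional arguments passed in export_manger.py;
    # the real definition in export_data/dataclasses.py is not part of this diff.
    attachment_id: int
    attachment_name: str
    attachment_size: int
    operation: int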
edu_rdm_integration/app_settings.py
CHANGED
@@ -43,3 +43,8 @@ RDM_UPLOAD_STATUS_TASK_MINUTE = '*/30'
 RDM_UPLOAD_STATUS_TASK_HOUR = '*'
 RDM_UPLOAD_STATUS_TASK_DAY_OF_WEEK = '*'
 RDM_UPLOAD_STATUS_TASK_LOCK_EXPIRE_SECONDS = 60 * 60 * 2
+
+# Redis queue settings for building RDM files
+RDM_UPLOAD_QUEUE_MAX_SIZE = 500_000_000
+# Timeout for storing parameters in the shared cache
+RDM_REDIS_CACHE_TIMEOUT_SECONDS = 60 * 60 * 2
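The two new settings drive the queue-based export added in this release: RDM_UPLOAD_QUEUE_MAX_SIZE caps the combined size of files waiting in the Redis queue at 500 MB, and RDM_REDIS_CACHE_TIMEOUT_SECONDS gives the cached size counter a two-hour TTL. A minimal sketch of how they interact, modeled on ExportQueueSender further down this diff (the cache key value and helper name here are illustrative; the real key, TOTAL_ATTACHMENTS_SIZE_KEY, lives in export_data/consts.py and its value is not shown):

from django.conf import settings
from django.core.cache import cache

# Illustrative value; the real key is TOTAL_ATTACHMENTS_SIZE_KEY in export_data/consts.py.
TOTAL_ATTACHMENTS_SIZE_KEY = 'rdm_total_attachments_size'


def try_reserve_queue_capacity(size_in_bytes: int) -> bool:
    """Reserve queue capacity for a file, refusing once the size cap would be exceeded."""
    total = cache.get(TOTAL_ATTACHMENTS_SIZE_KEY) or 0
    if total + size_in_bytes > settings.RDM_UPLOAD_QUEUE_MAX_SIZE:
        return False

    # Persist the new running total with the configured two-hour TTL.
    cache.set(
        TOTAL_ATTACHMENTS_SIZE_KEY,
        total + size_in_bytes,
        timeout=settings.RDM_REDIS_CACHE_TIMEOUT_SECONDS,
    )
    return True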
edu_rdm_integration/apps.py
CHANGED
@@ -30,11 +30,11 @@ class EduRDMIntegrationConfig(AppConfig):
         """
         Initialization of the client for interaction with the Regional Data Mart.
         """
+
         from django.core.cache import (
             DEFAULT_CACHE_ALIAS,
             caches,
         )
-
         import uploader_client
         from uploader_client.contrib.rdm.interfaces.configurations import (
             RegionalDataMartUploaderConfig,
edu_rdm_integration/collect_data/collect.py
CHANGED
@@ -3,7 +3,6 @@ from datetime import (
     date,
     datetime,
     time,
-    timedelta,
 )
 from typing import (
     TYPE_CHECKING,
@@ -42,7 +41,6 @@ from edu_rdm_integration.base import (
     BaseOperationData,
 )
 from edu_rdm_integration.consts import (
-    LOGS_SUB_PERIOD_DAYS,
     REGIONAL_DATA_MART_INTEGRATION_COLLECTING_DATA,
 )
 from edu_rdm_integration.helpers import (
@@ -195,13 +193,11 @@ class BaseCollectModelsDataByGeneratingLogs(BaseCollectModelsData):
         models: Iterable[str],
         logs_period_started_at=datetime.combine(date.today(), time.min),
         logs_period_ended_at=datetime.combine(date.today(), time.min),
-        logs_sub_period_days=LOGS_SUB_PERIOD_DAYS,
         institute_ids=(),
         **kwargs,
     ):
         super().__init__(models, logs_period_started_at, logs_period_ended_at, **kwargs)
 
-        self.logs_sub_period_days = logs_sub_period_days
         # Educational institutions for which the export is performed
         self.institute_ids = institute_ids
         # Log generator
@@ -211,42 +207,11 @@ class BaseCollectModelsDataByGeneratingLogs(BaseCollectModelsData):
         """Returns the log generator."""
         raise NotImplementedError
 
-    def _generate_logs_by_subperiod(self):
-        """
-        Log generation split into sub-periods.
-        """
-        temp_logs_period_started_at = self.logs_period_started_at
-        temp_logs_period_ended_at = self.logs_period_started_at + timedelta(days=self.logs_sub_period_days)
-
-        if temp_logs_period_ended_at > self.logs_period_ended_at:
-            temp_logs_period_ended_at = self.logs_period_ended_at
-
-        temp_logs: Dict[str, List['AuditLog']] = {}
-
-        while temp_logs_period_started_at < temp_logs_period_ended_at <= self.logs_period_ended_at:
-            for model in self.models:
-                logs = self.log_generator.generate(
-                    model=model,
-                    logs_period_started_at=temp_logs_period_started_at,
-                    logs_period_ended_at=temp_logs_period_ended_at,
-                    institute_ids=self.institute_ids,
-                )
-
-                temp_logs[model.key] = logs
-
-            yield temp_logs, temp_logs_period_started_at, temp_logs_period_ended_at
-
-            temp_logs_period_started_at = temp_logs_period_ended_at
-            temp_logs_period_ended_at += timedelta(days=self.logs_sub_period_days)
-
-            if temp_logs_period_ended_at > self.logs_period_ended_at:
-                temp_logs_period_ended_at = self.logs_period_ended_at
-
-            temp_logs.clear()
+    def _generate_logs(self) -> List[Tuple[Dict[str, List['AuditLog']], datetime, datetime]]:
+        """Log generation.
 
-    def _generate_logs_for_all_period(self):
-        """
-        Log generation for the whole period.
+        Generates logs from the records already present in the database. The start and end of the
+        log collection period are passed as parameters. Logs are generated only for the listed models.
         """
         temp_logs: Dict[str, List['AuditLog']] = {}
 
@@ -262,21 +227,6 @@ class BaseCollectModelsDataByGeneratingLogs(BaseCollectModelsData):
 
         return [(temp_logs, self.logs_period_started_at, self.logs_period_ended_at)]
 
-    def _generate_logs(self) -> List[Tuple[Dict[str, List['AuditLog']], datetime, datetime]]:
-        """
-        Log generation.
-
-        Generates logs from the records already present in the database. The start and end of the
-        log collection period and the size of the sub-periods the main period is split into are
-        passed as parameters. Logs are generated only for the listed models.
-        """
-        if self.logs_sub_period_days:
-            logs = self._generate_logs_by_subperiod()
-        else:
-            logs = self._generate_logs_for_all_period()
-
-        return logs
-
     def _collect(self):
         """
         Collects RDM model data.
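The net effect for callers: the logs_sub_period_days argument and the generator-based sub-period walk are gone, and _generate_logs always returns a single (logs, period_start, period_end) triple covering the whole period. A hedged illustration of the call-site change (the subclass name is hypothetical; the keyword arguments come from the signature above):

from datetime import datetime

# MyCollectModelsDataByGeneratingLogs stands in for any concrete subclass
# of BaseCollectModelsDataByGeneratingLogs.
collector = MyCollectModelsDataByGeneratingLogs(
    models=['my_model'],
    logs_period_started_at=datetime(2024, 5, 1),
    logs_period_ended_at=datetime(2024, 6, 1),
    institute_ids=(42,),
    # logs_sub_period_days=3,  # removed in 3.3.x; passing it now raises TypeError
)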
edu_rdm_integration/consts.py
CHANGED
edu_rdm_integration/export_data/base/functions.py
CHANGED
@@ -1,22 +1,12 @@
-import json
 from abc import (
     ABCMeta,
 )
-from pathlib import (
-    Path,
-)
 from typing import (
-    Dict,
     List,
-    Optional,
-    Tuple,
     Type,
     Union,
 )
 
-from django.conf import (
-    settings,
-)
 from django.core.files.base import (
     ContentFile,
 )
@@ -26,9 +16,6 @@ from django.core.files.storage import (
 from transliterate import (
     slugify,
 )
-from uploader_client.adapters import (
-    adapter,
-)
 
 from educommon import (
     logger,
@@ -46,22 +33,9 @@ from edu_rdm_integration.adapters.functions import (
 from edu_rdm_integration.consts import (
     LOGS_DELIMITER,
 )
-from edu_rdm_integration.enums import (
-    FileUploadStatusEnum,
-)
-from edu_rdm_integration.export_data.base.consts import (
-    OPERATIONS_METHODS_MAP,
-    OPERATIONS_URLS_MAP,
-)
-from edu_rdm_integration.export_data.base.errors import (
-    BaseExportDataError,
-)
 from edu_rdm_integration.export_data.base.helpers import (
     BaseExportDataFunctionHelper,
 )
-from edu_rdm_integration.export_data.base.requests import (
-    RegionalDataMartEntityRequest,
-)
 from edu_rdm_integration.export_data.base.results import (
     BaseExportDataFunctionResult,
 )
@@ -75,8 +49,8 @@ from edu_rdm_integration.models import (
     ExportingDataStage,
     ExportingDataSubStage,
     ExportingDataSubStageAttachment,
+    ExportingDataSubStageEntity,
     ExportingDataSubStageStatus,
-    ExportingDataSubStageUploaderClientLog,
 )
 from edu_rdm_integration.utils import (
     get_exporting_data_stage_attachment_path,
@@ -99,6 +73,10 @@ class BaseExportDataFunction(
             stage=stage,
             function_id=self.uuid,
         )
+        ExportingDataSubStageEntity.objects.create(
+            exporting_data_sub_stage=self._sub_stage,
+            entity_id=self.first_entity.key
+        )
         # Assign the export sub-stage
         self.entities[0].main_model_enum.model.objects.filter(pk__in=model_ids).update(
             exporting_sub_stage=self._sub_stage,
@@ -116,11 +94,7 @@ class BaseExportDataFunction(
             EntityLogOperation.DELETE: [],
         }
 
-        self._file_contents = {
-            EntityLogOperation.CREATE: (None, None),
-            EntityLogOperation.UPDATE: (None, None),
-            EntityLogOperation.DELETE: (None, None),
-        }
+        self.has_data = False
 
     def _prepare_helper_class(self) -> Type[BaseExportDataFunctionHelper]:
         """
@@ -201,8 +175,10 @@ class BaseExportDataFunction(
         Building the files for subsequent upload.
         """
         logger.info(f'{LOGS_DELIMITER * 3}{self.__class__.__name__} prepare files..')
+        has_record = False
 
         for operation in EntityLogOperation.values.keys():
+
             records = self._data[operation]
             if records:
                 title_record = (
@@ -230,83 +206,12 @@ class BaseExportDataFunction(
                 )
 
                 sub_stage_attachment.attachment = default_storage.save(file_path, ContentFile(joined_records))
+                sub_stage_attachment.attachment_size = sub_stage_attachment.attachment.size
 
                 self.do_on_save(sub_stage_attachment)
+                has_record |= True
 
-
-
-    def _send_files(self):
-        """
-        Sending the files to the "Regional Data Mart".
-        """
-        # TODO At project start-up, during initialization of the function_tools application, strategies are
-        # looked up. The lookup fails with the error "Client has not been configured". The configuration is
-        # done in the config of the regional_data_mart_integration application. The mechanism needs to be
-        # reworked so that strategy assembly starts only after all applications are ready.
-
-        logger.info(f'{LOGS_DELIMITER * 3}{self.__class__.__name__} send files..')
-
-        requests_result = []
-
-        for operation, (relative_file_path, sub_stage_attachment) in self._file_contents.items():
-            if relative_file_path:
-                if settings.RDM_UPLOADER_CLIENT_ENABLE_REQUEST_EMULATION:
-                    logger.warning(
-                        f'{LOGS_DELIMITER * 3}ATTENTION!!! REGIONAL DATA MART INTEGRATION REQUEST EMULATION ENABLED!'
-                    )
-
-                method = OPERATIONS_METHODS_MAP.get(operation)
-                file_path = Path.joinpath(Path(settings.MEDIA_ROOT), relative_file_path)
-
-                request = RegionalDataMartEntityRequest(
-                    datamart_name=settings.RDM_UPLOADER_CLIENT_DATAMART_NAME,
-                    table_name=self.first_entity.key.lower(),
-                    method=method,
-                    operation=OPERATIONS_URLS_MAP.get(operation),
-                    parameters={},
-                    headers={
-                        'Content-Type': 'text/csv',
-                    },
-                    files=[],
-                    data=file_path.open('rb').read(),
-                )
-
-                result = adapter.send(request)
-
-                request_id = ''
-                if not result.error:
-                    request_id = result.response.text
-
-                file_upload_status = FileUploadStatusEnum.IN_PROGRESS if request_id else FileUploadStatusEnum.ERROR
-
-                sub_stage_uploader_client_log = ExportingDataSubStageUploaderClientLog(
-                    entry=result.log,
-                    sub_stage=self._sub_stage,
-                    attachment=sub_stage_attachment,
-                    request_id=request_id,
-                    file_upload_status=file_upload_status,
-                    is_emulation=settings.RDM_UPLOADER_CLIENT_ENABLE_REQUEST_EMULATION,
-                )
-
-                self.do_on_save(sub_stage_uploader_client_log)
-
-                if result.error:
-                    logger.warning(
-                        f'{result.error}\nrequest - "{result.log.request}"\nresponse - "{result.log.response}"'
-                    )
-
-                    error = BaseExportDataError(
-                        message=result.error,
-                    )
-                    self.result.append_entity(entity=error)
-
-                    break
-                else:
-                    logger.info(
-                        f'Response with {result.response.status_code} code and content {result.response.text}'
-                    )
-
-                requests_result.append(result)
+        self.has_data = has_record
 
     def _prepare(self, *args, **kwargs):
         """
@@ -316,7 +221,6 @@ class BaseExportDataFunction(
         if self.helper.cache.entity_instances:
             self._prepare_data()
             self._prepare_files()
-            self._send_files()
         else:
             logger.info(f'{LOGS_DELIMITER * 3} no data for preparing.')
 
@@ -342,7 +246,10 @@ class BaseExportDataFunction(
         super().run(*args, **kwargs)
 
         if self.result.has_not_errors:
-            self._sub_stage.status_id =
+            self._sub_stage.status_id = (
+                ExportingDataSubStageStatus.FINISHED.key if not self.has_data
+                else ExportingDataSubStageStatus.READY_FOR_EXPORT.key
+            )
         else:
            self._sub_stage.status_id = ExportingDataSubStageStatus.FAILED.key
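With _send_files removed, a function run now only prepares files: _prepare_files records each attachment's size and whether anything was produced, and run() parks successful sub-stages for the new queue-based sender instead of uploading inline. A sketch of the resulting status decision (ExportingDataSubStageStatus is the real model enum from edu_rdm_integration.models; the standalone helper is illustrative, paraphrasing the run() logic above):

from edu_rdm_integration.models import ExportingDataSubStageStatus


def final_sub_stage_status(has_errors: bool, has_data: bool) -> str:
    """Restates the status transition in BaseExportDataFunction.run()."""
    if has_errors:
        return ExportingDataSubStageStatus.FAILED.key
    if not has_data:
        # Nothing was produced, so there is nothing left to upload.
        return ExportingDataSubStageStatus.FINISHED.key
    # Files exist; ExportQueueSender (next file) picks the sub-stage up from here.
    return ExportingDataSubStageStatus.READY_FOR_EXPORT.key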
edu_rdm_integration/export_data/export_manger.py
ADDED
@@ -0,0 +1,246 @@
+from datetime import (
+    datetime,
+)
+from pathlib import (
+    Path,
+)
+from typing import (
+    List,
+)
+
+from django.conf import (
+    settings,
+)
+from django.db.models import (
+    F,
+    Sum,
+)
+from django.db.transaction import (
+    atomic,
+)
+from uploader_client.adapters import (
+    adapter,
+)
+
+from educommon import (
+    logger,
+)
+
+from edu_rdm_integration.consts import (
+    LOGS_DELIMITER,
+)
+from edu_rdm_integration.enums import (
+    FileUploadStatusEnum,
+)
+from edu_rdm_integration.export_data.base.consts import (
+    OPERATIONS_METHODS_MAP,
+    OPERATIONS_URLS_MAP,
+)
+from edu_rdm_integration.export_data.base.requests import (
+    RegionalDataMartEntityRequest,
+)
+from edu_rdm_integration.export_data.consts import (
+    TOTAL_ATTACHMENTS_SIZE_KEY,
+)
+from edu_rdm_integration.export_data.dataclasses import (
+    UploadFile,
+)
+from edu_rdm_integration.export_data.queue import (
+    Queue,
+)
+from edu_rdm_integration.models import (
+    ExportingDataSubStage,
+    ExportingDataSubStageAttachment,
+    ExportingDataSubStageStatus,
+    ExportingDataSubStageUploaderClientLog,
+)
+from edu_rdm_integration.redis_cache import (
+    AbstractCache,
+)
+
+
+class ExportQueueSender:
+    """Sends data to the RDM queue."""
+
+    def __init__(self, data_cache: AbstractCache, queue: Queue):
+        self.queue = queue
+        self.cache = data_cache
+        self.queue_total_file_size = self.cache.get(TOTAL_ATTACHMENTS_SIZE_KEY) or 0
+
+    @staticmethod
+    def get_exported_file_size() -> int:
+        """Returns the size of the files sent to RDM.
+
+        Only files marked as sent but not yet processed by the data mart are counted.
+        """
+        sended_attachment_ids = ExportingDataSubStageUploaderClientLog.objects.filter(
+            file_upload_status=FileUploadStatusEnum.IN_PROGRESS
+        ).values_list('attachment_id', flat=True)
+        file_size = ExportingDataSubStageAttachment.objects.filter(
+            id__in=sended_attachment_ids
+        ).aggregate(Sum('attachment_size'))['attachment_size__sum']
+
+        return file_size or 0
+
+    @staticmethod
+    def get_sub_stages_attachments_to_export():
+        """Selects the sub-stages that are ready for export."""
+        return ExportingDataSubStage.objects.filter(
+            status_id=ExportingDataSubStageStatus.READY_FOR_EXPORT.key
+        ).annotate(
+            attachment_id=F('exportingdatasubstageattachment__id'),
+            attachment_name=F('exportingdatasubstageattachment__attachment'),
+            attachment_size=F('exportingdatasubstageattachment__attachment_size'),
+            operation=F('exportingdatasubstageattachment__operation'),
+            entity=F('exportingdatasubstageentity__entity_id')
+        ).order_by('started_at', 'operation').values(
+            'id',
+            'attachment_id',
+            'attachment_name',
+            'attachment_size',
+            'operation',
+            'entity',
+        )
+
+    def set_sub_stage_to_cache(self, sub_stage_id: int, entity_name: str, attachments: List[UploadFile]) -> bool:
+        """Puts a sub-stage into the queue along with its file info and updates the file-size counter."""
+        sub_stage_total_size = sum((attachment.attachment_size for attachment in attachments))
+        if self.queue_total_file_size + sub_stage_total_size > settings.RDM_UPLOAD_QUEUE_MAX_SIZE:
+            return False
+
+        self.queue.enqueue(sub_stage_id, entity_name, attachments)
+        self.queue_total_file_size += sub_stage_total_size
+        # Update the file size in the cache
+        self.cache.set(
+            TOTAL_ATTACHMENTS_SIZE_KEY,
+            self.queue_total_file_size,
+            timeout=settings.RDM_REDIS_CACHE_TIMEOUT_SECONDS
+        )
+
+        return True
+
+    def run(self):
+        """Starts the manager."""
+        if not self.queue_total_file_size:
+            self.queue_total_file_size = self.get_exported_file_size()
+
+        if self.queue_total_file_size < settings.RDM_UPLOAD_QUEUE_MAX_SIZE:
+            stage_files = []
+            prev_sub_stage = None
+            entity = ''
+            # While the queue size allows it, send all of a sub-stage's files to the queue; otherwise stop
+            for stage_attachment in self.get_sub_stages_attachments_to_export():
+                if prev_sub_stage != stage_attachment['id']:
+                    if stage_files:
+                        to_cache = self.set_sub_stage_to_cache(prev_sub_stage, entity, stage_files)
+                        logger.info(
+                            f'ExportedDataSubStage {prev_sub_stage} {entity} added to the queue'
+                        )
+                        stage_files = []
+
+                        if not to_cache:
+                            break
+
+                    prev_sub_stage = stage_attachment['id']
+
+                if stage_attachment['attachment_size']:
+                    stage_files.append(UploadFile(
+                        stage_attachment['attachment_id'],
+                        stage_attachment['attachment_name'],
+                        stage_attachment['attachment_size'],
+                        stage_attachment['operation']
+                    ))
+                entity = stage_attachment['entity']
+
+            # Update the total queue size and enqueue the last item
+            if stage_files:
+                self.set_sub_stage_to_cache(prev_sub_stage, entity, stage_files)
+        else:
+            # Store the size of the sent files in the cache
+            self.cache.set(
+                TOTAL_ATTACHMENTS_SIZE_KEY,
+                self.queue_total_file_size,
+                timeout=settings.RDM_REDIS_CACHE_TIMEOUT_SECONDS
+            )
+
+
+class WorkerSender:
+    """Performs the actual sending of the files."""
+
+    def __init__(self, queue: Queue):
+        self.queue = queue
+
+    def send_files(self):
+        """Sends the files."""
+        sub_stages = self.queue.dequeue()
+        for (sub_stage_id, entity_key), upload_files in sub_stages.items():
+            sub_stage = ExportingDataSubStage.objects.filter(id=sub_stage_id).first()
+            if not sub_stage:
+                return
+
+            result_to_save = []
+            errors = []
+            status = ExportingDataSubStageStatus.FINISHED.key
+            for file in upload_files:
+                if settings.RDM_UPLOADER_CLIENT_ENABLE_REQUEST_EMULATION:
+                    logger.warning(
+                        f'{LOGS_DELIMITER * 3}ATTENTION!!! REGIONAL DATA MART INTEGRATION REQUEST EMULATION ENABLED!'
+                    )
+
+                method = OPERATIONS_METHODS_MAP.get(file.operation)
+                file_path = Path.joinpath(Path(settings.MEDIA_ROOT), file.attachment_name)
+
+                request = RegionalDataMartEntityRequest(
+                    datamart_name=settings.RDM_UPLOADER_CLIENT_DATAMART_NAME,
+                    table_name=entity_key.lower(),
+                    method=method,
+                    operation=OPERATIONS_URLS_MAP.get(file.operation),
+                    parameters={},
+                    headers={
+                        'Content-Type': 'text/csv',
+                    },
+                    files=[],
+                    data=file_path.open('rb').read(),
+                )
+
+                result = adapter.send(request)
+
+                request_id = ''
+                if not result.error:
+                    request_id = result.response.text
+
+                file_upload_status = FileUploadStatusEnum.IN_PROGRESS if request_id else FileUploadStatusEnum.ERROR
+
+                sub_stage_uploader_client_log = ExportingDataSubStageUploaderClientLog(
+                    entry=result.log,
+                    sub_stage_id=sub_stage_id,
+                    attachment_id=file.attachment_id,
+                    request_id=request_id,
+                    file_upload_status=file_upload_status,
+                    is_emulation=settings.RDM_UPLOADER_CLIENT_ENABLE_REQUEST_EMULATION,
+                )
+                result_to_save.append(sub_stage_uploader_client_log)
+                if result.error:
+                    logger.warning(
+                        f'{result.error}\nrequest - "{result.log.request}"\nresponse - "{result.log.response}"'
+                    )
+
+                    errors.append(result.error)
+                    status = ExportingDataSubStageStatus.FAILED.key
+                else:
+                    logger.info(
+                        f'Response with {result.response.status_code} code and content {result.response.text}'
+                    )
+
+            # Save the file-sending info and remove the sub-stage and its files from the queue
+            if result_to_save:
+                with atomic():
+                    ExportingDataSubStageUploaderClientLog.objects.bulk_create(result_to_save)
+                    sub_stage.status_id = status
+                    sub_stage.save()
+
+                self.queue.delete_from_queue(sub_stage_id=sub_stage_id, entity_name=entity_key)
+
+    def run(self):
+        """Starts the sending worker."""
+        self.send_files()
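Taken together, the split is: BaseExportDataFunction prepares files and marks sub-stages READY_FOR_EXPORT; ExportQueueSender moves them into the Redis queue while respecting RDM_UPLOAD_QUEUE_MAX_SIZE; WorkerSender drains the queue and performs the uploads. The Queue and AbstractCache types live in the new edu_rdm_integration/export_data/queue.py and edu_rdm_integration/redis_cache.py, whose diffs are not shown above, so the usage sketch below assumes only the methods export_manger.py actually calls: cache.get(key) and cache.set(key, value, timeout=...), queue.enqueue(...), queue.dequeue(), and queue.delete_from_queue(...). Constructor arguments are assumptions.

from edu_rdm_integration.export_data.export_manger import (
    ExportQueueSender,
    WorkerSender,
)
from edu_rdm_integration.export_data.queue import Queue


class DictCache:
    """Minimal in-memory stand-in for an AbstractCache implementation (illustrative only)."""

    def __init__(self):
        self._data = {}

    def get(self, key):
        return self._data.get(key)

    def set(self, key, value, timeout=None):
        # A real implementation would honour `timeout` via Redis TTLs.
        self._data[key] = value


queue = Queue()  # real constructor arguments, if any, are not visible in this diff

# Producer: move READY_FOR_EXPORT sub-stages into the queue up to RDM_UPLOAD_QUEUE_MAX_SIZE.
ExportQueueSender(data_cache=DictCache(), queue=queue).run()

# Consumer: upload each queued sub-stage's files to the data mart and update statuses.
WorkerSender(queue=queue).run()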