synapse-sdk 1.0.0b22__py3-none-any.whl → 1.0.0b23__py3-none-any.whl

This diff shows the changes between publicly released package versions as they appear in their respective public registries. It is provided for informational purposes only.

This version of synapse-sdk has been flagged as potentially problematic.

@@ -0,0 +1,221 @@
+from enum import Enum
+
+from synapse_sdk.shared.enums import Context
+
+
+class UploadStatus(str, Enum):
+    """Upload processing status enumeration.
+
+    Defines the possible states for upload operations, data files, and data units
+    throughout the upload process.
+
+    Attributes:
+        SUCCESS: Upload completed successfully
+        FAILED: Upload failed with errors
+    """
+
+    SUCCESS = 'success'
+    FAILED = 'failed'
+
+
+class LogCode(str, Enum):
+    """Type-safe logging codes for upload operations.
+
+    Enumeration of all possible log events during upload processing. Each code
+    corresponds to a specific event or error state with a predefined message
+    template and log level.
+
+    The codes are organized by category:
+    - Validation codes (VALIDATION_FAILED, STORAGE_VALIDATION_FAILED, etc.)
+    - File processing codes (NO_FILES_FOUND, FILES_DISCOVERED, etc.)
+    - Excel processing codes (EXCEL_SECURITY_VIOLATION, EXCEL_PARSING_ERROR, etc.)
+    - Progress tracking codes (UPLOADING_DATA_FILES, GENERATING_DATA_UNITS, etc.)
+
+    Each code maps to a configuration in LOG_MESSAGES with a message template
+    and an appropriate log level.
+    """
+
+    STORAGE_VALIDATION_FAILED = 'STORAGE_VALIDATION_FAILED'
+    COLLECTION_VALIDATION_FAILED = 'COLLECTION_VALIDATION_FAILED'
+    PROJECT_VALIDATION_FAILED = 'PROJECT_VALIDATION_FAILED'
+    VALIDATION_FAILED = 'VALIDATION_FAILED'
+    NO_FILES_FOUND = 'NO_FILES_FOUND'
+    NO_FILES_UPLOADED = 'NO_FILES_UPLOADED'
+    NO_DATA_UNITS_GENERATED = 'NO_DATA_UNITS_GENERATED'
+    NO_TYPE_DIRECTORIES = 'NO_TYPE_DIRECTORIES'
+    EXCEL_SECURITY_VIOLATION = 'EXCEL_SECURITY_VIOLATION'
+    EXCEL_PARSING_ERROR = 'EXCEL_PARSING_ERROR'
+    EXCEL_METADATA_LOADED = 'EXCEL_METADATA_LOADED'
+    UPLOADING_DATA_FILES = 'UPLOADING_DATA_FILES'
+    GENERATING_DATA_UNITS = 'GENERATING_DATA_UNITS'
+    IMPORT_COMPLETED = 'IMPORT_COMPLETED'
+    TYPE_DIRECTORIES_FOUND = 'TYPE_DIRECTORIES_FOUND'
+    TYPE_STRUCTURE_DETECTED = 'TYPE_STRUCTURE_DETECTED'
+    FILES_DISCOVERED = 'FILES_DISCOVERED'
+    NO_FILES_FOUND_WARNING = 'NO_FILES_FOUND_WARNING'
+    FILE_UPLOAD_FAILED = 'FILE_UPLOAD_FAILED'
+    DATA_UNIT_BATCH_FAILED = 'DATA_UNIT_BATCH_FAILED'
+    FILENAME_TOO_LONG = 'FILENAME_TOO_LONG'
+    MISSING_REQUIRED_FILES = 'MISSING_REQUIRED_FILES'
+    EXCEL_FILE_NOT_FOUND = 'EXCEL_FILE_NOT_FOUND'
+    EXCEL_FILE_VALIDATION_STARTED = 'EXCEL_FILE_VALIDATION_STARTED'
+    EXCEL_WORKBOOK_LOADED = 'EXCEL_WORKBOOK_LOADED'
+    FILE_ORGANIZATION_STARTED = 'FILE_ORGANIZATION_STARTED'
+    BATCH_PROCESSING_STARTED = 'BATCH_PROCESSING_STARTED'
+    EXCEL_SECURITY_VALIDATION_STARTED = 'EXCEL_SECURITY_VALIDATION_STARTED'
+    EXCEL_MEMORY_ESTIMATION = 'EXCEL_MEMORY_ESTIMATION'
+    EXCEL_FILE_NOT_FOUND_PATH = 'EXCEL_FILE_NOT_FOUND_PATH'
+    EXCEL_SECURITY_VALIDATION_FAILED = 'EXCEL_SECURITY_VALIDATION_FAILED'
+    EXCEL_PARSING_FAILED = 'EXCEL_PARSING_FAILED'
+    EXCEL_INVALID_FILE_FORMAT = 'EXCEL_INVALID_FILE_FORMAT'
+    EXCEL_FILE_TOO_LARGE = 'EXCEL_FILE_TOO_LARGE'
+    EXCEL_FILE_ACCESS_ERROR = 'EXCEL_FILE_ACCESS_ERROR'
+    EXCEL_UNEXPECTED_ERROR = 'EXCEL_UNEXPECTED_ERROR'
+
+
+LOG_MESSAGES = {
+    LogCode.STORAGE_VALIDATION_FAILED: {
+        'message': 'Storage validation failed.',
+        'level': Context.DANGER,
+    },
+    LogCode.COLLECTION_VALIDATION_FAILED: {
+        'message': 'Collection validation failed.',
+        'level': Context.DANGER,
+    },
+    LogCode.PROJECT_VALIDATION_FAILED: {
+        'message': 'Project validation failed.',
+        'level': Context.DANGER,
+    },
+    LogCode.VALIDATION_FAILED: {
+        'message': 'Validation failed.',
+        'level': Context.DANGER,
+    },
+    LogCode.NO_FILES_FOUND: {
+        'message': 'Files not found on the path.',
+        'level': Context.WARNING,
+    },
+    LogCode.NO_FILES_UPLOADED: {
+        'message': 'No files were uploaded.',
+        'level': Context.WARNING,
+    },
+    LogCode.NO_DATA_UNITS_GENERATED: {
+        'message': 'No data units were generated.',
+        'level': Context.WARNING,
+    },
+    LogCode.NO_TYPE_DIRECTORIES: {
+        'message': 'No type-based directory structure found.',
+        'level': Context.INFO,
+    },
+    LogCode.EXCEL_SECURITY_VIOLATION: {
+        'message': 'Excel security validation failed: {}',
+        'level': Context.DANGER,
+    },
+    LogCode.EXCEL_PARSING_ERROR: {
+        'message': 'Excel parsing failed: {}',
+        'level': Context.DANGER,
+    },
+    LogCode.EXCEL_METADATA_LOADED: {
+        'message': 'Excel metadata loaded for {} files',
+        'level': None,
+    },
+    LogCode.UPLOADING_DATA_FILES: {
+        'message': 'Uploading data files...',
+        'level': None,
+    },
+    LogCode.GENERATING_DATA_UNITS: {
+        'message': 'Generating data units...',
+        'level': None,
+    },
+    LogCode.IMPORT_COMPLETED: {
+        'message': 'Import completed.',
+        'level': None,
+    },
+    LogCode.TYPE_DIRECTORIES_FOUND: {
+        'message': 'Found type directories: {}',
+        'level': None,
+    },
+    LogCode.TYPE_STRUCTURE_DETECTED: {
+        'message': 'Detected type-based directory structure',
+        'level': None,
+    },
+    LogCode.FILES_DISCOVERED: {
+        'message': 'Discovered {} files',
+        'level': None,
+    },
+    LogCode.NO_FILES_FOUND_WARNING: {
+        'message': 'No files found.',
+        'level': Context.WARNING,
+    },
+    LogCode.FILE_UPLOAD_FAILED: {
+        'message': 'Failed to upload file: {}',
+        'level': Context.DANGER,
+    },
+    LogCode.DATA_UNIT_BATCH_FAILED: {
+        'message': 'Failed to create data units batch: {}',
+        'level': Context.DANGER,
+    },
+    LogCode.FILENAME_TOO_LONG: {
+        'message': 'Skipping file with overly long name: {}...',
+        'level': Context.WARNING,
+    },
+    LogCode.MISSING_REQUIRED_FILES: {
+        'message': '{} missing required files: {}',
+        'level': Context.WARNING,
+    },
+    LogCode.EXCEL_FILE_NOT_FOUND: {
+        'message': 'Excel metadata file not found: {}',
+        'level': Context.WARNING,
+    },
+    LogCode.EXCEL_FILE_VALIDATION_STARTED: {
+        'message': 'Excel file validation started',
+        'level': Context.INFO,
+    },
+    LogCode.EXCEL_WORKBOOK_LOADED: {
+        'message': 'Excel workbook loaded successfully',
+        'level': Context.INFO,
+    },
+    LogCode.FILE_ORGANIZATION_STARTED: {
+        'message': 'File organization started',
+        'level': Context.INFO,
+    },
+    LogCode.BATCH_PROCESSING_STARTED: {
+        'message': 'Batch processing started: {} batches of {} items each',
+        'level': Context.INFO,
+    },
+    LogCode.EXCEL_SECURITY_VALIDATION_STARTED: {
+        'message': 'Excel security validation started for file size: {} bytes',
+        'level': Context.INFO,
+    },
+    LogCode.EXCEL_MEMORY_ESTIMATION: {
+        'message': 'Excel memory estimation: {} bytes (file) * 3 = {} bytes (estimated)',
+        'level': Context.INFO,
+    },
+    LogCode.EXCEL_FILE_NOT_FOUND_PATH: {
+        'message': 'Excel metadata file not found',
+        'level': Context.WARNING,
+    },
+    LogCode.EXCEL_SECURITY_VALIDATION_FAILED: {
+        'message': 'Excel security validation failed: {}',
+        'level': Context.DANGER,
+    },
+    LogCode.EXCEL_PARSING_FAILED: {
+        'message': 'Excel parsing failed: {}',
+        'level': Context.DANGER,
+    },
+    LogCode.EXCEL_INVALID_FILE_FORMAT: {
+        'message': 'Invalid Excel file format: {}',
+        'level': Context.DANGER,
+    },
+    LogCode.EXCEL_FILE_TOO_LARGE: {
+        'message': 'Excel file too large to process (memory limit exceeded)',
+        'level': Context.DANGER,
+    },
+    LogCode.EXCEL_FILE_ACCESS_ERROR: {
+        'message': 'File access error reading excel metadata: {}',
+        'level': Context.DANGER,
+    },
+    LogCode.EXCEL_UNEXPECTED_ERROR: {
+        'message': 'Unexpected error reading excel metadata: {}',
+        'level': Context.DANGER,
+    },
+}
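
Each LOG_MESSAGES entry pairs a positional message template with a Context level (or None). A minimal sketch of the lookup-and-format pattern this table supports, assuming the enums above; the None-to-INFO fallback mirrors how UploadRun consumes these entries later in this release:

# Resolve a LogCode to its rendered message and effective level.
config = LOG_MESSAGES[LogCode.FILES_DISCOVERED]
message = config['message'].format(42)   # 'Discovered 42 files'
level = config['level'] or Context.INFO  # entries with level=None fall back to INFO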
@@ -0,0 +1,36 @@
+class ExcelSecurityError(Exception):
+    """Exception raised when Excel file security validation fails.
+
+    This exception is raised when an Excel file violates security constraints
+    such as file size or memory usage limits, or contains potentially
+    dangerous content.
+
+    Used during Excel metadata processing to enforce security policies
+    and prevent processing of files that could pose security risks.
+
+    Example:
+        >>> if file_size > max_size:
+        ...     raise ExcelSecurityError(f"File size {file_size} exceeds limit {max_size}")
+    """
+
+    pass
+
+
+class ExcelParsingError(Exception):
+    """Exception raised when Excel file parsing encounters errors.
+
+    This exception is raised when an Excel file cannot be parsed due to
+    format issues, corruption, or other parsing-related problems that
+    prevent successful metadata extraction.
+
+    Used during Excel metadata loading to distinguish parsing errors
+    from security violations or other types of errors.
+
+    Example:
+        >>> try:
+        ...     workbook = load_workbook(excel_file)
+        ... except InvalidFileException as e:
+        ...     raise ExcelParsingError(f"Failed to parse Excel file: {e}")
+    """
+
+    pass
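
The two exception types let callers route security violations and parse failures to different log codes. A hedged sketch of the intended call-site pattern; load_excel_metadata, excel_path, and run are hypothetical stand-ins, not names defined in this diff:

try:
    metadata = load_excel_metadata(excel_path)  # hypothetical loader
except ExcelSecurityError as e:
    run.log_upload_event(LogCode.EXCEL_SECURITY_VIOLATION, str(e))
except ExcelParsingError as e:
    run.log_upload_event(LogCode.EXCEL_PARSING_ERROR, str(e))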
@@ -0,0 +1,149 @@
+from pathlib import Path
+from typing import Annotated
+
+from pydantic import AfterValidator, BaseModel, ValidationInfo, field_validator
+from pydantic_core import PydanticCustomError
+
+from synapse_sdk.clients.exceptions import ClientError
+from synapse_sdk.utils.pydantic.validators import non_blank
+
+from .utils import ExcelSecurityConfig
+
+
+class UploadParams(BaseModel):
+    """Upload action parameter validation model.
+
+    Defines and validates all parameters required for upload operations.
+    Uses Pydantic for type validation and custom validators to ensure
+    storage, data_collection, and project resources exist before processing.
+
+    Attributes:
+        name (str): Human-readable name for the upload operation
+        description (str | None): Optional description of the upload
+        path (str): File system path to upload from
+        storage (int): Storage ID where files will be uploaded
+        data_collection (int): Data collection ID for organizing uploads
+        project (int | None): Optional project ID for grouping
+        excel_metadata_path (str | None): Path to Excel metadata file
+        is_recursive (bool): Whether to recursively process subdirectories
+        max_file_size_mb (int): Maximum file size limit in megabytes
+        creating_data_unit_batch_size (int): Batch size for data unit creation
+        use_async_upload (bool): Whether to use asynchronous upload processing
+        extra_params (dict | None): Extra parameters for the action.
+            Example: {"include_metadata": True, "compression": "gzip"}
+
+    Validation:
+        - name: Must be non-blank after validation
+        - storage: Must exist and be accessible via client API
+        - data_collection: Must exist and be accessible via client API
+        - project: Must exist if specified, or can be None
+        - excel_metadata_path: Must be a valid Excel file if specified
+
+    Example:
+        >>> params = UploadParams(
+        ...     name="Data Upload",
+        ...     path="/data/files",
+        ...     storage=1,
+        ...     data_collection=5
+        ... )
+    """
+
+    name: Annotated[str, AfterValidator(non_blank)]
+    description: str | None = None
+    path: str
+    storage: int
+    data_collection: int
+    project: int | None = None
+    excel_metadata_path: str | None = None
+    is_recursive: bool = True
+    max_file_size_mb: int = 50
+    creating_data_unit_batch_size: int = 1
+    use_async_upload: bool = True
+    extra_params: dict | None = None
+
+    @field_validator('storage', mode='before')
+    @classmethod
+    def check_storage_exists(cls, value, info: ValidationInfo) -> int:
+        if info.context is None:
+            raise PydanticCustomError('missing_context', 'Validation context is required.')
+
+        action = info.context['action']
+        client = action.client
+        try:
+            client.get_storage(value)
+        except ClientError:
+            raise PydanticCustomError('client_error', 'Error occurred while checking storage exists.')
+        return value
+
+    @field_validator('data_collection', mode='before')
+    @classmethod
+    def check_data_collection_exists(cls, value, info: ValidationInfo) -> int:
+        if info.context is None:
+            raise PydanticCustomError('missing_context', 'Validation context is required.')
+
+        action = info.context['action']
+        client = action.client
+        try:
+            client.get_data_collection(value)
+        except ClientError:
+            raise PydanticCustomError('client_error', 'Error occurred while checking data_collection exists.')
+        return value
+
+    @field_validator('project', mode='before')
+    @classmethod
+    def check_project_exists(cls, value, info: ValidationInfo) -> int | None:
+        if not value:
+            return value
+
+        if info.context is None:
+            raise PydanticCustomError('missing_context', 'Validation context is required.')
+
+        action = info.context['action']
+        client = action.client
+        try:
+            client.get_project(value)
+        except ClientError:
+            raise PydanticCustomError('client_error', 'Error occurred while checking project exists.')
+        return value
+
+    @field_validator('excel_metadata_path', mode='before')
+    @classmethod
+    def check_excel_metadata_path(cls, value, info: ValidationInfo) -> str | None:
+        if not value:
+            return value
+
+        excel_path = Path(value)
+
+        if not excel_path.exists():
+            raise PydanticCustomError('file_not_found', 'Excel metadata file not found.')
+
+        if excel_path.suffix.lower() not in ['.xlsx', '.xls']:
+            raise PydanticCustomError('invalid_file_type', 'Excel metadata file must be .xlsx or .xls format.')
+
+        file_size = excel_path.stat().st_size
+        excel_config = ExcelSecurityConfig()
+        if file_size > excel_config.MAX_FILE_SIZE_BYTES:
+            max_size_mb = excel_config.MAX_FILE_SIZE_MB
+            raise PydanticCustomError(
+                'file_too_large',
+                'Excel metadata file is too large. Maximum size is {max_size_mb}MB.',
+                {'max_size_mb': max_size_mb},
+            )
+
+        try:
+            with open(excel_path, 'rb') as f:
+                header = f.read(8)
+                if not header:
+                    raise PydanticCustomError('invalid_file', 'Excel metadata file appears to be empty.')
+
+                if excel_path.suffix.lower() == '.xlsx':
+                    if not header.startswith(b'PK'):
+                        raise PydanticCustomError('invalid_file', 'Excel metadata file appears to be corrupted.')
+                elif excel_path.suffix.lower() == '.xls':
+                    if not (header.startswith(b'\xd0\xcf\x11\xe0') or header.startswith(b'\x09\x08')):
+                        raise PydanticCustomError('invalid_file', 'Excel metadata file appears to be corrupted.')
+
+        except (OSError, IOError):
+            raise PydanticCustomError('file_access_error', 'Cannot access Excel metadata file.')
+
+        return value
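
Because the storage, data_collection, and project validators read info.context['action'].client, this model has to be validated with a context dict rather than constructed directly. A minimal sketch, assuming a hypothetical `action` object exposing a `.client` with get_storage / get_data_collection methods:

params = UploadParams.model_validate(
    {'name': 'Data Upload', 'path': '/data/files', 'storage': 1, 'data_collection': 5},
    context={'action': action},  # `action` is a hypothetical stand-in
)

Note that direct construction, as in the class docstring's example, runs the field validators without a context and would trip the missing_context check.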
@@ -0,0 +1,178 @@
+import json
+from datetime import datetime
+from typing import Optional
+
+from pydantic import BaseModel
+
+from synapse_sdk.plugins.models import Run
+from synapse_sdk.shared.enums import Context
+
+from .enums import LOG_MESSAGES, LogCode, UploadStatus
+from .utils import PathAwareJSONEncoder
+
+
+class UploadRun(Run):
+    """Upload-specific run management class.
+
+    Extends the base Run class with upload-specific logging capabilities
+    and event tracking. Provides type-safe logging using LogCode enums
+    and specialized methods for tracking upload progress.
+
+    Manages logging for upload events, data files, data units, and tasks
+    throughout the upload lifecycle. Each log entry includes status,
+    timestamps, and relevant metadata.
+
+    Attributes:
+        Inherits all attributes from the base Run class, plus upload-specific
+        logging methods and nested model classes for structured logging.
+
+    Example:
+        >>> run = UploadRun(job_id, context)
+        >>> run.log_message_with_code(LogCode.UPLOADING_DATA_FILES)
+        >>> run.log_upload_event(LogCode.FILES_DISCOVERED, file_count)
+    """
+
+    class UploadEventLog(BaseModel):
+        """Model for upload event log entries.
+
+        Records significant events during upload processing with
+        status information and timestamps.
+
+        Attributes:
+            info (str | None): Optional additional information
+            status (Context): Event status/severity level
+            created (str): Timestamp when event occurred
+        """
+
+        info: Optional[str] = None
+        status: Context
+        created: str
+
+    class DataFileLog(BaseModel):
+        """Model for data file processing log entries.
+
+        Tracks the processing status of individual data files
+        during upload operations.
+
+        Attributes:
+            data_file_info (str | None): Information about the data file
+            status (UploadStatus): Processing status (SUCCESS/FAILED)
+            created (str): Timestamp when log entry was created
+        """
+
+        data_file_info: str | None
+        status: UploadStatus
+        created: str
+
+    class DataUnitLog(BaseModel):
+        """Model for data unit creation log entries.
+
+        Records the creation status of data units generated from
+        uploaded files, including metadata and identifiers.
+
+        Attributes:
+            data_unit_id (int | None): ID of created data unit
+            status (UploadStatus): Creation status (SUCCESS/FAILED)
+            created (str): Timestamp when log entry was created
+            data_unit_meta (dict | None): Metadata associated with data unit
+        """
+
+        data_unit_id: int | None
+        status: UploadStatus
+        created: str
+        data_unit_meta: dict | None
+
+    class TaskLog(BaseModel):
+        """Model for task execution log entries.
+
+        Tracks the execution status of background tasks related
+        to upload processing.
+
+        Attributes:
+            task_id (int | None): ID of the executed task
+            status (UploadStatus): Task execution status (SUCCESS/FAILED)
+            created (str): Timestamp when log entry was created
+        """
+
+        task_id: int | None
+        status: UploadStatus
+        created: str
+
+    class MetricsRecord(BaseModel):
+        """Model for upload metrics tracking.
+
+        Records count-based metrics for monitoring upload
+        progress and success rates.
+
+        Attributes:
+            stand_by (int): Number of items waiting to be processed
+            failed (int): Number of items that failed processing
+            success (int): Number of items successfully processed
+        """
+
+        stand_by: int
+        failed: int
+        success: int
+
+    def log_message_with_code(self, code: LogCode, *args, level: Optional[Context] = None):
+        if code not in LOG_MESSAGES:
+            self.log_message(f'Unknown log code: {code}')
+            return
+
+        log_config = LOG_MESSAGES[code]
+        message = log_config['message'].format(*args) if args else log_config['message']
+        log_level = level or log_config['level'] or Context.INFO
+
+        # Always call log_message for basic logging
+        if log_level:
+            self.log_message(message, context=log_level.value)
+        else:
+            self.log_message(message)
+
+    def log_upload_event(self, code: LogCode, *args, level: Optional[Context] = None):
+        # Call log_message_with_code to handle the basic logging
+        self.log_message_with_code(code, *args, level=level)
+
+        # Also log the event for upload-specific tracking
+        if code not in LOG_MESSAGES:
+            now = datetime.now().isoformat()
+            self.log(
+                'upload_event',
+                self.UploadEventLog(info=f'Unknown log code: {code}', status=Context.DANGER, created=now).model_dump(),
+            )
+            return
+
+        log_config = LOG_MESSAGES[code]
+        message = log_config['message'].format(*args) if args else log_config['message']
+        log_level = level or log_config['level'] or Context.INFO
+
+        now = datetime.now().isoformat()
+        self.log(
+            'upload_event',
+            self.UploadEventLog(info=message, status=log_level, created=now).model_dump(),
+        )
+
+    def log_data_file(self, data_file_info: dict, status: UploadStatus):
+        now = datetime.now().isoformat()
+        data_file_info_str = json.dumps(data_file_info, ensure_ascii=False, cls=PathAwareJSONEncoder)
+        self.log(
+            'upload_data_file',
+            self.DataFileLog(data_file_info=data_file_info_str, status=status, created=now).model_dump(),
+        )
+
+    def log_data_unit(self, data_unit_id: int, status: UploadStatus, data_unit_meta: dict | None = None):
+        now = datetime.now().isoformat()
+        self.log(
+            'upload_data_unit',
+            self.DataUnitLog(
+                data_unit_id=data_unit_id, status=status, created=now, data_unit_meta=data_unit_meta
+            ).model_dump(),
+        )
+
+    def log_task(self, task_id: int, status: UploadStatus):
+        now = datetime.now().isoformat()
+        self.log('upload_task', self.TaskLog(task_id=task_id, status=status, created=now).model_dump())
+
+    def log_metrics(self, record: MetricsRecord, category: str):
+        record = self.MetricsRecord.model_validate(record)
+        self.set_metrics(value=record.model_dump(), category=category)
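
Taken together, a hedged sketch of the logging flow these methods support, assuming an already-constructed UploadRun instance `run` (its constructor arguments come from the base Run class, outside this diff); the counts and category label are illustrative:

run.log_upload_event(LogCode.FILES_DISCOVERED, 42)                # message + structured event entry
run.log_data_unit(data_unit_id=101, status=UploadStatus.SUCCESS)  # per-unit creation record
run.log_metrics(
    UploadRun.MetricsRecord(stand_by=0, failed=1, success=41),
    category='data_units',
)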