tol-sdk 1.7.2-py3-none-any.whl → 1.7.4-py3-none-any.whl
- tol/api_base/pipeline_steps.py +8 -5
- tol/s3/__init__.py +1 -0
- tol/{sql/summary → s3/data_upload}/__init__.py +0 -3
- tol/s3/data_upload/blueprint.py +83 -0
- tol/services/s3_client.py +5 -3
- tol/sql/pipeline_step/factory.py +1 -1
- tol/sql/standard/factory.py +54 -1
- tol/validators/unique_values.py +37 -9
- {tol_sdk-1.7.2.dist-info → tol_sdk-1.7.4.dist-info}/METADATA +1 -1
- {tol_sdk-1.7.2.dist-info → tol_sdk-1.7.4.dist-info}/RECORD +14 -14
- tol/sql/summary/factory.py +0 -91
- {tol_sdk-1.7.2.dist-info → tol_sdk-1.7.4.dist-info}/WHEEL +0 -0
- {tol_sdk-1.7.2.dist-info → tol_sdk-1.7.4.dist-info}/entry_points.txt +0 -0
- {tol_sdk-1.7.2.dist-info → tol_sdk-1.7.4.dist-info}/licenses/LICENSE +0 -0
- {tol_sdk-1.7.2.dist-info → tol_sdk-1.7.4.dist-info}/top_level.txt +0 -0
tol/api_base/pipeline_steps.py
CHANGED
@@ -49,7 +49,7 @@ REQUIRED_FIELDS: List = [
 def pipeline_steps_blueprint(
     sql_ds: SqlDataSource,
     prefect_ds: PrefectDataSource,
-    role: str | None =
+    role: str | None = None,
     url_prefix: str = '/run-pipeline',

     ctx_getter: CtxGetter = default_ctx_getter,
@@ -155,7 +155,7 @@ def pipeline_steps_blueprint(
                 'deployment_name': flow_name,
                 'parameters': flow_params,
                 'tags': [
-                    'app_name:
+                    f'app_name: {os.environ.get("APP_NAME", "tol")}',
                 ],
             }
         )
@@ -190,10 +190,13 @@ def pipeline_steps_blueprint(
     def run_pipeline_steps() -> tuple[dict[str, Any], int]:

         ctx = ctx_getter()
-
+        if role is not None:
+            if not ctx or not ctx.authenticated:
+                raise ForbiddenError()
+            if role not in ctx.roles:
+                raise ForbiddenError()

-
-            raise ForbiddenError()
+        user_id = ctx.user_id if ctx and ctx.authenticated else None

         body: dict[str, Any] = request.json.get('data', {})

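The `role` parameter now defaults to `None` and the authorisation check is explicit: when a role is configured, an unauthenticated context or a context lacking that role raises `ForbiddenError` before any flow run is created, and `user_id` is only captured for authenticated contexts. A minimal sketch of wiring the blueprint with a required role, assuming a plain Flask app; the `sql_ds`/`prefect_ds` instances and the 'pipeline-runner' role name are placeholders, not values taken from the package:

# Illustrative wiring only; sql_ds/prefect_ds are whatever SqlDataSource and
# PrefectDataSource instances the host application already constructs, and the
# role name below is hypothetical.
from flask import Flask

from tol.api_base.pipeline_steps import pipeline_steps_blueprint


def create_app(sql_ds, prefect_ds) -> Flask:
    app = Flask(__name__)
    app.register_blueprint(
        pipeline_steps_blueprint(
            sql_ds,
            prefect_ds,
            role='pipeline-runner',  # with role=None (the new default) no role check is made
        )
    )
    return app
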
tol/s3/data_upload/blueprint.py
ADDED
@@ -0,0 +1,83 @@
+# SPDX-FileCopyrightText: 2025 Genome Research Ltd.
+#
+# SPDX-License-Identifier: MIT
+
+from tempfile import NamedTemporaryFile
+from typing import Any
+
+from flask import Blueprint, request, send_file
+
+from ...api_base import (
+    custom_blueprint,
+)
+from ...services.s3_client import S3Client
+
+
+ALLOWED_EXTENSIONS: set[str] = {'csv', 'json', 'xlsx'}
+
+
+def allowed_file(filename: str) -> bool:
+    return '.' in filename and filename.rsplit('.', 1)[1].lower() in ALLOWED_EXTENSIONS
+
+
+def data_upload_blueprint(
+    url_prefix: str = '/pipeline/data_upload',
+) -> Blueprint:
+
+    data_upload_blueprint = custom_blueprint(
+        name='data_upload',
+        url_prefix=url_prefix
+    )
+
+    @data_upload_blueprint.route('/upload', methods=['POST'])
+    def upload_file() -> tuple[dict[str, str], int]:
+        file = request.files['file']
+        s3_bucket: str = request.form.get('s3_bucket')
+
+        if not file:
+            return {'error': 'No file provided'}, 400
+
+        if not allowed_file(file.filename):
+            return {'error': 'File type not allowed'}, 400
+
+        if not s3_bucket:
+            return {'error': 'S3 bucket not specified'}, 400
+
+        try:
+            s3_client = S3Client()
+            with NamedTemporaryFile() as temp_file:
+                file.save(temp_file.name)
+                s3_client.put_object(s3_bucket, file.filename, temp_file.name)
+
+            return {'message': 'File uploaded successfully'}, 200
+
+        except Exception as e:
+            return {'error': f'Failed to upload file: {str(e)}'}, 500
+
+    @data_upload_blueprint.route('/download', methods=['POST'])
+    def download_file() -> tuple[dict[str, str], int]:
+        body: dict[str, Any] = request.json.get('data', {})
+
+        if 's3_bucket' not in body or 'file_name' not in body:
+            return {'error': 'S3 bucket or file name not specified'}, 400
+
+        bucket_name = body['s3_bucket']
+        file_name = body['file_name']
+
+        try:
+            s3_client = S3Client()
+
+            with NamedTemporaryFile() as temp_file:
+                s3_client.get_object(bucket_name, file_name, temp_file.name)
+                download_name = request.json.get('download_name', file_name)
+
+                return send_file(
+                    temp_file.name,
+                    as_attachment=True,
+                    download_name=download_name,
+                    mimetype='application/octet-stream'
+                )
+        except Exception as e:
+            return {'error': f'Failed to download file: {str(e)}'}, 500
+
+    return data_upload_blueprint

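The new blueprint exposes two POST routes under its prefix: /upload expects a multipart form with a `file` part and an `s3_bucket` field, while /download expects a JSON body whose `data` object names the bucket and file, with an optional top-level `download_name`. An illustrative client-side sketch using the `requests` library; the host, bucket and file names are made up:

# Assumes an app that registered data_upload_blueprint() with the default prefix;
# host, bucket and file names below are placeholders.
import requests

BASE = 'http://localhost:5000/pipeline/data_upload'

# Upload: multipart form with a 'file' part and an 's3_bucket' field.
with open('samples.csv', 'rb') as fh:
    resp = requests.post(
        f'{BASE}/upload',
        files={'file': ('samples.csv', fh)},
        data={'s3_bucket': 'my-bucket'},
    )
print(resp.status_code, resp.json())

# Download: bucket and file name sit under 'data', mirroring the blueprint's
# request.json.get('data', {}) lookup; 'download_name' is read from the top level.
resp = requests.post(
    f'{BASE}/download',
    json={
        'data': {'s3_bucket': 'my-bucket', 'file_name': 'samples.csv'},
        'download_name': 'samples.csv',
    },
)
with open('samples_copy.csv', 'wb') as out:
    out.write(resp.content)
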
tol/services/s3_client.py
CHANGED
@@ -26,7 +26,7 @@ class S3Client:
         secure = os.getenv('S3_SECURE', 'true').lower() == 'true'

         self.client = Minio(
-            self.s3_uri,
+            endpoint=self.s3_uri,
             access_key=self.access_key,
             secret_key=self.secret_key,
             secure=secure
@@ -39,7 +39,8 @@ class S3Client:
         file_path: str
     ) -> None:

-        self.client.fget_object(bucket_name,
+        self.client.fget_object(bucket_name=bucket_name,
+                                object_name=object_name, file_path=file_path)

     def list_objects(
         self,
@@ -55,4 +56,5 @@ class S3Client:
         file_path: str
     ) -> None:

-        self.client.fput_object(bucket_name,
+        self.client.fput_object(bucket_name=bucket_name,
+                                object_name=object_name, file_path=file_path)

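All three changes switch positional arguments to explicit keyword arguments on the underlying `minio` calls (`Minio(endpoint=...)`, `fget_object(...)`, `fput_object(...)`), guarding against argument-order mistakes. A hedged round-trip sketch through the wrapper; the bucket and paths are placeholders, and the S3 endpoint and credential environment variables that S3Client reads are assumed to be set already:

# Illustrative only: requires the environment configuration S3Client expects.
from tol.services.s3_client import S3Client

client = S3Client()

# put_object/get_object take (bucket, object name, local file path), as used by
# the data_upload blueprint above.
client.put_object('my-bucket', 'reports/summary.csv', '/tmp/summary.csv')
client.get_object('my-bucket', 'reports/summary.csv', '/tmp/summary_copy.csv')
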
tol/sql/pipeline_step/factory.py
CHANGED
tol/sql/standard/factory.py
CHANGED
@@ -37,6 +37,7 @@ class StandardModels(IterableABC[type[Model]]):
     data_source_config: type[Model]
     data_source_config_attribute: type[Model]
     data_source_config_relationship: type[Model]
+    data_source_config_summary: type[Model]
     component: type[Model]
     component_zone: type[Model]
     zone: type[Model]
@@ -61,6 +62,7 @@ class StandardModels(IterableABC[type[Model]]):
                 self.view_board,
                 self.view,
                 self.board,
+                self.data_source_config_summary,
                 self.data_source_config_relationship,
                 self.data_source_config_attribute,
                 self.loader_instance,
@@ -458,7 +460,6 @@ def create_standard_models(

         name: Mapped[str] = mapped_column(nullable=False)
         object_type: Mapped[str] = mapped_column(nullable=False)
-        name: Mapped[str] = mapped_column(nullable=False)
         display_name: Mapped[str] = mapped_column(nullable=True)
         description: Mapped[str] = mapped_column(nullable=True)
         available_on_relationships: Mapped[bool] = mapped_column(nullable=False, default=True)
@@ -484,6 +485,53 @@ def create_standard_models(
            foreign_keys=[data_source_config_id]
        )

+    class DataSourceConfigSummary(base_model_class):
+        __tablename__ = 'data_source_config_summary'
+
+        id: Mapped[int] = mapped_column(primary_key=True, autoincrement=True)  # noqa A003
+
+        source_object_type: Mapped[str] = mapped_column(nullable=False)
+        destination_object_type: Mapped[str] = mapped_column(nullable=True)
+
+        object_filters: Mapped[dict] = mapped_column(
+            JSONB,
+            nullable=False,
+            default={}
+        )
+
+        group_by: Mapped[dict] = mapped_column(
+            JSONB,
+            nullable=False,
+            default=[]
+        )
+
+        stats_fields: Mapped[dict] = mapped_column(
+            JSONB,
+            nullable=False,
+            default=[]
+        )
+
+        stats: Mapped[dict] = mapped_column(
+            JSONB,
+            nullable=False,
+            default=[]
+        )
+
+        prefix: Mapped[str] = mapped_column(
+            nullable=False,
+            default=''
+        )
+
+        data_source_config_id: Mapped[int] = mapped_column(
+            ForeignKey('data_source_config.id'),
+            nullable=False
+        )
+
+        data_source_config: Mapped['DataSourceConfig'] = relationship(  # noqa F821
+            back_populates='data_source_config_summaries',
+            foreign_keys=[data_source_config_id]
+        )
+
     class DataSourceConfig(base_model_class):
         __tablename__ = 'data_source_config'

@@ -504,6 +552,10 @@ def create_standard_models(
            back_populates='data_source_config',
            foreign_keys=[DataSourceConfigRelationship.data_source_config_id]
        )
+        data_source_config_summaries: Mapped[list['DataSourceConfigSummary']] = relationship(
+            back_populates='data_source_config',
+            foreign_keys=[DataSourceConfigSummary.data_source_config_id]
+        )

     class _UserMixin:

@@ -538,6 +590,7 @@ def create_standard_models(
         data_source_config=DataSourceConfig,
         data_source_config_attribute=DataSourceConfigAttribute,
         data_source_config_relationship=DataSourceConfigRelationship,
+        data_source_config_summary=DataSourceConfigSummary,
         component=Component,
         component_zone=ComponentZone,
         zone=Zone,

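The summary table previously created by tol/sql/summary is now a standard model: each DataSourceConfigSummary row carries JSONB columns for filters, grouping and stats, and is tied to a DataSourceConfig through data_source_config_id, with the reverse data_source_config_summaries relationship added to DataSourceConfig. A hedged sketch of attaching a summary to an existing config; `models` is assumed to be the StandardModels instance returned by create_standard_models, `session` an ordinary SQLAlchemy session, and every field value is illustrative:

# Illustrative only: model access via StandardModels and a plain SQLAlchemy
# session; the attribute values are made up.
def add_summary(models, session, config):
    DataSourceConfigSummary = models.data_source_config_summary

    summary = DataSourceConfigSummary(
        source_object_type='sample',
        destination_object_type='species',
        object_filters={'status': 'active'},
        group_by=['taxon_id'],
        stats_fields=['read_count'],
        stats=['sum', 'mean'],
        prefix='species_',
        data_source_config_id=config.id,  # existing DataSourceConfig row
    )
    session.add(summary)
    session.commit()
    # The reverse side is then reachable as config.data_source_config_summaries.
    return summary
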
tol/validators/unique_values.py
CHANGED
@@ -14,7 +14,7 @@ class UniqueValuesValidator(Validator):

     def __init__(
         self,
-        unique_keys: list[str],
+        unique_keys: list[list[str] | str],
         *,
         detail: str = 'Value is not unique',
         is_error: bool = True,
@@ -26,21 +26,49 @@ class UniqueValuesValidator(Validator):
         self.__detail = detail
         self.__is_error = is_error
         self.__duplicates: dict[str, list[str]] = {}
-        self.__existing_values: dict[str, set] = {
+        self.__existing_values: dict[str, set] = {}
+        for key in self.__keys:
+            if isinstance(key, str):
+                self.__existing_values[key] = set()
+            elif isinstance(key, list):
+                concat_key = '/'.join(key)
+                self.__existing_values[concat_key] = set()

     def _validate_data_object(
         self,
         obj: DataObject
     ) -> None:

-        for
-        if
-
-
-
-
+        for unique_key in self.__keys:
+            if isinstance(unique_key, list):
+                concat = ''
+                for key in unique_key:
+                    concat = concat + '/' + (str(obj.attributes[key]))
+                if concat in self.__existing_values['/'.join(unique_key)]:
+                    self._duplicate_checks(
+                        key=key,
+                        value=concat
+                    )
                 else:
-        self.__existing_values[
+                    self.__existing_values['/'.join(unique_key)].add(concat)
+
+            else:
+                if obj.attributes[unique_key] in self.__existing_values[unique_key]:
+                    self._duplicate_checks(
+                        key=unique_key,
+                        value=obj.attributes[unique_key]
+                    )
+                else:
+                    self.__existing_values[unique_key].add(obj.attributes[unique_key])
+
+    def _duplicate_checks(
+        self,
+        key: str,
+        value: str
+    ):
+        if key not in self.__duplicates:
+            self.__duplicates[key] = []
+        self.__duplicates[key].append(value)

     def _post_validation(
         self,

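`unique_keys` now accepts both plain strings and lists of strings: a list is treated as a composite key whose per-object values are joined with '/' and checked against previously seen combinations, while duplicates are recorded by the new _duplicate_checks helper. A hedged sketch of constructing the validator with a mixed key list; the attribute names are illustrative, and how DataObjects are fed through validation is unchanged and lives at the SDK's existing call sites:

# Illustrative construction only; validation entry points are unchanged.
from tol.validators.unique_values import UniqueValuesValidator

validator = UniqueValuesValidator(
    [
        'sample_id',         # simple key: each value must be unique on its own
        ['plate', 'well'],   # composite key: the 'plate/well' combination must be unique
    ],
    detail='Value is not unique',
)
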
{tol_sdk-1.7.2.dist-info → tol_sdk-1.7.4.dist-info}/RECORD
CHANGED
@@ -8,7 +8,7 @@ tol/api_base/__init__.py,sha256=xv9kkAyb1Q5vhKgq6f-JpY7lIAla51U-gIpA5U1OfkA,312
 tol/api_base/action.py,sha256=l3MzD3jSDdYiZs-HCT4YEsXkp34X26k6yWugXrRB2XU,6575
 tol/api_base/blueprint.py,sha256=lyNXgyyZ7kEL5uuIuSNkardFy8ZcDIfftt8c5xiRjyA,19061
 tol/api_base/controller.py,sha256=5Fp2FobjXXPlongu1j7adGJKlSVufW0_jpk_wZIMRa4,26818
-tol/api_base/pipeline_steps.py,sha256=
+tol/api_base/pipeline_steps.py,sha256=Y7tOwaBBanxAqiUrTpmF6yaLkEJI6o0ELnM-2CaCvHs,5832
 tol/api_base/system.py,sha256=c47sbcNaCd3teiJLO-Zwg1cUNvfCvRO4GmKzAMNxZrw,1219
 tol/api_base/auth/__init__.py,sha256=LNbVtQDlzKWzLoCmxAPSIAcM6mlVMnn0Aj9HFUudseo,460
 tol/api_base/auth/asserts.py,sha256=3kHP2yW6HZxevIgeO8Rl3iKovi8WHZcguVbPSFUjawQ,2765
@@ -237,11 +237,13 @@ tol/prefect/factory.py,sha256=mO4KVnaEYMv-ojGJuiencNQMq6PAMU8cIc4QN5Kq8Gw,2208
 tol/prefect/filter.py,sha256=aNVTSmcrbb_4EllcFtBOSPuUGgwE-f-Kd5_an6lREtM,7044
 tol/prefect/prefect_datasource.py,sha256=JXLS4odWNWUu4mgojQogwE28mBTAbj-OQYFxhYNYHnM,8865
 tol/prefect/prefect_object.py,sha256=Nn3Y6ErQnXubX7M-GzRsfLHEzdHwKr5RWCjIJ4FA28U,495
-tol/s3/__init__.py,sha256=
+tol/s3/__init__.py,sha256=AnYM6rWRDJ-AkUBPuIFGrF3wyeh7euwUCiv2-gPy9J4,351
 tol/s3/converter.py,sha256=KylGAQvu1jRYJl4dcYK1IgbFsmHP3JXRI22MX8UzHz4,1023
 tol/s3/factory.py,sha256=rFcRVy4B8KVsUaaHhtpLUE90_hRnixkO2IjnB6-arkc,2163
 tol/s3/parser.py,sha256=EOiPYJ8ZLigAxKGL4cC8r7OGyjGdSMHmJMnDqHSiG_E,1907
 tol/s3/s3_datasource.py,sha256=tSUc78FN1OudpJcQ-7FI4S_LPT5j-BYIcwFaQPDy_VA,2072
+tol/s3/data_upload/__init__.py,sha256=4dkK7MPB0CzkkujFSVj0FzoG5ri_usQ7XaBB81Vc_qE,85
+tol/s3/data_upload/blueprint.py,sha256=rwZAXsHe_AJXAdRZVdUhtJg3P3SxOs_G_0F1eewPuT4,2574
 tol/sciops/__init__.py,sha256=PaC1_5wZhsZlqeYQRMpDeIf9L6Hi0eXs_pHXaWfscAo,131
 tol/sciops/configuration.py,sha256=21ddgHOmhZE1tcE-5_f4KiV-7CUqkF8yXi4iBYPtfmc,1279
 tol/sciops/consumer.py,sha256=Ikl8glrZ0yQKBhegC84vwNj43iK1Ca_SAn_gcyAhbKQ,2137
@@ -250,7 +252,7 @@ tol/sciops/messages.py,sha256=0kFsa9pIyWV6rADHjcqVa2q2D61U1AmtFK9Lq5mfjbU,1750
 tol/sciops/response_processors.py,sha256=QDBYT__3tmWArTzeVR29k6yfYQdXanvWSy5VI7nrpio,2658
 tol/sciops/sequencing_datasource.py,sha256=JT5eenYjrN-pJmaReSGigMq5PvmE2NSDI4bUMHfeMz4,7553
 tol/services/__init__.py,sha256=FgW9Rb3vp-bfG7OYGyyNdh0JC420opH0gKtr89z6WmE,125
-tol/services/s3_client.py,sha256=
+tol/services/s3_client.py,sha256=OR2PDz7EiU2i__UUIwtvd18MpMEOlPGJqoTGYxIT4Mo,1565
 tol/sources/__init__.py,sha256=4LbvDIZOOG7p-ebbvivP7NvrJeApUvGEIcDL58ahQJE,85
 tol/sources/benchling.py,sha256=C1mDri10F6DJ3PYOHHnrf8sJI2WF3xwMoFGLZw0nkKI,585
 tol/sources/benchling_warehouse.py,sha256=8i91fBSYWy0YvYj031BTGUL4bdi4vzQzcOBkhHM2s_Q,694
@@ -303,11 +305,9 @@ tol/sql/auth/__init__.py,sha256=e3JuwugXmXobklqZ1Mt1w03qPgb1WdUaJVM7oblzHyk,202
 tol/sql/auth/blueprint.py,sha256=u0vT_TC9IMKrg08QFa9W29_83mT0y0jzLj3DvXy1BBw,25906
 tol/sql/auth/models.py,sha256=4xNWBvyGiI3mwRyDY5ty48Bv9178ApQXPR-YjIdCsvk,11879
 tol/sql/pipeline_step/__init__.py,sha256=O7u4RrLfuoB0mwLcPxFoUrdTBZGB_4bE1vWCn5ho-qw,147
-tol/sql/pipeline_step/factory.py,sha256=
+tol/sql/pipeline_step/factory.py,sha256=hrv9iAjdUu3sq6R94PXKclatZNMKY8YMgBDwLWKuvR0,5131
 tol/sql/standard/__init__.py,sha256=2NbLXFk0rneGZosZ2ESIRcT0WMK0KncmPWaLPqvX-i4,142
-tol/sql/standard/factory.py,sha256=
-tol/sql/summary/__init__.py,sha256=5fNYAeOJ-XOdG9cgu7cUgDBwzG89cuc6RGGw_ZOjURk,143
-tol/sql/summary/factory.py,sha256=F01du9HYl4kmUBHGxgpibSi651GAI5wsRUdk7LBZzzI,2009
+tol/sql/standard/factory.py,sha256=yY8iWmZRMvUqphwnzBeOIQtKGgxsU6AcA7YTz53UYvc,20010
 tol/status/__init__.py,sha256=sBo-j1wCmberl89uryVCBEJk8ohbfsYhaNpIp_brR9Y,146
 tol/status/status_datasource.py,sha256=UYU2vB561XRWY8y2dY96qHiWXy15xaHxsbGCVCIUnqs,1916
 tol/sts/__init__.py,sha256=1nb_lBWDwxJo3hutxSid2rqMIpfZ4GHxDS6cfj-FKv4,187
@@ -324,10 +324,10 @@ tol/validators/allowed_keys.py,sha256=BJMomJtaQdxsdGsueDtLewv75TlwdIXiQipLGFcJ7_
 tol/validators/allowed_values.py,sha256=yJ5SdiUlV7PSKORtsBJ9hYSqwvlx_esbFmFL_Gxh-p4,2262
 tol/validators/regex.py,sha256=dKodGH0sv6DbqWeV6QXE6-GYjnG4rMO0rg8IEIaQG60,2364
 tol/validators/regex_by_value.py,sha256=o99NJlWPgQ0GrpVnep8-cHfjWnc9F2rChmXHIxjrMrk,2543
-tol/validators/unique_values.py,sha256=
-tol_sdk-1.7.
-tol_sdk-1.7.
-tol_sdk-1.7.
-tol_sdk-1.7.
-tol_sdk-1.7.
-tol_sdk-1.7.
+tol/validators/unique_values.py,sha256=tEXDNJj95XUJdhsZlO-pkXdQz-EYaSG-mcp4MDTL7eY,2835
+tol_sdk-1.7.4.dist-info/licenses/LICENSE,sha256=RF9Jacy-9BpUAQQ20INhTgtaNBkmdTolYCHtrrkM2-8,1077
+tol_sdk-1.7.4.dist-info/METADATA,sha256=1gosjFEvdmraYpLVv81dHvwCKBFGXDUdz__wwvImdLo,3079
+tol_sdk-1.7.4.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+tol_sdk-1.7.4.dist-info/entry_points.txt,sha256=jH3HfTwxjzog7E3lq8CKpUWGIRY9FSXbyL6CpUmv6D0,36
+tol_sdk-1.7.4.dist-info/top_level.txt,sha256=PwKMQLphyZNvagBoriVbl8uwHXQl8IC1niawVG0iXMM,10
+tol_sdk-1.7.4.dist-info/RECORD,,
tol/sql/summary/factory.py
DELETED
@@ -1,91 +0,0 @@
-# SPDX-FileCopyrightText: 2025 Genome Research Ltd.
-#
-# SPDX-License-Identifier: MIT
-
-from __future__ import annotations
-
-from collections.abc import Iterable as IterableABC
-from dataclasses import dataclass
-from typing import Iterator
-
-from sqlalchemy.dialects.postgresql import JSONB
-from sqlalchemy.orm import (
-    Mapped,
-    mapped_column
-)
-
-from ..model import Model
-
-
-@dataclass(frozen=True, kw_only=True)
-class SummaryModels(IterableABC[type[Model]]):
-    """
-    Contains the needed models for summarys.
-
-    """
-
-    summary: type[Model]
-
-    def __iter__(self) -> Iterator[type[Model]]:
-        """
-        Returns in order they should be deleted
-        """
-
-        return iter(
-            [
-                self.summary
-            ]
-        )
-
-
-def create_summary_models(
-    base_model_class: type[Model]
-) -> SummaryModels:
-    """
-    Creates all needed models (and joining tables) for
-    summaries.
-
-    Returns a `SummaryModels` instance that functions like an
-    `Iterable`.
-    """
-
-    class Summary(base_model_class):
-        __tablename__ = 'summary'
-
-        id: Mapped[int] = mapped_column(primary_key=True, autoincrement=True)  # noqa A003
-
-        source_object_type: Mapped[str] = mapped_column(nullable=False)
-        destination_object_type: Mapped[str] = mapped_column(nullable=True)
-
-        object_filters: Mapped[dict] = mapped_column(
-            JSONB,
-            nullable=False,
-            default={}
-        )
-
-        group_by: Mapped[dict] = mapped_column(
-            JSONB,
-            nullable=False,
-            default=[]
-        )
-
-        stats_fields: Mapped[dict] = mapped_column(
-            JSONB,
-            nullable=False,
-            default=[]
-        )
-
-        stats: Mapped[dict] = mapped_column(
-            JSONB,
-            nullable=False,
-            default=[]
-        )
-
-        prefix: Mapped[str] = mapped_column(
-            nullable=False,
-            default=''
-        )
-
-    return SummaryModels(
-        summary=Summary,
-    )

{tol_sdk-1.7.2.dist-info → tol_sdk-1.7.4.dist-info}/WHEEL
File without changes
{tol_sdk-1.7.2.dist-info → tol_sdk-1.7.4.dist-info}/entry_points.txt
File without changes
{tol_sdk-1.7.2.dist-info → tol_sdk-1.7.4.dist-info}/licenses/LICENSE
File without changes
{tol_sdk-1.7.2.dist-info → tol_sdk-1.7.4.dist-info}/top_level.txt
File without changes