tol-sdk 1.8.5__py3-none-any.whl → 1.8.7__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (33)
  1. tol/api_base/system.py +18 -0
  2. tol/api_client/api_datasource.py +23 -15
  3. tol/api_client/client.py +13 -8
  4. tol/api_client/factory.py +9 -12
  5. tol/benchling/benchling_warehouse_datasource.py +6 -3
  6. tol/benchling/sql/{extraction_containers_dna.sql → extraction_container_extraction_extraction_type_dna.sql} +25 -23
  7. tol/benchling/sql/sequencing_request_sequencing_platform_pacbio.sql +8 -8
  8. tol/core/factory.py +1 -1
  9. tol/core/requested_fields.py +21 -0
  10. tol/dummy/__init__.py +6 -0
  11. tol/dummy/client.py +88 -0
  12. tol/dummy/converter.py +48 -0
  13. tol/dummy/dummy_datasource.py +105 -0
  14. tol/dummy/factory.py +95 -0
  15. tol/dummy/parser.py +70 -0
  16. tol/flows/converters/__init__.py +5 -0
  17. tol/flows/converters/benchling_extraction_container_to_elastic_extraction_container_converter.py +53 -0
  18. tol/flows/converters/benchling_sequencing_request_to_elastic_sequencing_request_converter.py +8 -1
  19. tol/flows/converters/combine_fields_converter.py +45 -0
  20. tol/flows/converters/default_field_value_if_missing_converter.py +43 -0
  21. tol/flows/converters/elastic_sequencing_request_to_elastic_run_data_update_converter.py +8 -0
  22. tol/flows/converters/prefix_field_converter.py +49 -0
  23. tol/flows/converters/time_string_to_time.py +37 -28
  24. tol/sources/dummy.py +17 -0
  25. tol/sql/auth/blueprint.py +12 -5
  26. tol/sql/sql_datasource.py +1 -20
  27. tol/sql/standard/factory.py +2 -0
  28. {tol_sdk-1.8.5.dist-info → tol_sdk-1.8.7.dist-info}/METADATA +1 -1
  29. {tol_sdk-1.8.5.dist-info → tol_sdk-1.8.7.dist-info}/RECORD +33 -22
  30. {tol_sdk-1.8.5.dist-info → tol_sdk-1.8.7.dist-info}/WHEEL +1 -1
  31. {tol_sdk-1.8.5.dist-info → tol_sdk-1.8.7.dist-info}/entry_points.txt +0 -0
  32. {tol_sdk-1.8.5.dist-info → tol_sdk-1.8.7.dist-info}/licenses/LICENSE +0 -0
  33. {tol_sdk-1.8.5.dist-info → tol_sdk-1.8.7.dist-info}/top_level.txt +0 -0
tol/dummy/dummy_datasource.py ADDED
@@ -0,0 +1,105 @@
+ # SPDX-FileCopyrightText: 2026 Genome Research Ltd.
+ #
+ # SPDX-License-Identifier: MIT
+
+ from __future__ import annotations
+
+ import typing
+ from functools import cache
+ from typing import Callable, Iterable, Optional
+
+ from .client import DummyClient
+ from .converter import (
+     DummyConverter
+ )
+ from ..core import DataObject, DataSource, DataSourceError, DataSourceFilter
+ from ..core.operator import (
+     DetailGetter,
+     ListGetter
+ )
+
+ if typing.TYPE_CHECKING:
+     from ..core.session import OperableSession
+
+ ClientFactory = Callable[[], DummyClient]
+ DummyConverterFactory = Callable[[], DummyConverter]
+
+
+ class DummyDataSource(
+     DataSource,
+
+     # the supported operators
+     DetailGetter,
+     ListGetter
+ ):
+     """
+     A `DataSource` that outputs dummy data.
+
+     Developers should likely use `create_dummy_datasource`
+     instead of this directly.
+     """
+
+     def __init__(
+         self,
+         client_factory: ClientFactory,
+         dummy_converter_factory: DummyConverterFactory
+     ) -> None:
+
+         self.__client_factory = client_factory
+         self.__dummy_converter_factory = dummy_converter_factory
+         super().__init__({})
+
+     @property
+     @cache
+     def attribute_types(self) -> dict[str, dict[str, str]]:
+         return {
+             'record': {
+                 'big_string': 'str',
+                 'little_string': 'str',
+                 'bool': 'bool',
+                 'date': 'datetime',
+                 'int': 'int',
+                 'list': 'list[str]'
+             },
+             'category': {
+                 'name': 'str'
+             }
+         }
+
+     @property
+     @cache
+     def supported_types(self) -> list[str]:
+         return list(
+             self.attribute_types.keys()
+         )
+
+     def get_by_id(
+         self,
+         object_type: str,
+         object_ids: Iterable[str],
+         **kwargs
+     ) -> Iterable[Optional[DataObject]]:
+         if object_type not in self.supported_types:
+             raise DataSourceError(f'{object_type} is not supported')
+
+         client = self.__client_factory()
+         dummy_response = client.get_detail(object_type, object_ids)
+         dummy_converter = self.__dummy_converter_factory()
+
+         converted_objects, _ = dummy_converter.convert_list(dummy_response) \
+             if dummy_response is not None else ([], 0)
+         yield from self.sort_by_id(converted_objects, object_ids)
+
+     def get_list(
+         self,
+         object_type: str,
+         object_filters: Optional[DataSourceFilter] = None,
+         session: Optional[OperableSession] = None
+     ) -> Iterable[DataObject]:
+         if object_filters:
+             raise DataSourceError('Filtering is not supported')
+         objects = self.__client_factory().get_list(
+             object_type
+         )
+         converted_objects, _ = self.__dummy_converter_factory().convert_list(objects)
+         return iter(converted_objects)
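A minimal usage sketch (not part of the diff), assuming the `create_dummy_datasource` helper added in tol/dummy/factory.py below and the re-export from `tol.dummy` that tol/sources/dummy.py relies on:

# Sketch only; everything beyond the names shown in the diff is an assumption.
from tol.dummy import create_dummy_datasource

ds = create_dummy_datasource()
print(ds.supported_types)                 # ['record', 'category']

for record in ds.get_list('record'):      # passing a DataSourceFilter raises DataSourceError
    print(record.id, record.attributes)

# get_by_id yields one (possibly None) DataObject per requested id, in request order
records = list(ds.get_by_id('record', ['1', '2']))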
tol/dummy/factory.py ADDED
@@ -0,0 +1,95 @@
+ # SPDX-FileCopyrightText: 2024 Genome Research Ltd.
+ #
+ # SPDX-License-Identifier: MIT
+
+ from collections.abc import Mapping
+ from typing import Callable, Iterator, Optional
+
+ from .client import DummyClient
+ from .converter import (
+     DummyConverter
+ )
+ from .dummy_datasource import (
+     DummyConverterFactory,
+     DummyDataSource
+ )
+ from .parser import DefaultParser
+ from ..core import DataSource
+
+
+ class _DummyDSDict(Mapping):
+     def __init__(self, api_ds: DummyDataSource) -> None:
+         self.__ds = api_ds
+
+     def __getitem__(self, __k: str) -> DummyDataSource:
+         if __k not in self.__ds.supported_types:
+             raise KeyError()
+         return self.__ds
+
+     def __iter__(self) -> Iterator[str]:
+         return iter(self.__ds.supported_types)
+
+     def __len__(self) -> int:
+         return len(self.__ds.supported_types)
+
+
+ class _ConverterFactory:
+     """
+     Manages the instantiation of:
+
+     - `DummyConverter`
+     """
+
+     def __init__(self) -> None:
+         self.__data_source: Optional[DataSource] = None
+
+     @property
+     def data_source(self) -> Optional[DataSource]:
+         return self.__data_source
+
+     @data_source.setter
+     def data_source(
+         self,
+         ds: DataSource
+     ) -> None:
+
+         self.__data_source = ds
+
+     def dummy_converter_factory(self) -> DummyConverterFactory:
+         """
+         Returns an instantiated `DummyConverter`.
+         """
+
+         parser = DefaultParser(self.__ds_dict)
+         return DummyConverter(parser)
+
+     @property
+     def __ds_dict(self) -> dict[str, DataSource]:
+         return _DummyDSDict(self.data_source)
+
+
+ def _get_client_factory() -> Callable[[], DummyClient]:
+     """
+     A reasonable default for creating
+     a `DummyClient` instance
+     """
+
+     return lambda: DummyClient()
+
+
+ def create_dummy_datasource() -> DummyDataSource:
+     """
+     Instantiates `DummyDataSource`
+     """
+
+     client_factory = _get_client_factory()
+     manager = _ConverterFactory()
+
+     dummy_ds = DummyDataSource(
+         client_factory,
+         manager.dummy_converter_factory
+     )
+
+     manager.data_source = dummy_ds
+
+     return dummy_ds
tol/dummy/parser.py ADDED
@@ -0,0 +1,70 @@
+ # SPDX-FileCopyrightText: 2024 Genome Research Ltd.
+ #
+ # SPDX-License-Identifier: MIT
+
+ from __future__ import annotations
+
+ import typing
+ from abc import ABC, abstractmethod
+ from typing import Any, Iterable
+
+ from dateutil.parser import parse as dateutil_parse
+
+ from ..core import DataObject
+
+ if typing.TYPE_CHECKING:
+     from ..core import DataSource
+
+
+ DummyResource = dict[str, Any]
+ DummyDoc = dict[str, list[DummyResource]]
+
+
+ class Parser(ABC):
+
+     def parse_iterable(
+         self,
+         transfers: Iterable[DummyResource]
+     ) -> Iterable[DataObject]:
+         """
+         Parses an `Iterable` of Dummy transfer resources
+         """
+
+         return (
+             self.parse(t) for t in transfers
+         )
+
+     @abstractmethod
+     def parse(self, transfer: DummyResource) -> DataObject:
+         """
+         Parses an individual Dummy transfer resource to a
+         `DataObject` instance
+         """
+
+
+ class DefaultParser(Parser):
+
+     def __init__(self, data_source_dict: dict[str, DataSource]) -> None:
+         self.__dict = data_source_dict
+
+     def parse(self, transfer: DummyResource) -> DataObject:
+         ds = self.__dict[transfer.get('type')]
+         return ds.data_object_factory(
+             transfer.get('type'),
+             id_=transfer.get('id'),
+             attributes={
+                 k: (
+                     dateutil_parse(v)
+                     if k in ['date'] and v is not None
+                     else v
+                 )
+                 for k, v in transfer.items()
+                 if k not in ['id', 'type', 'category']
+             },
+             to_one={
+                 'category': ds.data_object_factory(
+                     'category',
+                     transfer.get('category')
+                 ) if 'category' in transfer else None
+             }
+         )
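For illustration only, a transfer resource of the shape `DefaultParser.parse` handles might be processed as below; the field values and the `ds` placeholder are invented, and only the `id`/`type`/`date`/`category` handling comes from the diff.

# Hypothetical input; ds is a placeholder for any DataSource exposing data_object_factory.
transfer = {
    'id': 'r1',
    'type': 'record',
    'big_string': 'lorem ipsum',
    'date': '2024-05-01T12:00:00',
    'category': 'c1',
}
parser = DefaultParser({'record': ds, 'category': ds})
obj = parser.parse(transfer)
# obj.attributes['date'] is parsed to a datetime via dateutil, and
# obj gains a to-one 'category' relationship pointing at id 'c1'.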
tol/flows/converters/__init__.py CHANGED
@@ -2,6 +2,7 @@
  #
  # SPDX-License-Identifier: MIT
 
+ from .benchling_extraction_container_to_elastic_extraction_container_converter import BenchlingExtractionContainerToElasticExtractionContainerConverter # noqa F401
  from .benchling_extraction_to_elastic_extraction_converter import BenchlingExtractionToElasticExtractionConverter # noqa F401
  from .benchling_extraction_to_elastic_sequencing_request_converter import BenchlingExtractionToElasticSequencingRequestConverter # noqa F401
  from .benchling_sequencing_request_to_elastic_sequencing_request_converter import ( # noqa F401
@@ -56,4 +57,8 @@ from .sts_sample_to_casm_benchling_converter import StsSampleToCasmBenchlingConv
  from .treeofsex_species_to_treeofsexwh_species_converter import TreeofsexSpeciesToTreeofsexwhSpeciesConverter # noqa F401
  from .treeofsex_upload_to_treeofsex_attribute_converter import TreeofsexUploadToTreeofsexAttributeConverter # noqa F401
  from .skip_null_fields_converter import SkipNullFieldsConverter # noqa F401
+ from .default_field_value_if_missing_converter import DefaultFieldValueIfMissingConverter # noqa F401
+ from .prefix_field_converter import PrefixFieldConverter # noqa F401
+ from .combine_fields_converter import CombineFieldsConverter # noqa F401
  from .auto_detect_manifest_type_converter import AutoDetectManifestTypeConverter # noqa F401
+ from .time_string_to_time import TimeStringToTimeConverter # noqa F401
tol/flows/converters/benchling_extraction_container_to_elastic_extraction_container_converter.py ADDED
@@ -0,0 +1,53 @@
+ # SPDX-FileCopyrightText: 2026 Genome Research Ltd.
+ #
+ # SPDX-License-Identifier: MIT
+
+ from typing import Iterable
+
+ from ...core import (
+     DataObject,
+     DataObjectToDataObjectOrUpdateConverter
+ )
+
+
+ class BenchlingExtractionContainerToElasticExtractionContainerConverter(
+         DataObjectToDataObjectOrUpdateConverter):
+     def convert(self, data_object: DataObject) -> Iterable[DataObject]:
+         if data_object.tissue_sts_id is not None:
+             ret = self._data_object_factory(
+                 'extraction_container',
+                 data_object.id,
+                 attributes={
+                     **{k: v
+                        for k, v in data_object.attributes.items()
+                        if k not in ['tissue_sts_id', 'specimen_id', 'taxon_id',
+                                     'programme_id', 'eln_tissue_prep_id', 'extraction_id']}
+                 },
+                 to_one={
+                     'sample': self._data_object_factory(
+                         'sample',
+                         data_object.tissue_sts_id
+                     ) if data_object.tissue_sts_id is not None else None,
+                     'species': self._data_object_factory(
+                         'species',
+                         data_object.taxon_id
+                     ) if data_object.taxon_id is not None else None,
+                     'specimen': self._data_object_factory(
+                         'specimen',
+                         data_object.specimen_id
+                     ) if data_object.specimen_id is not None else None,
+                     'tolid': self._data_object_factory(
+                         'tolid',
+                         data_object.programme_id
+                     ) if data_object.programme_id is not None else None,
+                     'tissue_prep': self._data_object_factory(
+                         'tissue_prep',
+                         data_object.eln_tissue_prep_id
+                     ) if data_object.eln_tissue_prep_id is not None else None,
+                     'extraction': self._data_object_factory(
+                         'extraction',
+                         data_object.extraction_id
+                     ) if data_object.extraction_id is not None else None,
+                 }
+             )
+             yield ret
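A hedged usage sketch of the new converter; `factory` and `benchling_container` are placeholders, and the constructor argument follows the pattern of the other converters in this diff (a data-object factory passed to the base class).

# Sketch only; not from the diff.
converter = BenchlingExtractionContainerToElasticExtractionContainerConverter(factory)
elastic_container = next(converter.convert(benchling_container))
# Identifier attributes (tissue_sts_id, taxon_id, specimen_id, programme_id,
# eln_tissue_prep_id, extraction_id) are dropped from the attributes and
# resurface as to-one relationships ('sample', 'species', 'specimen', 'tolid',
# 'tissue_prep', 'extraction'); objects without a tissue_sts_id yield nothing.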
tol/flows/converters/benchling_sequencing_request_to_elastic_sequencing_request_converter.py CHANGED
@@ -15,12 +15,18 @@ class BenchlingSequencingRequestToElasticSequencingRequestConverter(
      def convert(self, data_object: DataObject) -> Iterable[DataObject]:
          if data_object.sts_id is not None:
              extraction = None
+             extraction_container = None
              tissue_prep = None
              if 'extraction_id' in data_object.attributes:
                  extraction = self._data_object_factory(
                      'extraction',
                      data_object.extraction_id
                  )
+             if 'fluidx_container_id' in data_object.attributes:
+                 extraction_container = self._data_object_factory(
+                     'extraction_container',
+                     data_object.fluidx_container_id
+                 )
              if 'tissue_prep_id' in data_object.attributes:
                  tissue_prep = self._data_object_factory(
                      'tissue_prep',
@@ -34,7 +40,7 @@ class BenchlingSequencingRequestToElasticSequencingRequestConverter(
                         for k, v in data_object.attributes.items()
                         if k not in ['sanger_sample_id', 'sts_id',
                                      'specimen_id', 'taxon_id', 'extraction_id',
-                                     'programme_id', 'tissue_prep_id']}
+                                     'programme_id', 'tissue_prep_id', 'fluidx_container_id']}
                  },
                  to_one={
                      'sample': self._data_object_factory(
@@ -54,6 +60,7 @@ class BenchlingSequencingRequestToElasticSequencingRequestConverter(
                          data_object.programme_id
                      ) if data_object.programme_id is not None else None,
                      'extraction': extraction,
+                     'extraction_container': extraction_container,
                      'tissue_prep': tissue_prep,
                  })
              yield ret
tol/flows/converters/combine_fields_converter.py ADDED
@@ -0,0 +1,45 @@
+ # SPDX-FileCopyrightText: 2025 Genome Research Ltd.
+ # SPDX-License-Identifier: MIT
+
+ from dataclasses import dataclass
+ from typing import Iterable
+
+ from tol.core import DataObject, DataObjectToDataObjectOrUpdateConverter
+
+
+ class CombineFieldsConverter(DataObjectToDataObjectOrUpdateConverter):
+
+     @dataclass(slots=True, frozen=True, kw_only=True)
+     class Config:
+         field1: str
+         field2: str
+         dest_field: str
+         lowercase_field1: bool
+
+     __slots__ = ('__config',)
+     __config: Config
+
+     def __init__(self, data_object_factory, config: Config) -> None:
+         super().__init__(data_object_factory)
+         self.__config = config
+
+     def convert(self, data_object: DataObject) -> Iterable[DataObject]:
+         """
+         Concatenates the values of two fields and stores the result in a new field.
+         The first field's value may be lowercased if specified in the configuration.
+         The destination field name is given by the configuration.
+         """
+
+         val1 = data_object.get_field_by_name(self.__config.field1)
+         val2 = data_object.get_field_by_name(self.__config.field2)
+         attributes = dict(data_object.attributes)
+
+         if val1 is not None and val2 is not None:
+             part1 = str(val1).lower() if self.__config.lowercase_field1 else str(val1)
+             attributes[self.__config.dest_field] = f'{part1}{val2}'
+
+         yield self._data_object_factory(
+             data_object.type,
+             data_object.id,
+             attributes=attributes,
+         )
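A hypothetical usage sketch; the field names and the `factory`/`obj` placeholders are invented for illustration, while the constructor and Config fields come from the diff above.

# Sketch only: combine two fields into a third, lowercasing the first.
config = CombineFieldsConverter.Config(
    field1='platform',
    field2='run_id',
    dest_field='platform_run',
    lowercase_field1=True,
)
converter = CombineFieldsConverter(factory, config)   # factory: a data-object factory
combined = next(converter.convert(obj))
# e.g. platform='PacBio', run_id='42' -> combined.attributes['platform_run'] == 'pacbio42';
# if either source field is None, the destination field is simply not added.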
tol/flows/converters/default_field_value_if_missing_converter.py ADDED
@@ -0,0 +1,43 @@
+ # SPDX-FileCopyrightText: 2025 Genome Research Ltd.
+ # SPDX-License-Identifier: MIT
+
+ from dataclasses import dataclass
+ from typing import Iterable
+
+ from tol.core import DataObject, DataObjectToDataObjectOrUpdateConverter
+
+
+ class DefaultFieldValueIfMissingConverter(DataObjectToDataObjectOrUpdateConverter):
+
+     @dataclass(slots=True, frozen=True, kw_only=True)
+     class Config:
+         field_name: str
+         default_value: str
+
+     __slots__ = ['__config']
+     __config: Config
+
+     def __init__(self, data_object_factory, config: Config) -> None:
+         super().__init__(data_object_factory)
+         self.__config = config
+
+     def convert(self, data_object: DataObject) -> Iterable[DataObject]:
+         """
+         Adds a default value for a configured field if missing, empty, or None
+         """
+
+         attributes_obj = data_object.attributes
+         if hasattr(attributes_obj, 'get_field_by_name'):
+             current_value = attributes_obj.get_field_by_name(self.__config.field_name)
+         else:
+             current_value = attributes_obj.get(self.__config.field_name)
+         attributes = dict(attributes_obj)
+         if not current_value:
+             attributes[self.__config.field_name] = self.__config.default_value
+
+         ret = self._data_object_factory(
+             data_object.type,
+             data_object.id,
+             attributes=attributes
+         )
+         yield ret
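A hypothetical usage sketch; the field name, default value and the `factory`/`obj` placeholders are assumptions.

# Sketch only: backfill a missing 'library_type' with a default.
config = DefaultFieldValueIfMissingConverter.Config(
    field_name='library_type',
    default_value='unknown',
)
converter = DefaultFieldValueIfMissingConverter(factory, config)
patched = next(converter.convert(obj))
# patched.attributes['library_type'] is 'unknown' when the field was absent, None or ''.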
tol/flows/converters/elastic_sequencing_request_to_elastic_run_data_update_converter.py CHANGED
@@ -29,5 +29,13 @@ class ElasticSequencingRequestToElasticRunDataUpdateConverter(
                      'extraction',
                      extraction.id
                  )
+         if 'benchling_extraction_container' in data_object.to_one_relationships:
+             extraction_container = \
+                 data_object.to_one_relationships['benchling_extraction_container']
+             if extraction_container is not None:
+                 to_ones['benchling_extraction_container'] = self._data_object_factory(
+                     'extraction_container',
+                     extraction_container.id
+                 )
          yield (None, to_ones | {
              'mlwh_sequencing_request.id': data_object.id})  # The candidate key
tol/flows/converters/prefix_field_converter.py ADDED
@@ -0,0 +1,49 @@
+ # SPDX-FileCopyrightText: 2025 Genome Research Ltd.
+ # SPDX-License-Identifier: MIT
+
+ from dataclasses import dataclass
+ from typing import Iterable
+
+ from tol.core import DataObject, DataObjectToDataObjectOrUpdateConverter
+
+
+ class PrefixFieldConverter(DataObjectToDataObjectOrUpdateConverter):
+
+     @dataclass(slots=True, frozen=True, kw_only=True)
+     class Config:
+         field_name: str
+         prefix: str
+
+     __slots__ = ['__config']
+     __config: Config
+
+     def __init__(self, data_object_factory, config: Config) -> None:
+         super().__init__(data_object_factory)
+         self.__config = config
+         self._data_object_factory = data_object_factory
+
+     def convert(self, data_object: DataObject) -> Iterable[DataObject]:
+         """
+         Ensures the configured field value
+         starts with the configured prefix. If the field is None, it is
+         left as-is.
+         """
+
+         value = data_object.get_field_by_name(
+             self.__config.field_name
+         )
+
+         if value is not None:
+             value_str = str(value)
+             if not value_str.startswith(self.__config.prefix):
+                 value = f'{self.__config.prefix}{value_str}'
+
+         ret = self._data_object_factory(
+             data_object.type,
+             data_object.id,
+             attributes={
+                 **data_object.attributes,
+                 self.__config.field_name: value
+             }
+         )
+         yield ret
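A hypothetical usage sketch; the accession field, prefix and the `factory`/`obj` placeholders are invented for illustration.

# Sketch only: ensure accession values carry a 'SAMEA' prefix.
config = PrefixFieldConverter.Config(field_name='biosample_accession', prefix='SAMEA')
converter = PrefixFieldConverter(factory, config)
prefixed = next(converter.convert(obj))
# '12345' becomes 'SAMEA12345'; values already starting with 'SAMEA', and None, are left as-is.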
tol/flows/converters/time_string_to_time.py CHANGED
@@ -3,33 +3,42 @@
  # SPDX-License-Identifier: MIT
 
  import re
+ from dataclasses import dataclass
  from datetime import time
+ from typing import Iterable
 
- from tol.core import DataObject
-
-
- class Converter:
-     def convert(self, obj):
-         raise NotImplementedError()
-
-
- class TimeStringToTimeConverter(Converter):
-     """
-     Converts string fields representing time in HH:MM (24-hour) format to Python time objects.
-     If the string is not in HH:MM, tries to append ':00' and parse as HH:MM:SS.
-     """
-     def __init__(self, field: str):
-         self.field = field
-
-     def convert(self, obj: DataObject) -> DataObject:
-         value = obj.attributes.get(self.field)
-         if isinstance(value, str):
-             match = re.match(r'^(\d{1,2}):(\d{2})(?::(\d{2}))?$', value)
-             if match:
-                 h, m = int(match.group(1)), int(match.group(2))
-                 s = int(match.group(3)) if match.group(3) else 0
-                 try:
-                     obj.attributes[self.field] = time(h, m, s)
-                 except ValueError:
-                     pass
-         return obj
+ from tol.core import DataObject, DataObjectToDataObjectOrUpdateConverter
+
+
+ class TimeStringToTimeConverter(DataObjectToDataObjectOrUpdateConverter):
+
+     @dataclass(slots=True, frozen=True, kw_only=True)
+     class Config:
+         field_names: list[str]
+
+     __slots__ = ['__config']
+     __config: Config
+
+     def __init__(self, data_object_factory, config: Config) -> None:
+         super().__init__(data_object_factory)
+         self.__config = config
+         self._data_object_factory = data_object_factory
+
+     def convert(self, data_object: DataObject) -> Iterable[DataObject]:
+         """
+         Converts string fields representing time in HH:MM (24-hour) format to Python time objects.
+         If the string is not in HH:MM, tries to append ':00' and parse as HH:MM:SS.
+         """
+
+         for field_name in self.__config.field_names:
+             value = data_object.attributes.get(field_name)
+             if isinstance(value, str):
+                 match = re.match(r'^(\d{1,2}):(\d{2})(?::(\d{2}))?$', value)
+                 if match:
+                     h, m = int(match.group(1)), int(match.group(2))
+                     s = int(match.group(3)) if match.group(3) else 0
+                     try:
+                         data_object.attributes[field_name] = time(h, m, s)
+                     except ValueError:
+                         pass
+         yield data_object
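A hypothetical usage sketch of the reworked converter; the field names and the `factory`/`obj` placeholders are assumptions.

# Sketch only: normalise time-of-day strings on selected fields.
config = TimeStringToTimeConverter.Config(field_names=['collection_time', 'freeze_time'])
converter = TimeStringToTimeConverter(factory, config)
out = next(converter.convert(obj))
# '09:30' -> datetime.time(9, 30); '09:30:15' -> datetime.time(9, 30, 15);
# strings that do not match HH:MM[:SS] are left untouched.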
tol/sources/dummy.py ADDED
@@ -0,0 +1,17 @@
+ # SPDX-FileCopyrightText: 2024 Genome Research Ltd.
+ #
+ # SPDX-License-Identifier: MIT
+
+ from ..core import (
+     core_data_object
+ )
+ from ..dummy import (
+     DummyDataSource,
+     create_dummy_datasource
+ )
+
+
+ def dummy(**kwargs) -> DummyDataSource:
+     dummy = create_dummy_datasource()
+     core_data_object(dummy)
+     return dummy
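A minimal sketch of the new source entry point, assuming the module is importable as `tol.sources.dummy` (mirroring the file path shown above):

# Sketch only; the import path is an assumption based on the file location.
from tol.sources.dummy import dummy

ds = dummy()                           # DummyDataSource wired via create_dummy_datasource
records = list(ds.get_list('record'))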
tol/sql/auth/blueprint.py CHANGED
@@ -8,7 +8,6 @@ from datetime import datetime, timedelta
  from typing import Any, Callable, Optional
  from urllib.parse import urlencode
 
- import requests
  from requests.auth import HTTPBasicAuth
 
  from .models import ModelClass, ModelTuple, create_models
@@ -228,9 +227,14 @@ class DbAuthManager(AuthManager):
          Raises:
              requests.HTTPError: If the user info request fails
          """
+
+         client = HttpClient()
+
+         session = client.get_session()
+
          headers = {'Authorization': f'Bearer {token}'}
 
-         r = requests.get(self.__config.user_info_url, headers=headers)
+         r = session.get(self.__config.user_info_url, headers=headers)
          r.raise_for_status()
 
          json_return = r.json()
@@ -401,13 +405,16 @@ class DbAuthManager(AuthManager):
          Raises:
              requests.HTTPError: If the token request fails
          """
-         r = requests.post(
+         client = HttpClient()
+
+         session = client.get_session()
+         r = session.post(
              self.__config.token_url,
-             auth=self.__basic_auth(),
              data=self.__token_post_data(code),
+             auth=self.__basic_auth()
          )
-         r.raise_for_status()
 
+         r.raise_for_status()
          return r.json()
 
      def __basic_auth(self) -> HTTPBasicAuth: