tol-sdk 1.8.6__py3-none-any.whl → 1.8.8__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -134,28 +134,17 @@ class ApiDataSource(
  for k, v in transfer.items()
  }

+ @requested_fields_to_tree
  @validate('detailGet')
  def get_by_id(
  self,
  object_type: str,
  object_ids: Iterable[str],
  session: Optional[OperableSession] = None,
- requested_fields: list[str] | None = None,
  requested_tree: ReqFieldsTree | None = None,
  ) -> Iterable[Optional[DataObject]]:

  client = self.__client_factory()
- # This would be better handled in the decorator but there is a slight
- # difference in handling here with the requested_tree when requested_fields is None
- requested_tree = (
- ReqFieldsTree(
- object_type,
- self,
- requested_fields=requested_fields,
- )
- if object_type
- else None
- )

  json_responses = (
  client.get_detail(
@@ -182,7 +171,6 @@ class ApiDataSource(
  object_filters: Optional[DataSourceFilter] = None,
  sort_by: Optional[str] = None,
  session: Optional[OperableSession] = None,
- requested_fields: list[str] | None = None,
  requested_tree: ReqFieldsTree | None = None,
  ) -> tuple[Iterable[DataObject], int]:

@@ -204,7 +192,6 @@ class ApiDataSource(
  object_type: str,
  object_filters: Optional[DataSourceFilter] = None,
  session: Optional[OperableSession] = None,
- requested_fields: list[str] | None = None,
  requested_tree: ReqFieldsTree | None = None,
  ) -> Iterable[DataObject]:
  if self.__can_cursor(object_type, object_filters):
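The requested_fields keyword is no longer declared on get_by_id, get_list or get_iterable themselves: the new @requested_fields_to_tree decorator pops it from kwargs and builds the ReqFieldsTree before the method body runs. A minimal caller-side sketch (object type and field names are illustrative; ds stands for an already-configured ApiDataSource):

def fetch_sample(ds, sample_id):
    # Either pass requested_fields and let the decorator build the tree ...
    return list(ds.get_by_id(
        'sample',
        [sample_id],
        requested_fields=['name', 'species.taxon_id'],
    ))
    # ... or pass a pre-built requested_tree instead; passing both raises TypeError.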
tol/api_client/client.py CHANGED
@@ -54,11 +54,13 @@ class JsonApiClient(HttpClient):
  url = self.__detail_url(object_type, object_id)
  headers = self._merge_headers()

+ params = {}
+ if requested_tree:
+ params['requested_fields'] = requested_tree.to_paths()
+
  return self.__fetch_detail(
  url,
- params={
- 'requested_fields': requested_tree.to_paths() if requested_tree else None,
- },
+ params=params,
  headers=headers,
  )
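In short, the client now omits the requested_fields query parameter entirely when no tree is supplied, instead of always sending the key with a possible None value. A small standalone sketch (the helper name is hypothetical; only the branching mirrors the diff):

def build_detail_params(requested_tree=None):
    params = {}
    if requested_tree:
        # The key is only added when there is something to request.
        params['requested_fields'] = requested_tree.to_paths()
    return params

assert build_detail_params(None) == {}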

tol/api_client/factory.py CHANGED
@@ -64,6 +64,14 @@ class _ConverterFactory:
  """
  Returns an instantiated `JsonApiConverter`.
  """
+
+ # If we're going to parse a JSON:API data object (i.e. we have been
+ # passed an `object_type`), we need to add a ReqFieldsTree so that
+ # DataSources with default loading of to-one objects correctly
+ # process the to-ones from the `included` array.
+ if object_type and not requested_tree:
+ requested_tree = ReqFieldsTree(object_type, self.__data_source)
+
  parser = DefaultParser(self.__ds_dict, requested_tree)
  return JsonApiConverter(parser)
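The same fallback, written as a free function so the branch is easy to read (tree_cls stands in for ReqFieldsTree; the function itself is not part of the package):

def default_requested_tree(object_type, requested_tree, data_source, tree_cls):
    # Build a default tree only when we know which object_type is being parsed
    # and the caller has not supplied a tree of their own.
    if object_type and not requested_tree:
        requested_tree = tree_cls(object_type, data_source)
    return requested_tree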

@@ -45,6 +45,7 @@ class BenchlingWarehouseDataSource(DataSource, ListGetter):
  'sample': 'sts_id',
  'sequencing_request': 'sanger_sample_id',
  'extraction': 'extraction_id',
+ 'extraction_container': 'fluidx_container_id',
  'tissue_prep': 'eln_tissue_prep_id'
  }

@@ -70,7 +71,7 @@ class BenchlingWarehouseDataSource(DataSource, ListGetter):
  yield self.data_object_factory(
  object_type,
  id_=obj[id_col],
- attributes=obj
+ attributes={k: v for k, v in obj.items() if k != id_col}
  )

  def get_list(
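A worked example of the attribute filtering above, which stops the id column being duplicated inside attributes (row values are illustrative):

id_col = 'fluidx_container_id'
obj = {'fluidx_container_id': 'FX00123456', 'volume_ul': 50}
attributes = {k: v for k, v in obj.items() if k != id_col}
assert attributes == {'volume_ul': 50}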
@@ -82,7 +83,9 @@ class BenchlingWarehouseDataSource(DataSource, ListGetter):
  file_suffix = ''
  if object_filters is not None:
  if isinstance(object_filters.and_, dict):
- for field_name in ['sequencing_platform', 'extraction_type']:
+ for field_name in [
+ 'sequencing_platform', 'extraction_type', 'extraction.extraction_type'
+ ]:
  if field_name in object_filters.and_:
  # For an in_list, treat as multiple eq
  if 'in_list' in object_filters.and_[field_name]:
@@ -101,7 +104,7 @@ class BenchlingWarehouseDataSource(DataSource, ListGetter):
  ) for val in object_filters.and_[field_name]['in_list']['value']
  )
  elif 'eq' in object_filters.and_[field_name]:
- file_suffix = '_' + field_name + '_' \
+ file_suffix = '_' + field_name.replace('.', '_') + '_' \
  + object_filters.and_[field_name]['eq']['value']
  else:
  raise DataSourceError('Filtering only on sequencing platform and extraction '
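Worked example of the suffix handling for the new dotted filter field (values are illustrative; how the suffix is combined with the object type into the final SQL filename is an assumption based on the renamed file in the RECORD below):

field_name = 'extraction.extraction_type'
value = 'dna'
file_suffix = '_' + field_name.replace('.', '_') + '_' + value
assert file_suffix == '_extraction_extraction_type_dna'
# e.g. 'extraction_container' + file_suffix + '.sql'
#   -> extraction_container_extraction_extraction_type_dna.sql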
@@ -9,28 +9,29 @@ Output: Table with columns:

  1) taxon_id: [character] Tissue metadata. Origin: STS
  2) eln_tissue_id: [character] Benchling ID for the tissue the extraction is derived from.
- 3) eln_tissue_prep_id: [character] Benchling ID for the tissue prep the extraction is derived from.
- 4) extraction_id: [character] DNA extraction entity ID (Benchling).
- 5) programme_id: [character] ToLID. Origin: BWH.
- 6) specimen_id: [character] Specimen ID. Origin: STS.
- 7) creation_date: [date] Date the container was created.
- 8) fluidx_container_id: [character] Primary key for the FluidX container.
- 9) fluidx_id: [character] FluidX barcode.
- 10) tube_type: [character] Type of tube/container.
- 11) volume_ul: [numeric] Volume in microliters (0 if archived as 'Retired' or 'Expended').
- 12) location: [character] Storage location name.
- 13) rack: [character] Box/rack barcode.
- 14) archive_purpose: [character] Reason for archiving the DNA extraction.
- 15) nanodrop_concentration_ngul: [numeric] Latest Nanodrop concentration (ng/µL).
- 16) dna_260_280_ratio: [numeric] Latest Nanodrop 260/280 ratio.
- 17) dna_260_230_ratio: [numeric] Latest Nanodrop 260/230 ratio.
- 18) qubit_concentration_ngul: [numeric] Latest Qubit concentration (ng/µL).
- 19) yield_ng: [numeric] Latest yield (ng).
- 20) femto_date_code: [character] Latest Femto date code.
- 21) femto_description: [character] Latest Femto profile description.
- 22) gqn_index: [numeric] Latest GQN index from Femto.
- 23) next_step: [character] Latest decision making next step.
- 24) extraction_qc_result: [character] Latest extraction QC result.
+ 3) tissue_sts_id: [character] STS ID for the tissue the extraction is derived from.
+ 4) eln_tissue_prep_id: [character] Benchling ID for the tissue prep the extraction is derived from.
+ 5) extraction_id: [character] DNA extraction entity ID (Benchling).
+ 6) programme_id: [character] ToLID. Origin: BWH.
+ 7) specimen_id: [character] Specimen ID. Origin: STS.
+ 8) creation_date: [date] Date the container was created.
+ 9) fluidx_container_id: [character] Primary key for the FluidX container.
+ 10) fluidx_id: [character] FluidX barcode.
+ 11) tube_type: [character] Type of tube/container.
+ 12) volume_ul: [numeric] Volume in microliters (0 if archived as 'Retired' or 'Expended').
+ 13) location: [character] Storage location name.
+ 14) rack: [character] Box/rack barcode.
+ 15) archive_purpose: [character] Reason for archiving the DNA extraction.
+ 16) nanodrop_concentration_ngul: [numeric] Latest Nanodrop concentration (ng/µL).
+ 17) dna_260_280_ratio: [numeric] Latest Nanodrop 260/280 ratio.
+ 18) dna_260_230_ratio: [numeric] Latest Nanodrop 260/230 ratio.
+ 19) qubit_concentration_ngul: [numeric] Latest Qubit concentration (ng/µL).
+ 20) yield_ng: [numeric] Latest yield (ng).
+ 21) femto_date_code: [character] Latest Femto date code.
+ 22) femto_description: [character] Latest Femto profile description.
+ 23) gqn_index: [numeric] Latest GQN index from Femto.
+ 24) next_step: [character] Latest decision making next step.
+ 25) extraction_qc_result: [character] Latest extraction QC result.

  NOTES:
  1) Only extractions from the 'ToL Core Lab' project and relevant folders are included.
@@ -109,6 +110,7 @@ latest_decision_making AS (
  SELECT DISTINCT
  t.taxon_id,
  t.id AS eln_tissue_id,
+ t.sts_id AS tissue_sts_id,
  tp.id AS eln_tissue_prep_id,
  dna.id AS extraction_id,
  t.programme_id,
@@ -170,4 +172,4 @@ LEFT JOIN entry$raw AS ent
  WHERE proj.name = 'ToL Core Lab'
  AND (f.name IN ('Routine Throughput', 'DNA', 'Core Lab Entities', 'Benchling MS Project Move') OR f.name IS NULL)
  AND (con.archive_purpose$ != ('Made in error') OR con.archive_purpose$ IS NULL)
- AND ent.name NOT LIKE '%Nuclei isolation and tagmentation%'
+ AND COALESCE(ent.name, '') NOT LIKE '%Nuclei isolation and tagmentation%'
@@ -24,7 +24,7 @@ Output: Table with cols:
  4) eln_file_registry_id: [character] id in Benchling Registry. Origin: BWH
  5) extraction_id: [character] Original DNA extract entity name. For pooled samples, the first DNA extract pooled. Origin: BWH
  6) submission_sample_name: [character] Entity name. Origin: BWH
- 7) fluidx_id: [character] Container barcode of the DNA fluidx tube. Origin: BWH
+ 7) fluidx_container_id: [character] Container id of the DNA fluidx tube. Origin: BWH
  8) programme_id: [character] ToLID. Origin: BWH
  9) specimen_id: [character] Specimen ID. Origin: STS
  10) tube_name: [character] Name of the submission tube/container.
@@ -73,7 +73,7 @@ pacbio_submissions_container_routine AS (
  subsam.file_registry_id$ AS eln_file_registry_id,
  subsam.original_dna_extract AS extraction_id,
  subsam.name$ AS submission_sample_name,
- c_dna.barcode AS fluidx_id,
+ c_dna.id AS fluidx_container_id,
  t.programme_id,
  t.specimen_id,
  con.name AS tube_name,
@@ -145,7 +145,7 @@ pacbio_submissions_container_pooled AS (
  subsam.file_registry_id$ AS eln_file_registry_id,
  subsam.pooled_sample AS extraction_id,
  subsam.name$ AS eln_submission_sample_name,
- c_pool.barcode AS fluidx_id,
+ c_pool.id AS fluidx_container_id,
  t.programme_id,
  t.specimen_id,
  con.name AS tube_name,
@@ -219,7 +219,7 @@ pacbio_submissions_container_legacy_deprecated AS (
  subsam.file_registry_id$ AS eln_file_registry_id,
  subsam.original_dna_extract AS extraction_id,
  subsam.name$ AS submission_sample_name,
- c_dna.barcode AS fluidx_id,
+ c_dna.id AS fluidx_container_id,
  t.programme_id,
  t.specimen_id,
  con.name AS tube_name,
@@ -283,7 +283,7 @@ pacbio_submissions_plate_automated_manifest AS (
  subsam.file_registry_id$ AS eln_file_registry_id,
  subsam.originaL_dna_extract AS extraction_id,
  subsam.name$ AS submission_sample_name,
- c_dna.barcode AS fluidx_id,
+ c_dna.id AS fluidx_container_id,
  t.programme_id,
  t.specimen_id,
  con.name AS tube_name,
@@ -350,7 +350,7 @@ pacbio_submissions_plate_automated_manifest_pooled AS (
  subsam.file_registry_id$ AS eln_file_registry_id,
  subsam.pooled_sample AS extraction_id,
  subsam.name$ AS submission_sample_name,
- c_pool.barcode AS fluidx_id,
+ c_pool.id AS fluidx_container_id,
  t.programme_id,
  t.specimen_id,
  con.name AS tube_name,
@@ -415,7 +415,7 @@ pacbio_submissions_plate_routine AS (
  subsam.file_registry_id$ AS eln_file_registry_id,
  subsam.original_dna_extract AS extraction_id,
  subsam.name$ AS submission_sample_name,
- c_dna.barcode AS fluidx_id,
+ c_dna.id AS fluidx_container_id,
  t.programme_id,
  t.specimen_id,
  c_subsam.name AS tube_name,
@@ -485,7 +485,7 @@ pacbio_submissions_plate_routine_pooled AS (
  subsam.file_registry_id$ AS eln_file_registry_id,
  subsam.pooled_sample AS extraction_id,
  subsam.name$ AS submission_sample_name,
- c_pool.barcode AS fluidx_id,
+ c_pool.id AS fluidx_container_id,
  t.programme_id,
  t.specimen_id,
  c_subsam.name AS tube_name,
tol/core/factory.py CHANGED
@@ -189,7 +189,7 @@ def core_data_object(
  if object_from_datasource is not None:
  self.__type = object_from_datasource.type
  self.__attributes = object_from_datasource.attributes
- self.__to_one_objects = object_from_datasource.to_one_objects
+ self.__to_one_objects = object_from_datasource._to_one_objects
  break

  @property
@@ -17,17 +17,17 @@ def requested_fields_to_tree(func):
  `requested_tree` object has not been given.
  """

- def wrapper(self, tablename, *args, **kwargs):
+ def wrapper(self, object_type, *args, **kwargs):
  if 'requested_fields' in kwargs:
  if 'requested_tree' in kwargs:
  msg = 'Both requested_fields and requested_tree arguments given'
  raise TypeError(msg)
  flds = kwargs.pop('requested_fields')
- kwargs['requested_tree'] = ReqFieldsTree(
- tablename, self, requested_fields=flds
- )
- return func(self, tablename, *args, **kwargs)
-
+ if flds and object_type:
+ kwargs['requested_tree'] = ReqFieldsTree(
+ object_type, self, requested_fields=flds
+ )
+ return func(self, object_type, *args, **kwargs)
  return wrapper
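A self-contained sketch of the new wrapper behaviour (FakeTree stands in for ReqFieldsTree; everything else mirrors the hunk above):

def requested_fields_to_tree(func):  # behaviour as in the diff
    def wrapper(self, object_type, *args, **kwargs):
        if 'requested_fields' in kwargs:
            if 'requested_tree' in kwargs:
                raise TypeError('Both requested_fields and requested_tree arguments given')
            flds = kwargs.pop('requested_fields')
            if flds and object_type:
                kwargs['requested_tree'] = FakeTree(object_type, self, requested_fields=flds)
        return func(self, object_type, *args, **kwargs)
    return wrapper

class FakeTree:  # stand-in for ReqFieldsTree
    def __init__(self, object_type, ds, requested_fields=None):
        self.object_type = object_type
        self.requested_fields = requested_fields

class Demo:
    @requested_fields_to_tree
    def get_by_id(self, object_type, ids, requested_tree=None):
        return requested_tree

assert Demo().get_by_id('sample', ['x1'], requested_fields=['name']).requested_fields == ['name']
assert Demo().get_by_id('sample', ['x1'], requested_fields=[]) is None  # empty list: no tree built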

@@ -2,6 +2,7 @@
  #
  # SPDX-License-Identifier: MIT

+ from .benchling_extraction_container_to_elastic_extraction_container_converter import BenchlingExtractionContainerToElasticExtractionContainerConverter # noqa F401
  from .benchling_extraction_to_elastic_extraction_converter import BenchlingExtractionToElasticExtractionConverter # noqa F401
  from .benchling_extraction_to_elastic_sequencing_request_converter import BenchlingExtractionToElasticSequencingRequestConverter # noqa F401
  from .benchling_sequencing_request_to_elastic_sequencing_request_converter import ( # noqa F401
@@ -0,0 +1,53 @@
+ # SPDX-FileCopyrightText: 2026 Genome Research Ltd.
+ #
+ # SPDX-License-Identifier: MIT
+
+ from typing import Iterable
+
+ from ...core import (
+ DataObject,
+ DataObjectToDataObjectOrUpdateConverter
+ )
+
+
+ class BenchlingExtractionContainerToElasticExtractionContainerConverter(
+ DataObjectToDataObjectOrUpdateConverter):
+ def convert(self, data_object: DataObject) -> Iterable[DataObject]:
+ if data_object.tissue_sts_id is not None:
+ ret = self._data_object_factory(
+ 'extraction_container',
+ data_object.id,
+ attributes={
+ **{k: v
+ for k, v in data_object.attributes.items()
+ if k not in ['tissue_sts_id', 'specimen_id', 'taxon_id',
+ 'programme_id', 'eln_tissue_prep_id', 'extraction_id']}
+ },
+ to_one={
+ 'sample': self._data_object_factory(
+ 'sample',
+ data_object.tissue_sts_id
+ ) if data_object.tissue_sts_id is not None else None,
+ 'species': self._data_object_factory(
+ 'species',
+ data_object.taxon_id
+ ) if data_object.taxon_id is not None else None,
+ 'specimen': self._data_object_factory(
+ 'specimen',
+ data_object.specimen_id
+ ) if data_object.specimen_id is not None else None,
+ 'tolid': self._data_object_factory(
+ 'tolid',
+ data_object.programme_id
+ ) if data_object.programme_id is not None else None,
+ 'tissue_prep': self._data_object_factory(
+ 'tissue_prep',
+ data_object.eln_tissue_prep_id
+ ) if data_object.eln_tissue_prep_id is not None else None,
+ 'extraction': self._data_object_factory(
+ 'extraction',
+ data_object.extraction_id
+ ) if data_object.extraction_id is not None else None,
+ }
+ )
+ yield ret
@@ -15,12 +15,18 @@ class BenchlingSequencingRequestToElasticSequencingRequestConverter(
  def convert(self, data_object: DataObject) -> Iterable[DataObject]:
  if data_object.sts_id is not None:
  extraction = None
+ extraction_container = None
  tissue_prep = None
  if 'extraction_id' in data_object.attributes:
  extraction = self._data_object_factory(
  'extraction',
  data_object.extraction_id
  )
+ if 'fluidx_container_id' in data_object.attributes:
+ extraction_container = self._data_object_factory(
+ 'extraction_container',
+ data_object.fluidx_container_id
+ )
  if 'tissue_prep_id' in data_object.attributes:
  tissue_prep = self._data_object_factory(
  'tissue_prep',
@@ -34,7 +40,7 @@ class BenchlingSequencingRequestToElasticSequencingRequestConverter(
  for k, v in data_object.attributes.items()
  if k not in ['sanger_sample_id', 'sts_id',
  'specimen_id', 'taxon_id', 'extraction_id',
- 'programme_id', 'tissue_prep_id']}
+ 'programme_id', 'tissue_prep_id', 'fluidx_container_id']}
  },
  to_one={
  'sample': self._data_object_factory(
@@ -54,6 +60,7 @@ class BenchlingSequencingRequestToElasticSequencingRequestConverter(
  data_object.programme_id
  ) if data_object.programme_id is not None else None,
  'extraction': extraction,
+ 'extraction_container': extraction_container,
  'tissue_prep': tissue_prep,
  })
  yield ret
@@ -29,5 +29,13 @@ class ElasticSequencingRequestToElasticRunDataUpdateConverter(
  'extraction',
  extraction.id
  )
+ if 'benchling_extraction_container' in data_object.to_one_relationships:
+ extraction_container = \
+ data_object.to_one_relationships['benchling_extraction_container']
+ if extraction_container is not None:
+ to_ones['benchling_extraction_container'] = self._data_object_factory(
+ 'extraction_container',
+ extraction_container.id
+ )
  yield (None, to_ones | {
  'mlwh_sequencing_request.id': data_object.id}) # The candidate key
@@ -3,33 +3,42 @@
  # SPDX-License-Identifier: MIT

  import re
+ from dataclasses import dataclass
  from datetime import time
+ from typing import Iterable

- from tol.core import DataObject
-
-
- class Converter:
- def convert(self, obj):
- raise NotImplementedError()
-
-
- class TimeStringToTimeConverter(Converter):
- """
- Converts string fields representing time in HH:MM (24-hour) format to Python time objects.
- If the string is not in HH:MM, tries to append ':00' and parse as HH:MM:SS.
- """
- def __init__(self, field: str):
- self.field = field
-
- def convert(self, obj: DataObject) -> DataObject:
- value = obj.attributes.get(self.field)
- if isinstance(value, str):
- match = re.match(r'^(\d{1,2}):(\d{2})(?::(\d{2}))?$', value)
- if match:
- h, m = int(match.group(1)), int(match.group(2))
- s = int(match.group(3)) if match.group(3) else 0
- try:
- obj.attributes[self.field] = time(h, m, s)
- except ValueError:
- pass
- return obj
+ from tol.core import DataObject, DataObjectToDataObjectOrUpdateConverter
+
+
+ class TimeStringToTimeConverter(DataObjectToDataObjectOrUpdateConverter):
+
+ @dataclass(slots=True, frozen=True, kw_only=True)
+ class Config:
+ field_names: list[str]
+
+ __slots__ = ['__config']
+ __config: Config
+
+ def __init__(self, data_object_factory, config: Config) -> None:
+ super().__init__(data_object_factory)
+ self.__config = config
+ self._data_object_factory = data_object_factory
+
+ def convert(self, data_object: DataObject) -> Iterable[DataObject]:
+ """
+ Converts string fields representing time in HH:MM (24-hour) format to Python time objects.
+ If the string is not in HH:MM, tries to append ':00' and parse as HH:MM:SS.
+ """
+
+ for field_name in self.__config.field_names:
+ value = data_object.attributes.get(field_name)
+ if isinstance(value, str):
+ match = re.match(r'^(\d{1,2}):(\d{2})(?::(\d{2}))?$', value)
+ if match:
+ h, m = int(match.group(1)), int(match.group(2))
+ s = int(match.group(3)) if match.group(3) else 0
+ try:
+ data_object.attributes[field_name] = time(h, m, s)
+ except ValueError:
+ pass
+ yield data_object
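The parsing itself is unchanged; a standalone sketch of what it does to a single value (the converter class additionally needs the flow's data_object_factory and a Config listing the field names):

import re
from datetime import time

def parse_hhmm(value):  # illustrative helper, not part of the package
    match = re.match(r'^(\d{1,2}):(\d{2})(?::(\d{2}))?$', value)
    if not match:
        return value
    h, m = int(match.group(1)), int(match.group(2))
    s = int(match.group(3)) if match.group(3) else 0
    return time(h, m, s)

assert parse_hhmm('09:30') == time(9, 30)
assert parse_hhmm('23:59:10') == time(23, 59, 10)
assert parse_hhmm('not a time') == 'not a time'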
@@ -46,7 +46,7 @@ class AllowedValuesFromDataSourceValidator(Validator):
  obj.get_field_by_name(
  self.__config.datasource_field_name
  ) for obj in ds.get_list(
- object_type=self.__config.datasource_object_type
+ self.__config.datasource_object_type
  )
  ]

tol/validators/tolid.py CHANGED
@@ -90,7 +90,7 @@ class TolidValidator(Validator):
  f = DataSourceFilter()
  f.and_ = {'specimen_id': {'eq': {'value': specimen_id}}}
  self.__cached_tolids[specimen_id] = list(self.__datasource.get_list(
- object_type='specimen',
+ 'specimen',
  object_filters=f
  ))
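In both validators the object type is now passed positionally rather than as the object_type keyword. A sketch of the updated call shape (names are illustrative; filter_obj is whatever DataSourceFilter instance the validator builds):

def cached_tolids(datasource, filter_obj, specimen_id):
    filter_obj.and_ = {'specimen_id': {'eq': {'value': specimen_id}}}
    # The object type is now the first positional argument, not object_type=...
    return list(datasource.get_list('specimen', object_filters=filter_obj))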

@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: tol-sdk
- Version: 1.8.6
+ Version: 1.8.8
  Summary: SDK for interaction with ToL, Sanger and external services
  Author-email: ToL Platforms Team <tol-platforms@sanger.ac.uk>
  License: MIT
@@ -31,11 +31,11 @@ tol/api_base/misc/list_get_parameters.py,sha256=6DtUgfKPTok3utKPOxceJdwV_ZnJ5nis
  tol/api_base/misc/relation_url.py,sha256=qfo-okp8Gv9-PEDghMfGZ2pHdYbHRhohvA9v3Govtlo,1127
  tol/api_base/misc/stats_parameters.py,sha256=IVpHqUeGQyjuih59jwqT-fIQMCBeESi2T9b4r9i4J28,1721
  tol/api_client/__init__.py,sha256=58SAywuMrIUCBAY9us_d_RLTMnaUTYWWts0LRQC5wLo,187
- tol/api_client/api_datasource.py,sha256=AYvaOqsrTuBu8CTRSmm9Fo-VHReJAQjpZUSZc63dLXQ,15161
- tol/api_client/client.py,sha256=H4X5_Jj-JmnaeAN85ghCWGtQUhYprOZqJXNmCJq_Vz0,14216
+ tol/api_client/api_datasource.py,sha256=B27MWyptuIfZzCjiETv-sdjVKs0aluMCtyKUmptSKXY,14639
+ tol/api_client/client.py,sha256=vCcnnDjvDlrf5i_idAwpN9oWtguY9_4_nK_k1k0HKIw,14231
  tol/api_client/converter.py,sha256=g32fjqga4mC923n95HmQImPuawMfeb9rQcl3ZxUWP2s,4463
  tol/api_client/exception.py,sha256=MkvJaIyRVCzQ2rKOYnCOcT747mpOeQwGJJl3Kkb1BsQ,3999
- tol/api_client/factory.py,sha256=PHRMpCJwBcgaEU_9x_9hNM__RI0udTN7Q7xzAxfVcNk,3492
+ tol/api_client/factory.py,sha256=fMeqIomKZXrpt7qq1vZAdQH19lErgUqc_G1-kRqw-TY,3896
  tol/api_client/filter.py,sha256=D49RIai5Yj4CiQvIkgaEIXdw_oSU7CD5cn9SdXWRYXU,1474
  tol/api_client/parser.py,sha256=88Q2RlPwn9JJsSIukC2_yxfaM2AaXjsGuDOESqY7se4,8351
  tol/api_client/validate.py,sha256=9wFZotTJ4fI21BdO5AuMZok0rDQ4s8zM_WojLdVvA0A,3071
@@ -45,9 +45,9 @@ tol/barcodes/main.py,sha256=QxueF2AdrclIaQuu7W4lb4eF5tU_l-p3UMsDFIYgCfo,6213
  tol/benchling/__init__.py,sha256=9VIL6PBxvZRaBrN1j2Di75MdEY261kEHs_D-lUutIlg,229
  tol/benchling/benchling_converter.py,sha256=CO7BMvMAM52uIFjQWZFedRve0XNSmC9YtumJVAZrYac,30809
  tol/benchling/benchling_datasource.py,sha256=aHoGOJYX_dsL3G-9lXlY0bQQl4pMXf4a852sAkl-sKs,35112
- tol/benchling/benchling_warehouse_datasource.py,sha256=opsdvHz8l06NTmt84HrIgUJxV_DsurVgFtGs3_5PMoM,4635
+ tol/benchling/benchling_warehouse_datasource.py,sha256=r3FcEknQmdO5kAChbClOyuCk1_vGncpUvjAa-UepmGs,4822
  tol/benchling/sql/__init__.py,sha256=4LbvDIZOOG7p-ebbvivP7NvrJeApUvGEIcDL58ahQJE,85
- tol/benchling/sql/extraction_containers_dna.sql,sha256=YRQ0W1d-BjXB9gcMpf5ZyjHbPVp2VU0KkYi4e0JvYtA,6680
+ tol/benchling/sql/extraction_container_extraction_extraction_type_dna.sql,sha256=Qk6vHhTFQzSiDFzEVtr0sfyiyThNxx1VAAUgp4yNn8E,6810
  tol/benchling/sql/extraction_extraction_type_dna.sql,sha256=UvxboWBoXXp7RHUdRKNiQTS-AXdLdz8bFEXCS6q9SoE,3094
  tol/benchling/sql/extraction_extraction_type_lres.sql,sha256=7Y6a8v0V-jjU5Kg3czuZjcPLvGSfrnUDekpHo2mUgnc,1556
  tol/benchling/sql/extraction_extraction_type_pooled_dna.sql,sha256=fNjCJPaViGrR6D8sLwZK2Zg5LqQqh16HB0s7ZeqTqdg,4480
@@ -61,7 +61,7 @@ tol/benchling/sql/results_pacbio_prep.sql,sha256=a1tGu9irtsyPlCA0_FxBrqYj_uFiYPM
  tol/benchling/sql/results_pacbio_prep_pooled.sql,sha256=WZfMZbOeOfD55iDQQEwPNc7mF0gSJfMj94A1m9whLtw,6291
  tol/benchling/sql/sample.sql,sha256=ZFRXWabV9jjivAECCJz-zi05a5oSzTUQSbT2ssy4sGU,4174
  tol/benchling/sql/sequencing_request_sequencing_platform_hic.sql,sha256=W5VCnWvR16CJHnljzqdcQKJ8GWoci9QQhYVA2SbNyKk,3044
- tol/benchling/sql/sequencing_request_sequencing_platform_pacbio.sql,sha256=ecJvV_qvZwnWKvr5tS0kZVHySNTEgU0irbklsE6tUfQ,22958
+ tol/benchling/sql/sequencing_request_sequencing_platform_pacbio.sql,sha256=B4cVlU--Z8E6-7hCt1k5WVB1qCJZSd4qVSd66YHiu2w,22998
  tol/benchling/sql/sequencing_request_sequencing_platform_rnaseq.sql,sha256=zZ3d_VLXMHAp3n6agX-Y-Oj6HIzk32IDq5UssJuYPMs,3420
  tol/benchling/sql/sequencing_request_sequencing_platform_wgs.sql,sha256=RVSR2y_ZYT0j-NDYYAzGSkLwofXy6A_9AfI0RFuztbQ,7062
  tol/benchling/sql/tissue_prep.sql,sha256=8JAOUaXDc0nu0qJeIYLdTXY5APklSighDoESHtzZ8vw,6141
@@ -97,10 +97,10 @@ tol/core/datasource.py,sha256=e9GaeDPfO_Gs7cgQhmNxCiSDlRNf64reegzFebcMNkA,6303
  tol/core/datasource_error.py,sha256=TqfqaPANG0gishadhA7myCmTO1Fg9u7hVZOvsY6BdAo,1660
  tol/core/datasource_filter.py,sha256=RY2S9kTx0XwdrFRSE2n2GohB9__fKGzFVsZrkN5hzQk,726
  tol/core/datasource_utils.py,sha256=18mwvFmtJL73_mxtFb56rKXZCGCtZFoEb8sWFKn3Yf0,6232
- tol/core/factory.py,sha256=pLLu8l-yK8QaLTt52izMhKZ2VlFHqRQlUHwMaLL6DI4,9156
+ tol/core/factory.py,sha256=qbLvp5mLTcHbxOjopqtnK-wbZrlskgXjRIREszZjSyE,9157
  tol/core/http_client.py,sha256=QyZarplEHVYIrqEfrySeHbawfbnBU4nN62TLt41x4tY,2242
  tol/core/relationship.py,sha256=etdyCjLbfi2tgkaqzE6cntpNtTzgT_jOPGeNKmPu5yc,4624
- tol/core/requested_fields.py,sha256=-UXBnalzK1Fd-6zaYbZxlvlQOqIndOW0Zjvr4Z5UgaQ,7397
+ tol/core/requested_fields.py,sha256=YH3i3VCCPGDJGntMNxRu9JmeBGYBFwywivhyl2bdTww,7451
  tol/core/session.py,sha256=6AohamIEfB8oV3Z414ishKqmlTgVfUaGYWzvxLgZgM4,3803
  tol/core/validate.py,sha256=kFRPhgYyeZisGbSXR7S1pmD8jvz_aZ7RX40bT-gD_iA,4083
  tol/core/operator/__init__.py,sha256=ircyLdj45IlaL5k0cDzWHjEr-_66BAx7YRbuEvWF30k,1826
@@ -155,14 +155,15 @@ tol/flows/__init__.py,sha256=M7iSvnBJs6fJ8M38cW0bYQa9WW0TN8FHAMjIHPDNAJ4,166
  tol/flows/logger.py,sha256=rWXbaknGcPEZRFvC1CiB1qkhFRZsQk435w7VyJ3cpyw,170
  tol/flows/secrets.py,sha256=1mlbsxaahzYRfVAx3XdztHOmUCtDMSJDzHysdbaCtj0,352
  tol/flows/sequencing_submissions.py,sha256=ukz_y5be-BCBN2y3JPQ2EK6b3jwOCh-187j-jnw3EUY,11027
- tol/flows/converters/__init__.py,sha256=m2q04EXBdG5SZ3ieMsm_ork9eUuj-g8z6ulMOlVHs4U,6641
+ tol/flows/converters/__init__.py,sha256=Fcq-w2MwBjbnfXaNITgNubFfPqtyMgoM7lAkqwc7LHw,6806
  tol/flows/converters/auto_detect_manifest_type_converter.py,sha256=uHakTmVHMbm2kFQOWaAv8ynD9ueh7p-kq6wmfEGnmEw,1361
  tol/flows/converters/benchling_entity_to_benchling_worklist_item_converter_factory.py,sha256=PN27fcvN4JLBnLrtPPAot1cWjAwPQHVcIDoMfPDeKzU,1210
+ tol/flows/converters/benchling_extraction_container_to_elastic_extraction_container_converter.py,sha256=QThTQkawm_fmHnPUuVpljexH4aCAi3BcaCW7ci70l0I,2262
  tol/flows/converters/benchling_extraction_to_elastic_extraction_converter.py,sha256=S8pbmIeKlcXrLPRJHYBUGP0-Q7jTOV2QQk2TeA2naWo,1966
  tol/flows/converters/benchling_extraction_to_elastic_sequencing_request_converter.py,sha256=2RiyRvGRSWzpUwEI4p-s0afshJpFUUxPqv2z-nyDSVg,1992
  tol/flows/converters/benchling_sample_casm_to_sts_sample_converter.py,sha256=AIfuLpeSLN1OEAxPxr89C6zS8rXrvo4MBSnwzK27Rgo,1252
  tol/flows/converters/benchling_sample_to_elastic_sample_converter.py,sha256=mGeNBiUI8xWMVjyfVbs2ywpjbJx2-WhbLLnAPfmsWac,1503
- tol/flows/converters/benchling_sequencing_request_to_elastic_sequencing_request_converter.py,sha256=IOytX0vHW0Ittd5XCLfcNeMtOuMg7LN20VJLzeg51xw,2406
+ tol/flows/converters/benchling_sequencing_request_to_elastic_sequencing_request_converter.py,sha256=zBd9QcfdqAfJ-LrxuSddLiefr7hNqOVqnjNIRe_hhP4,2779
  tol/flows/converters/benchling_tissue_prep_to_elastic_tissue_prep_converter.py,sha256=0hcypPlef4xTBC1wLyGUTrXsNHea2aMSzBun3F45fRo,1448
  tol/flows/converters/benchling_tissue_to_sts_sample_converter.py,sha256=DM9JFeqrjkH27GagYGStn5HDOw6ynbv3FM9GpNiAMIg,1194
  tol/flows/converters/bioscan_extra_pantheon_species_to_elastic_sample_update_converter.py,sha256=qaLyHVpOItM7DoOPera8-DHbylY9Ppx-2CjPyqvPUko,611
@@ -179,7 +180,7 @@ tol/flows/converters/elastic_sample_to_benchling_tissue_update_converter.py,sha2
  tol/flows/converters/elastic_sample_to_bold_sample_converter.py,sha256=eyowTZ-iNzI4aj7VgV6hBmmnAOhQ8AXdshnhNrM7vOE,3148
  tol/flows/converters/elastic_sample_to_elastic_sequencing_request_update_converter.py,sha256=nKYcLgZ2LV7A-uCeqfTm2AYFNRwcHpH5X6uxrcsYbgc,776
  tol/flows/converters/elastic_sample_to_sts_sample_converter.py,sha256=X1_unJoxUYmZH8Yu1WbFoTQEdU9ZPngxF109PHJBWFI,1071
- tol/flows/converters/elastic_sequencing_request_to_elastic_run_data_update_converter.py,sha256=NRpfnAOIbHB8g3bX-80XzJGGjl1v34UVUDkPAanP6nE,1262
+ tol/flows/converters/elastic_sequencing_request_to_elastic_run_data_update_converter.py,sha256=GDtBD0jw1Xd4UOnSwR4SpNvDuJ-BUVHo9nlkLtFCS4E,1705
  tol/flows/converters/elastic_sequencing_request_to_sts_sequencing_request_converter.py,sha256=Ur0_lqH-kepHw16BjQBkvzyEWMJxm4rwea7G7BlVHMU,1038
  tol/flows/converters/elastic_species_to_sts_species_converter.py,sha256=f2MIKncCmldAp6Dmy8OCf13yKubC6GK2DuFasUBM9KE,1433
  tol/flows/converters/elastic_tolid_to_elastic_curation_update_converter.py,sha256=9Sn_QBp-1QfAuBmoRhTrib17vgve4oS0T7AdpH9T7xI,668
@@ -207,7 +208,7 @@ tol/flows/converters/sts_sample_project_to_elastic_sample_converter.py,sha256=YE
  tol/flows/converters/sts_sample_to_casm_benchling_converter.py,sha256=Zo577u2v5_Fela2uQVtZsGZmHq4bLecTCC4Ewvq61Xo,39414
  tol/flows/converters/sts_sampleset_to_elastic_sampleset_converter.py,sha256=PUP0Qjy9wTmqp5GHNEd9fukqtWdoMvDfs4rJisfLzcc,3197
  tol/flows/converters/sts_species_to_elastic_species_converter.py,sha256=ELZ_ML8vPlLkfXrx0B_wxUWiPyxkI8UXgSPQZCKknXU,1164
- tol/flows/converters/time_string_to_time.py,sha256=myo9K9pRMT6GtT5vXf3c0UDK5n8bMVbvW7ewhSPkecU,1062
+ tol/flows/converters/time_string_to_time.py,sha256=7kIo6YhcHqFTygsTWzE_BRV8bg92MMoDOLV8hz75x7w,1558
  tol/flows/converters/tolid_specimen_to_elastic_tolid_converter.py,sha256=4Ird6ATYjsjSFZ1AGsxpuWdQ6QtQTvKJ-rF-_TCC_rg,1072
  tol/flows/converters/tolqc_data_to_elastic_run_data_converter.py,sha256=f9PYnsswikskvXpnSlrfYN7wXfyn8iXQpg996ZsQHbw,3799
  tol/flows/converters/tolqc_sample_to_elastic_sequencing_request_converter.py,sha256=MzPcO75Z_3-6nsWC8X0kmmmxJTurV_HOZhseC-tWdFo,1397
@@ -337,7 +338,7 @@ tol/utils/s3.py,sha256=aoYCwJ-qcMqFrpxmViFqPa0O1jgp0phtztO3-0CSNjw,491
  tol/validators/__init__.py,sha256=_ETv6oGQ2bTH_6-foYFy9T5wP5OG3cl96zEjvrIS7zk,1399
  tol/validators/allowed_keys.py,sha256=RJcHBiguL84B8hjSRaXLNES21yZqaKFwJNp2Tz9zvh0,1506
  tol/validators/allowed_values.py,sha256=-Yy3Sqo1WYacGKlot_dn3M2o7Oj5MXOioJrJmrWCCxs,1536
- tol/validators/allowed_values_from_datasource.py,sha256=ICFO6FcYXDN7M2Cv1OwpyN38CdhmY7oU-njzIatA3-w,3185
+ tol/validators/allowed_values_from_datasource.py,sha256=9cVwllBbzfCls8UsojazfCInt9_AakA0_H9pBO1wSL4,3173
  tol/validators/assert_on_condition.py,sha256=eBGgSVfIQ6e45SheM-ZDg7daXJjyZxRVS5L8AWvbXag,2027
  tol/validators/branching.py,sha256=7YFjHNjrrTmy4hZ3E7JKDT6MEsBMhrc3P3p3ykv4wKI,5720
  tol/validators/converter_and_validate.py,sha256=O1uYdrU4YDZ8eZjb7Koots4-8fMVOkJFXESg-LVw2o8,2992
@@ -350,7 +351,7 @@ tol/validators/regex.py,sha256=dLAi_vQt9_DsT6wQZmbYC7X5-Wp15l0leUE6XkPaItg,2602
  tol/validators/regex_by_value.py,sha256=XM5EnT4vgD17rfpR3bUE9I56IemSw26BI9MZtMakd4E,2582
  tol/validators/specimens_have_same_taxon.py,sha256=BaJcZ38ZprPcuGTIorSxxC9uGN0_lj6HS6B54EObcuY,2183
  tol/validators/sts_fields.py,sha256=aYbzy15btEg4-ocDT1qrspe7-atoWRrOJ_KmuPU6J14,8936
- tol/validators/tolid.py,sha256=yODebLYbKtlem3IpVcv8XImvq90r-AK68asH9JEawqo,3897
+ tol/validators/tolid.py,sha256=VOb6lNFz11H_0KaWX8_nvsw8xJEa6KrjB0p-5lkcqog,3885
  tol/validators/types.py,sha256=jMVpckRp8RS93f7usf58YH_K-5rKWgZIYs7bO9dHhQc,2914
  tol/validators/unique_value_check.py,sha256=sFvDooYkKeORvULGEOTsgIcxlbe0AXDWxY3Gbr3j0KI,1282
  tol/validators/unique_values.py,sha256=o5IrfUNLEmlEp8kpInTtFnTq-FqiHSC9TItKdf-LI1o,3114
@@ -358,9 +359,9 @@ tol/validators/unique_whole_organisms.py,sha256=RdqA1GzIf3LTdrmNGGdxv0aW2udDY2P9
  tol/validators/value_check.py,sha256=DdNx_B1gns01zgBg5N6Bwia46Aukw6MAteM-M37Kv1k,1122
  tol/validators/interfaces/__init__.py,sha256=jtOxnwnwqV_29xjmmMcS_kvlt-pQiWwQYJn2YRP07_w,172
  tol/validators/interfaces/condition_evaluator.py,sha256=nj8Cb8hi47OBy6OVNfeLhF-Pjwtr8MiOSymYL6hfVes,3766
- tol_sdk-1.8.6.dist-info/licenses/LICENSE,sha256=RF9Jacy-9BpUAQQ20INhTgtaNBkmdTolYCHtrrkM2-8,1077
- tol_sdk-1.8.6.dist-info/METADATA,sha256=drjwvfHHt_dIUvdywaSjXVuTa0wNCbdMHsLPuONx92o,3142
- tol_sdk-1.8.6.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
- tol_sdk-1.8.6.dist-info/entry_points.txt,sha256=jH3HfTwxjzog7E3lq8CKpUWGIRY9FSXbyL6CpUmv6D0,36
- tol_sdk-1.8.6.dist-info/top_level.txt,sha256=PwKMQLphyZNvagBoriVbl8uwHXQl8IC1niawVG0iXMM,10
- tol_sdk-1.8.6.dist-info/RECORD,,
+ tol_sdk-1.8.8.dist-info/licenses/LICENSE,sha256=RF9Jacy-9BpUAQQ20INhTgtaNBkmdTolYCHtrrkM2-8,1077
+ tol_sdk-1.8.8.dist-info/METADATA,sha256=iyFf12GxWPRsRVPQgR5mrGzLDhAH-OrJ58-4T6CROLA,3142
+ tol_sdk-1.8.8.dist-info/WHEEL,sha256=qELbo2s1Yzl39ZmrAibXA2jjPLUYfnVhUNTlyF1rq0Y,92
+ tol_sdk-1.8.8.dist-info/entry_points.txt,sha256=jH3HfTwxjzog7E3lq8CKpUWGIRY9FSXbyL6CpUmv6D0,36
+ tol_sdk-1.8.8.dist-info/top_level.txt,sha256=PwKMQLphyZNvagBoriVbl8uwHXQl8IC1niawVG0iXMM,10
+ tol_sdk-1.8.8.dist-info/RECORD,,
@@ -1,5 +1,5 @@
  Wheel-Version: 1.0
- Generator: setuptools (80.9.0)
+ Generator: setuptools (80.10.1)
  Root-Is-Purelib: true
  Tag: py3-none-any